def main():
    # Read AMQP settings from bootconf.json, connect a dashi client, and list
    # EPU domain definitions, retrying up to three times.
    f = open("bootconf.json", "r")
    conf_dict = json.load(f)
    rabbitmq_conf = conf_dict['epu']["run_config"]["config"]["server"]["amqp"]
    rabbitmq_host = rabbitmq_conf["host"]
    rabbitmq_username = rabbitmq_conf["username"]
    rabbitmq_password = rabbitmq_conf["password"]
    rabbitmq_exchange = "default_dashi_exchange"
    client_topic = "epum_client_%s" % uuid.uuid4()
    uri = "amqp://%s:%s@%s" % (rabbitmq_username, rabbitmq_password, rabbitmq_host)
    client_dashi = bootstrap.dashi_connect(client_topic, amqp_uri=uri)
    epum_client = EPUManagementClient(client_dashi, topic='epu_management_service')
    for i in range(0, 3):
        try:
            defs = epum_client.list_domain_definitions()
            print defs
            break
        except Exception, ex:
            print ex
            time.sleep(5)
def main():
    # Read AMQP settings from bootconf.json and describe provisioner nodes,
    # retrying up to three times.
    f = open("bootconf.json", "r")
    conf_dict = json.load(f)
    rabbitmq_conf = conf_dict['epu']["run_config"]["config"]["server"]["amqp"]
    rabbitmq_host = rabbitmq_conf["host"]
    rabbitmq_username = rabbitmq_conf["username"]
    rabbitmq_password = rabbitmq_conf["password"]
    rabbitmq_exchange = "default_dashi_exchange"
    uri = "amqp://%s:%s@%s" % (rabbitmq_username, rabbitmq_password, rabbitmq_host)
    client_topic = "provisioner_client_%s" % uuid.uuid4()
    client_dashi = bootstrap.dashi_connect(client_topic, amqp_uri=uri)
    client = ProvisionerClient(client_dashi)
    for i in range(0, 3):
        try:
            x = client.describe_nodes(caller="HTEdNFYDys8RdP")
            print x
            break
        except Exception, ex:
            print ex
            time.sleep(5)
def __init__(self, exchange=None, pidantic_dir=None, amqp_uri=None, config=None, sysname=None):
    configs = ["epuharness"]
    config_files = get_config_paths(configs)
    if config:
        config_files.append(config)
    self.CFG = bootstrap.configure(config_files)
    self.sysname = sysname
    self.logdir = self.CFG.epuharness.logdir
    self.pidantic_dir = (pidantic_dir or
                         os.environ.get('EPUHARNESS_PERSISTENCE_DIR') or
                         self.CFG.epuharness.pidantic_dir)
    self.exchange = exchange or self.CFG.server.amqp.get('exchange', None) or str(uuid.uuid4())
    self.CFG.server.amqp.exchange = self.exchange
    self.CFG.dashi.sysname = sysname
    self.dashi = bootstrap.dashi_connect(self.CFG.dashi.topic, self.CFG,
                                         amqp_uri=amqp_uri, sysname=sysname)
    self.amqp_cfg = dict(self.CFG.server.amqp)
    self.factory = None
    self.savelogs_dir = None
def __init__(self, CFG):
    self.CFG = CFG
    self.dashi = bootstrap.dashi_connect(CFG.test.pinger_name, CFG)
    self.done = False
    self.end_time = None
    self.start_time = None
    self.message_count = 0
    self.timer = Timer(float(CFG.test.runtime), self.timeout)
def __init__(self, CFG):
    self.CFG = CFG
    self.dashi = bootstrap.dashi_connect(CFG.test.receiver_name, CFG)
    self.done = False
    self.end_time = None
    self.start_time = None
    self.dashi.handle(self.incoming, "incoming")
    self.dashi.handle(self.final_msg, "final_msg")
    self.message_count = 0
def __init__(self, CFG):
    self.CFG = CFG
    self.dashi = bootstrap.dashi_connect(CFG.test.sender_name, CFG)
    self.done = False
    self.timer = Timer(float(CFG.test.runtime), self.timeout)
    self.end_time = None
    self.start_time = None
    self.message_count = 0
    self.message = build_message(int(self.CFG.test.message.entry_size),
                                 int(self.CFG.test.message.entry_count))
def __init__(self, incoming, CFG, log=logging):
    self.CFG = CFG
    self.ee_name = CFG.eeagent.name
    self.pd_name = CFG.pd.name
    self.exchange = CFG.dashi.exchange
    self._log = log
    self.dashi = dashi_connect(self.pd_name, CFG)
    self.incoming = incoming
    self.dashi.handle(self.heartbeat, "heartbeat")
def __init__(self, CFG, cnt):
    Thread.__init__(self)
    self._myname = CFG.test.pinger_name + "_" + str(cnt)
    self.CFG = CFG
    self.dashi = bootstrap.dashi_connect(self._myname, CFG)
    self.done = False
    self.end_time = None
    self.start_time = None
    self.message_count = 0
    self.dashi.handle(self.pong, "pong")
    self.timer = Timer(float(CFG.test.runtime), self.timeout)
def __init__(self, CFG, factory, log=logging):
    self._log = log
    self._log.log(logging.DEBUG, "Starting the heartbeat thread")
    self._dashi = bootstrap.dashi_connect(CFG.eeagent.name, CFG)
    self._CFG = CFG
    self._res = None
    self._interval = int(CFG.eeagent.heartbeat)
    self._done = False
    self._factory = factory
    self._next_beat(datetime.datetime.now())
def __init__(self, incoming=None, CFG=None, dashi=None, ee_name=None,
             pd_name=None, handle_heartbeat=True, log=logging):
    self.CFG = CFG
    self.ee_name = ee_name or CFG.eeagent.name
    if dashi:
        self.dashi = dashi
    else:
        self.pd_name = pd_name or CFG.pd.name
        self.exchange = CFG.server.amqp.exchange
        self.dashi = dashi_connect(self.pd_name, CFG)
    self._log = log
    self.incoming = incoming
    if handle_heartbeat:
        self.dashi.handle(self.heartbeat, "heartbeat")
def __init__(self, CFG, process_managers_map, log):
    self.CFG = CFG
    self._process_managers_map = process_managers_map
    self.pd_name = CFG.pd.name
    self.ee_name = CFG.eeagent.name
    self.exchange = CFG.dashi.exchange
    self._log = log
    self._lock = threading.RLock()
    self.dashi = dashi_connect(self.ee_name, CFG)
    self.dashi.handle(self.launch_process, "launch_process")
    self.dashi.handle(self.terminate_process, "terminate_process")
    self.dashi.handle(self.dump_state, "dump_state")
    self.dashi.handle(self.cleanup, "cleanup")
def __init__(self, CFG, factory, log=logging):
    self._log = log
    self._log.log(logging.DEBUG, "Starting the heartbeat thread")
    self._dashi = bootstrap.dashi_connect(CFG.eeagent.name, CFG)
    self._CFG = CFG
    self._res = None
    self._interval = int(CFG.eeagent.heartbeat)
    self._done = False
    self._factory = factory
    self._next_beat(datetime.datetime.now())
    self._factory.set_state_change_callback(self._state_change_callback, None)
def __init__(self, *args, **kwargs): configs = ["service", "dtrs"] config_files = get_config_paths(configs) self.CFG = bootstrap.configure(config_files) amqp_uri = kwargs.get('amqp_uri') self.amqp_uri = amqp_uri self.sysname = kwargs.get('sysname') self.dashi = bootstrap.dashi_connect(self.CFG.dtrs.service_name, self.CFG, self.amqp_uri, self.sysname) store = kwargs.get('store') self.store = store or get_dtrs_store(self.CFG) self.store.initialize() self.core = DTRSCore(self.store)
def __init__(self, CFG, process_managers_map, log, core_class=None):
    if core_class:
        self.core = core_class(CFG, process_managers_map, log)
    else:
        self.core = EEAgentCore(CFG, process_managers_map, log)
    self.CFG = CFG
    self._process_managers_map = process_managers_map
    self.ee_name = CFG.eeagent.name
    self.exchange = CFG.server.amqp.exchange
    self._log = log
    self.dashi = dashi_connect(self.ee_name, CFG)
    self.dashi.handle(self.launch_process, "launch_process")
    self.dashi.handle(self.terminate_process, "terminate_process")
    self.dashi.handle(self.restart_process, "restart_process")
    self.dashi.handle(self.dump_state, "dump_state")
    self.dashi.handle(self.cleanup, "cleanup")
def main():
    # Read AMQP settings from bootconf.json and describe provisioner nodes
    # (single attempt, no retry loop).
    f = open("bootconf.json", "r")
    conf_dict = json.load(f)
    rabbitmq_conf = conf_dict['epu']["run_config"]["config"]["server"]["amqp"]
    rabbitmq_host = rabbitmq_conf["host"]
    rabbitmq_username = rabbitmq_conf["username"]
    rabbitmq_password = rabbitmq_conf["password"]
    rabbitmq_exchange = "default_dashi_exchange"
    uri = "amqp://%s:%s@%s" % (rabbitmq_username, rabbitmq_password, rabbitmq_host)
    client_topic = "provisioner_client_%s" % uuid.uuid4()
    client_dashi = bootstrap.dashi_connect(client_topic, amqp_uri=uri)
    client = ProvisionerClient(client_dashi)
    x = client.describe_nodes(caller="HTEdNFYDys8RdP")
    print x
    return 0
def main():
    # Read AMQP settings from bootconf.json and list EPU domain definitions
    # (single attempt, no retry loop).
    f = open("bootconf.json", "r")
    conf_dict = json.load(f)
    rabbitmq_conf = conf_dict['epu']["run_config"]["config"]["server"]["amqp"]
    rabbitmq_host = rabbitmq_conf["host"]
    rabbitmq_username = rabbitmq_conf["username"]
    rabbitmq_password = rabbitmq_conf["password"]
    rabbitmq_exchange = "default_dashi_exchange"
    uri = "amqp://%s:%s@%s" % (rabbitmq_username, rabbitmq_password, rabbitmq_host)
    client_topic = "epum_client_%s" % uuid.uuid4()
    client_dashi = bootstrap.dashi_connect(client_topic, amqp_uri=uri)
    epum_client = EPUManagementClient(client_dashi, 'epu_management_service')
    defs = epum_client.list_domain_definitions()
    print defs
    return 0
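# The check scripts in this collection define main() only. A minimal, assumed
# entry point (not present in the original snippets) would look like the
# following; it presumes "import sys" alongside the json, uuid, and time imports
# these scripts already rely on:
if __name__ == "__main__":
    sys.exit(main())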
def main():
    # Build a CFG DotDict from bootconf.json and list DTRS sites, retrying up
    # to three times.
    f = open("bootconf.json", "r")
    conf_dict = json.load(f)
    CFG = DotDict(conf_dict['epu']["run_config"]["config"])
    CFG.server.amqp.vhost = '/'
    client_topic = "dtrs_client_%s" % uuid.uuid4()
    client_dashi = bootstrap.dashi_connect(client_topic, CFG=CFG)
    dtrs_client = DTRSClient(dashi=client_dashi)
    for i in range(0, 3):
        try:
            sites = dtrs_client.list_sites()
            print sites
            break
        except Exception, ex:
            print ex
            time.sleep(5)
def main():
    # Build a CFG DotDict from bootconf.json and list EPU domain definitions,
    # retrying up to three times.
    f = open("bootconf.json", "r")
    conf_dict = json.load(f)
    CFG = DotDict(conf_dict['epu']["run_config"]["config"])
    CFG.server.amqp.vhost = '/'
    client_topic = "epum_client_%s" % uuid.uuid4()
    client_dashi = bootstrap.dashi_connect(client_topic, CFG=CFG)
    epum_client = EPUManagementClient(client_dashi, topic='epu_management_service')
    for i in range(0, 3):
        try:
            defs = epum_client.list_domain_definitions()
            print defs
            break
        except Exception, ex:
            print ex
            time.sleep(5)
def __init__(self, *args, **kwargs): configs = ["service", "highavailability"] config_files = get_config_paths(configs) self.CFG = bootstrap.configure(config_files) exchange = kwargs.get("exchange") if exchange: self.CFG.server.amqp.exchange = exchange self.topic = kwargs.get("service_name") or self.CFG.highavailability.get("service_name") or DEFAULT_TOPIC self.amqp_uri = kwargs.get("amqp_uri") or None self.dashi = bootstrap.dashi_connect(self.topic, self.CFG, self.amqp_uri, sysname=kwargs.get("sysname")) process_dispatchers = kwargs.get("process_dispatchers") or self.CFG.highavailability.processdispatchers policy_name = self.CFG.highavailability.policy.name try: policy_map[policy_name.lower()] self.policy = policy_name.lower() except KeyError: raise Exception("HA Service doesn't support '%s' policy" % policy_name) policy_parameters = kwargs.get("policy_parameters") or self.CFG.highavailability.policy.parameters process_definition_id = kwargs.get("process_definition_id") or self.CFG.highavailability.process_definition_id self.policy_interval = kwargs.get("policy_interval") or self.CFG.highavailability.policy.interval self.control = DashiHAProcessControl(self.dashi, process_dispatchers) core = HighAvailabilityCore self.core = core( self.CFG.highavailability, self.control, process_dispatchers, self.policy, parameters=policy_parameters, process_definition_id=process_definition_id, )
def __init__(self, exchange=None, pidantic_dir=None, amqp_uri=None, config=None, sysname=None):
    configs = ["epuharness"]
    config_files = get_config_paths(configs)
    if config:
        config_files.append(config)
    self.CFG = bootstrap.configure(config_files)
    self.sysname = sysname
    self.logdir = self.CFG.epuharness.logdir
    self.pidantic_dir = (pidantic_dir or
                         os.environ.get('EPUHARNESS_PERSISTENCE_DIR') or
                         self.CFG.epuharness.pidantic_dir)
    self.exchange = exchange or self.CFG.server.amqp.get('exchange', None) or str(uuid.uuid4())
    self.CFG.server.amqp.exchange = self.exchange
    self.CFG.dashi.sysname = sysname
    self.dashi = bootstrap.dashi_connect(self.CFG.dashi.topic, self.CFG,
                                         amqp_uri=amqp_uri, sysname=sysname)
    self.amqp_cfg = dict(self.CFG.server.amqp)
    self.factory = None
    self.savelogs_dir = None
def main():
    # Read AMQP settings from bootconf.json and list DTRS sites (single attempt).
    f = open("bootconf.json", "r")
    conf_dict = json.load(f)
    rabbitmq_conf = conf_dict['epu']["run_config"]["config"]["server"]["amqp"]
    rabbitmq_host = rabbitmq_conf["host"]
    rabbitmq_username = rabbitmq_conf["username"]
    rabbitmq_password = rabbitmq_conf["password"]
    client_topic = "dtrs_client_%s" % uuid.uuid4()
    uri = "amqp://%s:%s@%s" % (rabbitmq_username, rabbitmq_password, rabbitmq_host)
    # dtrs = DTRS(amqp_uri=uri)
    client_dashi = bootstrap.dashi_connect(client_topic, amqp_uri=uri)
    dtrs_client = DTRSClient(dashi=client_dashi)
    sites = dtrs_client.list_sites()
    print sites
    return 0
def __init__(self, *args, **kwargs): configs = ["service", "epu_worker"] config_files = get_config_paths(configs) self.CFG = bootstrap.configure(config_files) self.log = logging.getLogger() try: if os.environ.get('EPU_USE_GEVENT'): bootstrap.enable_gevent() else: self.log.info("Using standard python Threading") except: self.log.warning("gevent not available. Falling back to threading") self.queue_name_work = self.CFG.queue_name_work extradict = {"queue_name_work": self.queue_name_work} cei_events.event("worker", "init_begin", extra=extradict) self.dashi = bootstrap.dashi_connect(self.topic, self.CFG)
def spawn_procs(self):
    self.dtrs = DTRS(amqp_uri=self.amqp_uri, sysname=self.sysname)
    self._spawn_process(self.dtrs.start)
    self.provisioner = ProvisionerService(sites=self.sites, store=self.store,
                                          context_client=self.context_client,
                                          notifier=self.notifier,
                                          amqp_uri=self.amqp_uri,
                                          sysname=self.sysname,
                                          default_user=self.default_user,
                                          record_reaping_max_age=self.record_reaping_max_age)
    self._spawn_process(self.provisioner.start)
    self.provisioner.ready_event.wait()
    client_topic = "provisioner_client_%s" % uuid.uuid4()
    self.client_dashi = bootstrap.dashi_connect(client_topic, amqp_uri=self.amqp_uri,
                                                sysname=self.sysname)
    self.client = ProvisionerClient(self.client_dashi)
def __init__(self, *args, **kwargs): configs = ["epuagent"] config_files = get_config_paths(configs) self.CFG = bootstrap.configure(config_files) topic = self.CFG.epuagent.get('service_name') self.topic = topic or "epu_agent_%s" % uuid.uuid4() heartbeat_dest = kwargs.get('heartbeat_dest') self.heartbeat_dest = heartbeat_dest or self.CFG.epuagent.heartbeat_dest node_id = kwargs.get('node_id') self.node_id = node_id or self.CFG.epuagent.node_id heartbeat_op = kwargs.get('heartbeat_op') self.heartbeat_op = heartbeat_op or self.CFG.epuagent.heartbeat_op period = kwargs.get('period_seconds') self.period = float(period or self.CFG.epuagent.period_seconds) # for testing, allow for not starting heartbeat automatically self.start_beat = kwargs.get('start_heartbeat', True) amqp_uri = kwargs.get('amqp_uri') sock = kwargs.get('supervisor_socket') sock = sock or self.CFG.epuagent.get('supervisor_socket') if sock: log.debug("monitoring a process supervisor at: %s", sock) self.supervisor = Supervisor(sock) else: log.debug("not monitoring process supervisor") self.supervisor = None self.core = EPUAgentCore(self.node_id, supervisor=self.supervisor) self.dashi = bootstrap.dashi_connect(self.topic, self.CFG, amqp_uri)
def __init__(self): configs = ["service", "epumanagement"] config_files = get_config_paths(configs) self.CFG = bootstrap.configure(config_files) self.dashi = bootstrap.dashi_connect(self.CFG.epumanagement.service_name, self.CFG) self.default_user = self.CFG.epumanagement.get('default_user') # TODO: create ION class here or depend on epuagent repo as a dep ou_client = MockOUAgentClient() statsd_cfg = self.CFG.get('statsd') if 'mock_provisioner' in self.CFG.epumanagement and \ self.CFG.epumanagement['mock_provisioner']: prov_client = MockProvisionerClient() else: provisioner_topic = self.CFG.epumanagement.provisioner_service_name prov_client = ProvisionerClient(self.dashi, topic=provisioner_topic, statsd_cfg=statsd_cfg, client_name="epumanagement") self.service_name = self.CFG.epumanagement.get(EPUM_INITIALCONF_SERVICE_NAME, EPUM_DEFAULT_SERVICE_NAME) self.proc_name = self.CFG.epumanagement.get(EPUM_INITIALCONF_PROC_NAME, None) self.store = get_epum_store(self.CFG, service_name=self.service_name, proc_name=self.proc_name) self.store.initialize() dtrs_client = DTRSClient(self.dashi, statsd_cfg=statsd_cfg, client_name=self.CFG.epumanagement.service_name) self.epumanagement = EPUManagement(self.CFG.epumanagement, SubscriberNotifier(self.dashi), prov_client, ou_client, dtrs_client, store=self.store, statsd_cfg=statsd_cfg) # hack to inject epum reference for mock prov client if isinstance(prov_client, MockProvisionerClient): prov_client._set_epum(self.epumanagement)
def main():
    # Read AMQP settings from bootconf.json and list DTRS sites, retrying up
    # to three times.
    f = open("bootconf.json", "r")
    conf_dict = json.load(f)
    rabbitmq_conf = conf_dict['epu']["run_config"]["config"]["server"]["amqp"]
    rabbitmq_host = rabbitmq_conf["host"]
    rabbitmq_username = rabbitmq_conf["username"]
    rabbitmq_password = rabbitmq_conf["password"]
    client_topic = "dtrs_client_%s" % uuid.uuid4()
    uri = "amqp://%s:%s@%s" % (rabbitmq_username, rabbitmq_password, rabbitmq_host)
    client_dashi = bootstrap.dashi_connect(client_topic, amqp_uri=uri)
    dtrs_client = DTRSClient(dashi=client_dashi)
    for i in range(0, 3):
        try:
            sites = dtrs_client.list_sites()
            print sites
            break
        except Exception, ex:
            print ex
            time.sleep(5)
def main():
    f = open("bootconf.json", "r")
    conf_dict = json.load(f)
    rabbitmq_conf = conf_dict['epu']["run_config"]["config"]["server"]["amqp"]
    rabbitmq_host = rabbitmq_conf["host"]
    rabbitmq_username = rabbitmq_conf["username"]
    rabbitmq_password = rabbitmq_conf["password"]
    client_topic = "dtrs_client_%s" % uuid.uuid4()
    uri = "amqp://%s:%s@%s" % (rabbitmq_username, rabbitmq_password, rabbitmq_host)
    client_dashi = bootstrap.dashi_connect(client_topic, amqp_uri=uri)
    dtrs_client = DTRSClient(dashi=client_dashi)
    for i in range(0, 3):
        try:
            sites = dtrs_client.list_sites()
            print sites
            break
        except Exception, ex:
            print ex
            time.sleep(5)
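# Several of the scripts above repeat the same retry loop: three attempts with a
# five-second pause between failures. A hypothetical helper, not part of the
# original code, that captures the pattern (assumes "time" is imported, as in the
# scripts above):
def call_with_retries(fn, attempts=3, delay=5):
    """Call fn(), retrying on any exception up to `attempts` times."""
    for i in range(attempts):
        try:
            return fn()
        except Exception, ex:
            print ex
            time.sleep(delay)

# Example usage (hypothetical): sites = call_with_retries(dtrs_client.list_sites)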
def __init__(self, CFG):
    self.CFG = CFG
    self.dashi = bootstrap.dashi_connect(CFG.test.ponger_name, CFG)
    self.done = False
    self.dashi.handle(self.ping, "ping")
    self.dashi.handle(self.final_msg, "final_msg")
def __init__(self, *args, **kwargs): configs = ["service", "provisioner"] config_files = get_config_paths(configs) self.CFG = bootstrap.configure(config_files) ssl_no_host_check = kwargs.get('ssl_no_host_check') if ssl_no_host_check is None: ssl_no_host_check = self.CFG.get('ssl_no_host_check') if ssl_no_host_check: import libcloud.security libcloud.security.VERIFY_SSL_CERT = False store = kwargs.get('store') self.proc_name = self.CFG.provisioner.get('proc_name', "") self.store = store or get_provisioner_store(self.CFG, proc_name=self.proc_name) self.store.initialize() notifier = kwargs.get('notifier') epum_topic = self.CFG.provisioner.epu_management_service_name self.notifier = notifier or ProvisionerNotifier(self, [epum_topic]) amqp_uri = kwargs.get('amqp_uri') self.amqp_uri = amqp_uri self.topic = self.CFG.provisioner.get('service_name') self.sysname = kwargs.get('sysname') self.dashi = bootstrap.dashi_connect(self.topic, self.CFG, self.amqp_uri, self.sysname) statsd_cfg = kwargs.get('statsd') statsd_cfg = statsd_cfg or self.CFG.get('statsd') dtrs = kwargs.get('dtrs') dtrs_topic = self.CFG.provisioner.dtrs_service_name self.dtrs = dtrs or self._get_dtrs(dtrs_topic, statsd_cfg=statsd_cfg, client_name=self.topic) contextualization_disabled = kwargs.get('contextualization_disabled') if contextualization_disabled is None: contextualization_disabled = self.CFG.get('contextualization_disabled') if not contextualization_disabled: context_client = kwargs.get('context_client') context_client = context_client or self._get_context_client() else: context_client = None default_user = kwargs.get('default_user') self.default_user = default_user or self.CFG.provisioner.get('default_user') iaas_timeout = kwargs.get('iaas_timeout') iaas_timeout = iaas_timeout or self.CFG.provisioner.get('iaas_timeout') record_reaping_max_age = kwargs.get('record_reaping_max_age') record_reaping_max_age = record_reaping_max_age or self.CFG.provisioner.get('record_reaping_max_age') core = kwargs.get('core') core = core or self._get_core() self.core = core(self.store, self.notifier, self.dtrs, context_client, iaas_timeout=iaas_timeout, statsd_cfg=statsd_cfg) leader = kwargs.get('leader') self.leader = leader or ProvisionerLeader(self.store, self.core, record_reaping_max_age=record_reaping_max_age) self.ready_event = threading.Event()
def __init__(self, amqp_uri=None, topic="process_dispatcher", registry=None, store=None, epum_client=None, notifier=None, definition_id=None, domain_config=None, sysname=None): configs = ["service", "processdispatcher"] config_files = get_config_paths(configs) self.CFG = bootstrap.configure(config_files) self.topic = self.CFG.processdispatcher.get('service_name', topic) self.dashi = bootstrap.dashi_connect(self.topic, self.CFG, amqp_uri=amqp_uri, sysname=sysname) engine_conf = self.CFG.processdispatcher.get('engines', {}) default_engine = self.CFG.processdispatcher.get('default_engine') process_engines = self.CFG.processdispatcher.get('process_engines') if default_engine is None and len(engine_conf.keys()) == 1: default_engine = engine_conf.keys()[0] self.store = store or get_processdispatcher_store(self.CFG) self.store.initialize() self.registry = registry or EngineRegistry.from_config(engine_conf, default=default_engine, process_engines=process_engines) self.eeagent_client = EEAgentClient(self.dashi) domain_definition_id = None base_domain_config = None # allow disabling communication with EPUM for epuharness case if epum_client: self.epum_client = epum_client domain_definition_id = definition_id base_domain_config = domain_config elif not self.CFG.processdispatcher.get('static_resources'): domain_definition_id = definition_id or self.CFG.processdispatcher.get('definition_id') base_domain_config = domain_config or self.CFG.processdispatcher.get('domain_config') epum_service_name = self.CFG.processdispatcher.get('epum_service_name', 'epu_management_service') self.epum_client = EPUManagementClient(self.dashi, epum_service_name) else: self.epum_client = None if notifier: self.notifier = notifier else: self.notifier = SubscriberNotifier(self.dashi) self.core = ProcessDispatcherCore(self.store, self.registry, self.eeagent_client, self.notifier) launch_type = self.CFG.processdispatcher.get('launch_type', 'supd') restart_throttling_config = self.CFG.processdispatcher.get('restart_throttling_config', {}) dispatch_retry_seconds = self.CFG.processdispatcher.get('dispatch_retry_seconds') self.matchmaker = PDMatchmaker(self.core, self.store, self.eeagent_client, self.registry, self.epum_client, self.notifier, self.topic, domain_definition_id, base_domain_config, launch_type, restart_throttling_config, dispatch_retry_seconds) self.doctor = PDDoctor(self.core, self.store, config=self.CFG) self.ready_event = threading.Event()