def main(argv):
    """Entry point for the concurrent-sender ping/pong scale test.

    In "ping" mode, starts CFG.test.concur DashiConcurScalePinger threads,
    waits for them all, and prints a JSON result line with the aggregate
    message count and wall-clock runtime. Otherwise runs a single
    DashiConcurScalePonger until it finishes.
    """
    CFG = bootstrap.configure(argv=argv)
    if CFG.test.type == "ping":
        sender_count = int(CFG.test.concur)
        print("sender count %d" % (sender_count))
        thrs = []
        start_time = datetime.datetime.now()
        for i in range(sender_count):
            sender = DashiConcurScalePinger(CFG, i)
            thrs.append(sender)
            sender.start()
        msg_count_total = 0
        for t in thrs:
            t.join()
            msg_count_total = msg_count_total + t.message_count
        end_time = datetime.datetime.now()
        # FIX: the old computation (tm.seconds + tm.microseconds / 1e6)
        # silently dropped the timedelta's days component. total_seconds()
        # (Python >= 2.7) accounts for days, seconds, and microseconds.
        runtime = (end_time - start_time).total_seconds()
        res = {}
        res['testname'] = "concurtest"
        res['message_count'] = msg_count_total
        res['runtime'] = runtime
        res['process_type'] = "pinger"
        res['connection_count'] = len(thrs)
        print("JSON: %s" % (json.dumps(res)))
    else:
        print("ponger go")
        receiver = DashiConcurScalePonger(CFG)
        receiver.go()
def __init__(self, exchange=None, pidantic_dir=None, amqp_uri=None, config=None, sysname=None):
    """Configure the harness and open its dashi connection.

    exchange: AMQP exchange name; falls back to the configured exchange,
        then a random UUID string.
    pidantic_dir: persistence dir for pidantic; falls back to the
        EPUHARNESS_PERSISTENCE_DIR environment variable, then config.
    amqp_uri: optional broker URI passed through to dashi_connect.
    config: optional extra config file appended after the defaults.
    sysname: optional system namespace, stored and pushed into CFG.dashi.
    """
    configs = ["epuharness"]
    config_files = get_config_paths(configs)
    if config:
        config_files.append(config)
    self.CFG = bootstrap.configure(config_files)
    self.sysname = sysname
    self.logdir = self.CFG.epuharness.logdir
    # Precedence: explicit argument, then environment override, then config.
    self.pidantic_dir = (pidantic_dir or
            os.environ.get('EPUHARNESS_PERSISTENCE_DIR') or
            self.CFG.epuharness.pidantic_dir)
    self.exchange = exchange or self.CFG.server.amqp.get(
        'exchange', None) or str(uuid.uuid4())
    # Write the chosen values back so anything reading CFG sees them too.
    self.CFG.server.amqp.exchange = self.exchange
    self.CFG.dashi.sysname = sysname
    self.dashi = bootstrap.dashi_connect(self.CFG.dashi.topic, self.CFG,
                                         amqp_uri=amqp_uri, sysname=sysname)
    # Snapshot the AMQP config as a plain dict for later process launches.
    self.amqp_cfg = dict(self.CFG.server.amqp)
    self.factory = None
    self.savelogs_dir = None
def main(argv):
    """Run one side of the dashi call-based scale test.

    Acts as a pinger (printing a JSON results line) when CFG.test.type is
    "ping", otherwise runs the ponger side until it completes.
    """
    CFG = bootstrap.configure(argv=argv)
    if CFG.test.type != "ping":
        DashiScaleCallPonger(CFG).go()
        return
    pinger = DashiScaleCallPinger(CFG)
    pinger.go()
    results = pinger.get_results()
    print("JSON: %s" % (json.dumps(results)))
def build_cfg(args):
    """Build and validate the program configuration.

    Loads config/default.yml (relative to determine_path()), layers the
    command-line arguments on top via bootstrap, and validates the result.
    Raises Exception if the default configuration file is missing.
    """
    default_yml = os.path.join(determine_path(), "config", "default.yml")
    if not os.path.exists(default_yml):
        raise Exception("default configuration file not found")
    CFG = bootstrap.configure(config_files=[default_yml], argv=args)
    validate_config(CFG)
    return CFG
def main(argv):
    """Run one endpoint of the dashi scale test and print its JSON results.

    CFG.test.type == "S" selects the sender role; anything else runs the
    receiver. Both roles share the same go()/get_results() protocol.
    """
    CFG = bootstrap.configure(argv=argv)
    if CFG.test.type == "S":
        endpoint = DashiScaleSender(CFG)
    else:
        endpoint = DashiScaleReceiver(CFG)
    endpoint.go()
    res = endpoint.get_results()
    print("JSON: %s" % (json.dumps(res)))
def __init__(self, *args, **kwargs):
    """Wire up the DTRS service: config, dashi connection, store, and core.

    Recognized kwargs: amqp_uri, sysname, store (an already-constructed
    store overrides the one derived from configuration).
    """
    self.CFG = bootstrap.configure(get_config_paths(["service", "dtrs"]))
    self.amqp_uri = kwargs.get('amqp_uri')
    self.sysname = kwargs.get('sysname')
    self.dashi = bootstrap.dashi_connect(self.CFG.dtrs.service_name,
                                         self.CFG, self.amqp_uri,
                                         self.sysname)
    # A store passed by the caller takes precedence over the configured one.
    self.store = kwargs.get('store') or get_dtrs_store(self.CFG)
    self.store.initialize()
    self.core = DTRSCore(self.store)
def __init__(self, *args, **kwargs):
    """Assemble the High Availability service.

    Resolves each setting from kwargs first, then from the
    CFG.highavailability section, validates the configured policy name
    against policy_map, and builds the process-control and core objects.
    Raises Exception for an unsupported policy name.
    """
    self.CFG = bootstrap.configure(
        get_config_paths(["service", "highavailability"]))
    ha_cfg = self.CFG.highavailability

    exchange = kwargs.get("exchange")
    if exchange:
        self.CFG.server.amqp.exchange = exchange

    self.topic = (kwargs.get("service_name") or
                  ha_cfg.get("service_name") or DEFAULT_TOPIC)
    self.amqp_uri = kwargs.get("amqp_uri") or None
    self.dashi = bootstrap.dashi_connect(self.topic, self.CFG,
                                         self.amqp_uri,
                                         sysname=kwargs.get("sysname"))

    process_dispatchers = (kwargs.get("process_dispatchers") or
                           ha_cfg.processdispatchers)

    # The policy name is matched case-insensitively against policy_map.
    policy_name = ha_cfg.policy.name
    normalized_policy = policy_name.lower()
    if normalized_policy not in policy_map:
        raise Exception("HA Service doesn't support '%s' policy" % policy_name)
    self.policy = normalized_policy

    policy_parameters = (kwargs.get("policy_parameters") or
                         ha_cfg.policy.parameters)
    process_definition_id = (kwargs.get("process_definition_id") or
                             ha_cfg.process_definition_id)
    self.policy_interval = (kwargs.get("policy_interval") or
                            ha_cfg.policy.interval)

    self.control = DashiHAProcessControl(self.dashi, process_dispatchers)
    self.core = HighAvailabilityCore(
        ha_cfg,
        self.control,
        process_dispatchers,
        self.policy,
        parameters=policy_parameters,
        process_definition_id=process_definition_id,
    )
def __init__(self, exchange=None, pidantic_dir=None, amqp_uri=None, config=None, sysname=None):
    """Initialize the harness: configuration, exchange, and dashi link.

    An explicit `config` file is layered on top of the default epuharness
    configuration. Each tunable resolves from the explicit argument first,
    then (where applicable) the environment or configuration, with the
    exchange falling back to a random UUID as a last resort.
    """
    config_files = get_config_paths(["epuharness"])
    if config:
        config_files.append(config)
    cfg = bootstrap.configure(config_files)
    self.CFG = cfg

    self.sysname = sysname
    self.logdir = cfg.epuharness.logdir

    env_persistence_dir = os.environ.get('EPUHARNESS_PERSISTENCE_DIR')
    self.pidantic_dir = (pidantic_dir or env_persistence_dir or
                         cfg.epuharness.pidantic_dir)

    configured_exchange = cfg.server.amqp.get('exchange', None)
    self.exchange = exchange or configured_exchange or str(uuid.uuid4())

    # Record the resolved values back into CFG for downstream consumers.
    cfg.server.amqp.exchange = self.exchange
    cfg.dashi.sysname = sysname

    self.dashi = bootstrap.dashi_connect(cfg.dashi.topic, cfg,
                                         amqp_uri=amqp_uri, sysname=sysname)
    self.amqp_cfg = dict(cfg.server.amqp)
    self.factory = None
    self.savelogs_dir = None
def __init__(self, *args, **kwargs):
    """Initialize the EPU worker: config, logging, optional gevent, dashi.

    Enables gevent when EPU_USE_GEVENT is set in the environment; any
    failure to do so is logged and the worker falls back to threading.
    """
    configs = ["service", "epu_worker"]
    config_files = get_config_paths(configs)
    self.CFG = bootstrap.configure(config_files)
    self.log = logging.getLogger()
    # FIX: this was a bare "except:", which also swallows SystemExit and
    # KeyboardInterrupt. Catching Exception keeps the intended best-effort
    # fallback while letting real interrupts propagate.
    try:
        if os.environ.get('EPU_USE_GEVENT'):
            bootstrap.enable_gevent()
        else:
            self.log.info("Using standard python Threading")
    except Exception:
        self.log.warning("gevent not available. Falling back to threading")
    self.queue_name_work = self.CFG.queue_name_work
    extradict = {"queue_name_work": self.queue_name_work}
    cei_events.event("worker", "init_begin", extra=extradict)
    # NOTE(review): self.topic is read here but never assigned in this
    # __init__ -- presumably a class attribute or set by a subclass; verify.
    self.dashi = bootstrap.dashi_connect(self.topic, self.CFG)
def __init__(self, *args, **kwargs):
    """Set up the EPU agent: identity, heartbeat settings, and supervisor.

    Every setting resolves from kwargs first and falls back to the
    CFG.epuagent section; the topic falls back to a random
    "epu_agent_<uuid>" name. When a supervisor socket is available the
    agent monitors that process supervisor, otherwise it runs without one.
    """
    self.CFG = bootstrap.configure(get_config_paths(["epuagent"]))
    agent_cfg = self.CFG.epuagent

    configured_topic = agent_cfg.get('service_name')
    self.topic = configured_topic or "epu_agent_%s" % uuid.uuid4()

    self.heartbeat_dest = (kwargs.get('heartbeat_dest') or
                           agent_cfg.heartbeat_dest)
    self.node_id = kwargs.get('node_id') or agent_cfg.node_id
    self.heartbeat_op = kwargs.get('heartbeat_op') or agent_cfg.heartbeat_op
    self.period = float(kwargs.get('period_seconds') or
                        agent_cfg.period_seconds)

    # for testing, allow for not starting heartbeat automatically
    self.start_beat = kwargs.get('start_heartbeat', True)

    amqp_uri = kwargs.get('amqp_uri')

    sock = kwargs.get('supervisor_socket') or agent_cfg.get('supervisor_socket')
    if sock:
        log.debug("monitoring a process supervisor at: %s", sock)
        self.supervisor = Supervisor(sock)
    else:
        log.debug("not monitoring process supervisor")
        self.supervisor = None

    self.core = EPUAgentCore(self.node_id, supervisor=self.supervisor)
    self.dashi = bootstrap.dashi_connect(self.topic, self.CFG, amqp_uri)
def __init__(self):
    """Wire together the EPU Management service.

    Builds the dashi connection, the provisioner client (real or mock,
    depending on the 'mock_provisioner' config flag), the store, the DTRS
    client, and finally the EPUManagement core that ties them together.
    """
    configs = ["service", "epumanagement"]
    config_files = get_config_paths(configs)
    self.CFG = bootstrap.configure(config_files)
    self.dashi = bootstrap.dashi_connect(self.CFG.epumanagement.service_name,
                                         self.CFG)
    self.default_user = self.CFG.epumanagement.get('default_user')
    # TODO: create ION class here or depend on epuagent repo as a dep
    ou_client = MockOUAgentClient()
    statsd_cfg = self.CFG.get('statsd')
    # Use the mock provisioner only when the flag is present AND truthy.
    if 'mock_provisioner' in self.CFG.epumanagement and \
            self.CFG.epumanagement['mock_provisioner']:
        prov_client = MockProvisionerClient()
    else:
        provisioner_topic = self.CFG.epumanagement.provisioner_service_name
        prov_client = ProvisionerClient(self.dashi, topic=provisioner_topic,
                                        statsd_cfg=statsd_cfg,
                                        client_name="epumanagement")
    self.service_name = self.CFG.epumanagement.get(
        EPUM_INITIALCONF_SERVICE_NAME, EPUM_DEFAULT_SERVICE_NAME)
    self.proc_name = self.CFG.epumanagement.get(EPUM_INITIALCONF_PROC_NAME,
                                                None)
    self.store = get_epum_store(self.CFG, service_name=self.service_name,
                                proc_name=self.proc_name)
    self.store.initialize()
    dtrs_client = DTRSClient(self.dashi, statsd_cfg=statsd_cfg,
                             client_name=self.CFG.epumanagement.service_name)
    self.epumanagement = EPUManagement(self.CFG.epumanagement,
                                       SubscriberNotifier(self.dashi),
                                       prov_client, ou_client, dtrs_client,
                                       store=self.store,
                                       statsd_cfg=statsd_cfg)
    # hack to inject epum reference for mock prov client
    if isinstance(prov_client, MockProvisionerClient):
        prov_client._set_epum(self.epumanagement)
def __init__(self, amqp_uri=None, topic="process_dispatcher", registry=None,
             store=None, epum_client=None, notifier=None, definition_id=None,
             domain_config=None, sysname=None):
    """Assemble the Process Dispatcher service.

    Builds dashi, the engine registry, the EEAgent client, an optional
    EPUM client, the core, the matchmaker, and the doctor. Any component
    passed in as an argument overrides the one derived from configuration
    (used by tests and by the epuharness).
    """
    configs = ["service", "processdispatcher"]
    config_files = get_config_paths(configs)
    self.CFG = bootstrap.configure(config_files)
    self.topic = self.CFG.processdispatcher.get('service_name', topic)
    self.dashi = bootstrap.dashi_connect(self.topic, self.CFG,
                                         amqp_uri=amqp_uri, sysname=sysname)
    engine_conf = self.CFG.processdispatcher.get('engines', {})
    default_engine = self.CFG.processdispatcher.get('default_engine')
    process_engines = self.CFG.processdispatcher.get('process_engines')
    # With exactly one configured engine, it becomes the implicit default.
    # NOTE(review): keys()[0] is Python 2 only (dict views in Python 3 are
    # not indexable) -- consistent with the print statements in this file.
    if default_engine is None and len(engine_conf.keys()) == 1:
        default_engine = engine_conf.keys()[0]
    self.store = store or get_processdispatcher_store(self.CFG)
    self.store.initialize()
    self.registry = registry or EngineRegistry.from_config(
        engine_conf, default=default_engine, process_engines=process_engines)
    self.eeagent_client = EEAgentClient(self.dashi)
    domain_definition_id = None
    base_domain_config = None
    # allow disabling communication with EPUM for epuharness case
    if epum_client:
        self.epum_client = epum_client
        domain_definition_id = definition_id
        base_domain_config = domain_config
    elif not self.CFG.processdispatcher.get('static_resources'):
        domain_definition_id = definition_id or \
            self.CFG.processdispatcher.get('definition_id')
        base_domain_config = domain_config or \
            self.CFG.processdispatcher.get('domain_config')
        epum_service_name = self.CFG.processdispatcher.get(
            'epum_service_name', 'epu_management_service')
        self.epum_client = EPUManagementClient(self.dashi, epum_service_name)
    else:
        # Static resources configured: no EPUM communication at all.
        self.epum_client = None
    if notifier:
        self.notifier = notifier
    else:
        self.notifier = SubscriberNotifier(self.dashi)
    self.core = ProcessDispatcherCore(self.store, self.registry,
                                      self.eeagent_client, self.notifier)
    launch_type = self.CFG.processdispatcher.get('launch_type', 'supd')
    restart_throttling_config = \
        self.CFG.processdispatcher.get('restart_throttling_config', {})
    dispatch_retry_seconds = \
        self.CFG.processdispatcher.get('dispatch_retry_seconds')
    self.matchmaker = PDMatchmaker(self.core, self.store,
                                   self.eeagent_client, self.registry,
                                   self.epum_client, self.notifier,
                                   self.topic, domain_definition_id,
                                   base_domain_config, launch_type,
                                   restart_throttling_config,
                                   dispatch_retry_seconds)
    self.doctor = PDDoctor(self.core, self.store, config=self.CFG)
    # Signaled when the service is ready to handle requests.
    self.ready_event = threading.Event()
def __init__(self, *args, **kwargs):
    """Assemble the Provisioner service.

    Every collaborator (store, notifier, dashi, DTRS client, context
    client, core, leader) resolves from kwargs first and falls back to
    configuration -- the kwargs path is used by tests and the epuharness.
    """
    configs = ["service", "provisioner"]
    config_files = get_config_paths(configs)
    self.CFG = bootstrap.configure(config_files)
    ssl_no_host_check = kwargs.get('ssl_no_host_check')
    if ssl_no_host_check is None:
        ssl_no_host_check = self.CFG.get('ssl_no_host_check')
    if ssl_no_host_check:
        # Deliberately imported lazily: only needed when host checking
        # is being disabled globally for libcloud.
        import libcloud.security
        libcloud.security.VERIFY_SSL_CERT = False
    store = kwargs.get('store')
    self.proc_name = self.CFG.provisioner.get('proc_name', "")
    self.store = store or get_provisioner_store(self.CFG,
                                                proc_name=self.proc_name)
    self.store.initialize()
    notifier = kwargs.get('notifier')
    epum_topic = self.CFG.provisioner.epu_management_service_name
    self.notifier = notifier or ProvisionerNotifier(self, [epum_topic])
    amqp_uri = kwargs.get('amqp_uri')
    self.amqp_uri = amqp_uri
    self.topic = self.CFG.provisioner.get('service_name')
    self.sysname = kwargs.get('sysname')
    self.dashi = bootstrap.dashi_connect(self.topic, self.CFG,
                                         self.amqp_uri, self.sysname)
    statsd_cfg = kwargs.get('statsd')
    statsd_cfg = statsd_cfg or self.CFG.get('statsd')
    dtrs = kwargs.get('dtrs')
    dtrs_topic = self.CFG.provisioner.dtrs_service_name
    self.dtrs = dtrs or self._get_dtrs(dtrs_topic, statsd_cfg=statsd_cfg,
                                       client_name=self.topic)
    contextualization_disabled = kwargs.get('contextualization_disabled')
    if contextualization_disabled is None:
        contextualization_disabled = self.CFG.get('contextualization_disabled')
    if not contextualization_disabled:
        context_client = kwargs.get('context_client')
        context_client = context_client or self._get_context_client()
    else:
        context_client = None
    default_user = kwargs.get('default_user')
    self.default_user = default_user or \
        self.CFG.provisioner.get('default_user')
    iaas_timeout = kwargs.get('iaas_timeout')
    iaas_timeout = iaas_timeout or self.CFG.provisioner.get('iaas_timeout')
    record_reaping_max_age = kwargs.get('record_reaping_max_age')
    record_reaping_max_age = record_reaping_max_age or \
        self.CFG.provisioner.get('record_reaping_max_age')
    # 'core' is a class (factory), not an instance -- it is called below.
    core = kwargs.get('core')
    core = core or self._get_core()
    self.core = core(self.store, self.notifier, self.dtrs, context_client,
                     iaas_timeout=iaas_timeout, statsd_cfg=statsd_cfg)
    leader = kwargs.get('leader')
    self.leader = leader or ProvisionerLeader(
        self.store, self.core, record_reaping_max_age=record_reaping_max_age)
    # Signaled when the service is ready to handle requests.
    self.ready_event = threading.Event()