def __init__(self, name, config):
    self.name = name
    try:
        self.verbose      = config['verbose']
        title             = 'DASAbstractService_%s' % self.name
        self.logger       = PrintManager(title, self.verbose)
        self.dasmapping   = config['dasmapping']
        self.write2cache  = config.get('write_cache', True)
        self.multitask    = config['das'].get('multitask', True)
        self.error_expire = config['das'].get('error_expire', 300)
        self.dbs_global   = None # to be configured at run time
        self.dburi        = config['mongodb']['dburi']
        engine            = config.get('engine', None)
        self.gfs          = db_gridfs(self.dburi)
    except Exception as exc:
        print_exc(exc)
        raise Exception('Failed to parse DAS config')

    # read key/cert info
    try:
        self.ckey, self.cert = get_key_cert()
    except Exception as exc:
        print_exc(exc)
        self.ckey = None
        self.cert = None

    if self.multitask:
        nworkers = config['das'].get('api_workers', 3)
        thr_weights = config['das'].get('thread_weights', [])
        for system_weight in thr_weights:
            system, weight = system_weight.split(':')
            if system == self.name:
                nworkers *= int(weight)
        # if engine:
        #     thr_name = 'DASAbstractService:%s:PluginTaskManager' % self.name
        #     self.taskmgr = PluginTaskManager(\
        #             engine, nworkers=nworkers, name=thr_name)
        #     self.taskmgr.subscribe()
        # else:
        #     thr_name = 'DASAbstractService:%s:TaskManager' % self.name
        #     self.taskmgr = TaskManager(nworkers=nworkers, name=thr_name)
        thr_name = 'DASAbstractService:%s:TaskManager' % self.name
        self.taskmgr = TaskManager(nworkers=nworkers, name=thr_name)
    else:
        self.taskmgr = None

    self.map        = {}   # to be defined by data-service implementation
    self._keys      = None # to be defined at run-time in self.keys
    self._params    = None # to be defined at run-time in self.parameters
    self._notations = {}   # to be defined at run-time in self.notations

    self.logger.info('initialized')

    # define internal cache manager to put 'raw' results into cache
    if 'rawcache' in config and config['rawcache']:
        self.localcache = config['rawcache']
    else:
        msg = 'Undefined rawcache, please check your configuration'
        raise Exception(msg)
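# Illustrative sketch (not part of the service code): the constructor above
# scales the number of API workers per data-service via the 'thread_weights'
# list, where each entry is a 'system:weight' string. The dict below is a
# hypothetical config fragment with made-up values, shown only to clarify
# the expected format.
example_das_config_fragment = {
    'das': {
        'api_workers': 3,               # base number of worker threads
        'thread_weights': ['dbs3:2'],   # doubles the workers for the 'dbs3' service
        'multitask': True,
        'error_expire': 300,
    },
}
# For a service named 'dbs3' this would yield nworkers = 3 * 2 = 6.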
def test_task_manager(self):
    """Test task manager"""
    expect = [idx for idx in range(self.size)]
    mypool = TaskManager()
    tasks = []
    for idx in expect:
        tasks.append(mypool.spawn(worker, idx, self.data))
    mypool.joinall(tasks)
    result = [idx for idx in self.data]
    self.assertEqual(result, expect)
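# The test above assumes a module-level `worker` helper and a shared
# multiprocessing Array (self.data). Since the test expects the array to
# read back as [0, 1, ..., size-1], a minimal worker consistent with that
# behaviour could look like this (hypothetical sketch, not copied from the
# test module):
def worker(idx, shared_array):
    """Store the task index into the shared-array slot it owns."""
    shared_array[idx] = idx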
def __init__(self, dasconfig):
    DASWebManager.__init__(self, dasconfig)
    config = dasconfig['web_server']
    self.pid_pat = re.compile(r'^[a-z0-9]{32}')
    # TODO: self.base shall be automatically included in all tmpls
    self.base = config['url_base']
    self.interval = config.get('status_update', 2500)
    self.engine = config.get('engine', None)
    self.check_clients = config.get('check_clients', False)
    nworkers = config['web_workers']
    self.hot_thr = config.get('hot_threshold', 3000)
    self.dasconfig = dasconfig
    self.dburi = self.dasconfig['mongodb']['dburi']
    self.lifetime = self.dasconfig['mongodb']['lifetime']
    self.queue_limit = config.get('queue_limit', 50)
    qtype = config.get('qtype', 'Queue')
    qfreq = config.get('qfreq', 5)
    if qtype not in ['Queue', 'PriorityQueue']:
        msg = 'Wrong queue type, qtype=%s' % qtype
        raise Exception(msg)
    # if self.engine:
    #     thr_name = 'DASWebService:PluginTaskManager'
    #     self.taskmgr = PluginTaskManager(bus=self.engine, \
    #             nworkers=nworkers, name=thr_name, qtype=qtype, \
    #             qfreq=qfreq)
    #     self.taskmgr.subscribe()
    # else:
    #     thr_name = 'DASWebService:TaskManager'
    #     self.taskmgr = TaskManager(nworkers=nworkers, name=thr_name, \
    #             qtype=qtype, qfreq=qfreq)
    thr_name = 'DASWebService:TaskManager'
    self.taskmgr = TaskManager(nworkers=nworkers, name=thr_name, \
            qtype=qtype, qfreq=qfreq)
    self.adjust = config.get('adjust_input', False)

    self.dasmgr = None         # defined at run-time via self.init()
    self.reqmgr = None         # defined at run-time via self.init()
    self.daskeys = []          # defined at run-time via self.init()
    self.colors = {}           # defined at run-time via self.init()
    self.dbs_url = None        # defined at run-time via self.init()
    self.dbs_global = None     # defined at run-time via self.init()
    self.dbs_instances = []    # defined at run-time via self.init()
    self.kws = None            # defined at run-time via self.init()
    self.q_rewriter = None     # defined at run-time via self.init()
    self.dataset_daemon = None
    self.dbsmgr = {}           # dbs_urls vs dbs_daemons, defined at run-time
    self.daskeyslist = []      # list of DAS keys
    self.init()
    self.dbs_init(config)

    # Monitoring thread which performs auto-reconnection
    thname = 'dascore_monitor'
    start_new_thread(thname, dascore_monitor, \
            ({'das':self.dasmgr, 'uri':self.dburi}, self.init, 5))
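# Sketch of the 'web_server' configuration section consumed by the
# constructor above. The keys and defaults mirror the config.get() calls in
# the code; the concrete values are illustrative only and not taken from a
# real deployment.
example_web_server_config = {
    'web_server': {
        'url_base': '/das',
        'status_update': 2500,
        'check_clients': False,
        'web_workers': 8,
        'hot_threshold': 3000,
        'queue_limit': 50,
        'qtype': 'PriorityQueue',   # must be 'Queue' or 'PriorityQueue'
        'qfreq': 5,
        'adjust_input': True,
    },
}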
def test_assign_priority(self):
    """Test priority assignment"""
    tasks = TaskManager(qtype='PriorityQueue', qfreq=10)
    uid1 = '1.1.1.1'
    tasks._uids.add(uid1)
    uid2 = '2.2.2.2'
    tasks._uids.add(uid2)
    result = tasks.assign_priority(uid1) # no tasks in a queue
    self.assertEqual(int(result), 0)
    tasks._tasks = TestQueue(empty=False)
    res1 = [tasks._uids.add(uid1) for r in range(20)]
    self.assertEqual(int(tasks.assign_priority(uid1)), 2)
    res2 = [tasks._uids.add(uid2) for r in range(50)]
    self.assertEqual(int(tasks.assign_priority(uid2)), 5)
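# The test above relies on a small TestQueue stub whose only job is to make
# the task queue look non-empty, so that assign_priority computes a
# uid-based priority instead of returning 0. A minimal stub consistent with
# that usage could be (hypothetical sketch, internals of the real helper may
# differ):
class TestQueue(object):
    """Queue stub whose emptiness is fixed at construction time."""
    def __init__(self, empty=True):
        self._empty = empty
    def empty(self):
        return self._empty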
def test_priority_task_manager(self):
    """Test priority task manager"""
    data = [idx for idx in range(0, 30)]
    shared_data = Array('i', len(data))
    mypool = TaskManager(qtype='PriorityQueue', qfreq=10)
    tasks = []
    for idx in data:
        if idx % 2:
            tasks.append(mypool.spawn(worker, idx, shared_data, uid=1))
        else:
            tasks.append(mypool.spawn(worker, idx, shared_data, uid=2))
    mypool.joinall(tasks)
    result = [idx for idx in shared_data]
    self.assertEqual(result, data)
def __init__(self, config=None, debug=0, nores=False, logger=None,
             engine=None, multitask=True):
    if config:
        dasconfig = config
    else:
        dasconfig = das_readconfig()
    verbose = dasconfig['verbose']
    self.stdout = debug
    if isinstance(debug, int) and debug:
        self.verbose = debug
        dasconfig['verbose'] = debug
    else:
        self.verbose = verbose
    das_timer('DASCore::init', self.verbose)
    self.operators = das_operators()
    self.collect_wait_time = dasconfig['das'].get('collect_wait_time', 120)

    # set noresults option
    self.noresults = False
    if nores:
        dasconfig['write_cache'] = True
        self.noresults = nores

    self.init_expire = dasconfig['das'].get('init_expire', 5 * 60)
    self.multitask = dasconfig['das'].get('multitask', True)
    if debug or self.verbose:
        self.multitask = False # in verbose mode do not use multitask
        dasconfig['das']['multitask'] = False
    if not multitask: # explicitly passed to DASCore ctor
        self.multitask = False
        dasconfig['das']['multitask'] = False
    dasconfig['engine'] = engine
    if self.multitask:
        nworkers = dasconfig['das'].get('core_workers', 5)
        # if engine:
        #     thr_name = 'DASCore:PluginTaskManager'
        #     self.taskmgr = PluginTaskManager(\
        #             engine, nworkers=nworkers, name=thr_name)
        #     self.taskmgr.subscribe()
        # else:
        #     thr_name = 'DASCore:TaskManager'
        #     self.taskmgr = TaskManager(nworkers=nworkers, name=thr_name)
        thr_name = 'DASCore:TaskManager'
        self.taskmgr = TaskManager(nworkers=nworkers, name=thr_name)
    else:
        self.taskmgr = None

    if logger:
        self.logger = logger
    else:
        self.logger = PrintManager('DASCore', self.verbose)

    # define Mapping/Analytics/Parser in this order since Parser depends
    # on the first two
    dasmapping = DASMapping(dasconfig)
    dasconfig['dasmapping'] = dasmapping
    self.mapping = dasmapping

    self.keylearning = DASKeyLearning(dasconfig)
    dasconfig['keylearning'] = self.keylearning

    # init DAS cache
    self.rawcache = DASMongocache(dasconfig)
    dasconfig['rawcache'] = self.rawcache

    # plug-in architecture: loop over registered data-services in
    # dasconfig; load appropriate module/class; register data
    # service with DASCore.
    self.systems = dasmapping.list_systems()
    # pointer to the DAS top level directory
    dasroot = '/'.join(__file__.split('/')[:-3])
    for name in self.systems:
        try:
            klass = 'DAS/services/%s/%s_service.py' \
                % (name, name)
            srvfile = os.path.join(dasroot, klass)
            with open(srvfile) as srvclass:
                for line in srvclass:
                    if line.find('(DASAbstractService)') != -1:
                        klass = line.split('(DASAbstractService)')[0]
                        klass = klass.split('class ')[-1]
                        break
            mname = 'DAS.services.%s.%s_service' % (name, name)
            module = __import__(mname, fromlist=[klass])
            obj = getattr(module, klass)(dasconfig)
            setattr(self, name, obj)
        except IOError as err:
            if debug > 1:
                # we have virtual services, so IOError can be correct
                print_exc(err)
            try:
                mname = 'DAS.services.generic_service'
                module = __import__(mname, fromlist=['GenericService'])
                obj = module.GenericService(name, dasconfig)
                setattr(self, name, obj)
            except Exception as exc:
                print_exc(exc)
                msg = "Unable to load %s data-service plugin" % name
                raise Exception(msg)
        except Exception as exc:
            print_exc(exc)
            msg = "Unable to load %s data-service plugin" % name
            raise Exception(msg)

    # loop over systems and get system keys, add mapping keys to final list
    self.service_keys = {}
    self.service_parameters = {}
    for name in self.systems:
        skeys = list(getattr(self, name).keys())
        self.service_keys[getattr(self, name).name] = skeys
        sparams = getattr(self, name).parameters()
        self.service_parameters[getattr(self, name).name] = sparams

    self.service_keys['special'] = das_special_keys()
    self.dasconfig = dasconfig
    das_timer('DASCore::init', self.verbose)
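# The plugin loader above scans DAS/services/<name>/<name>_service.py for a
# class that inherits from DASAbstractService, imports it, and instantiates
# it with the DAS configuration. A minimal data-service plugin following
# that contract could look roughly like this; 'foo' and FooService are
# hypothetical names used only to illustrate the expected file and class
# layout (e.g. DAS/services/foo/foo_service.py):
from DAS.services.abstract_service import DASAbstractService

class FooService(DASAbstractService):
    """Hypothetical 'foo' data-service plugin."""
    def __init__(self, config):
        DASAbstractService.__init__(self, 'foo', config)
        # real plugins populate self.map from the DAS mapping here,
        # so that DASCore can route API calls to this service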