def get_global_dbs_inst():
    """Return the name of the global DBS instance"""
    from DAS.core.das_mapping_db import DASMapping
    dasconfig = das_readconfig()
    dasmapping = DASMapping(dasconfig)
    return dasmapping.dbs_global_instance()
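# Hedged usage sketch (not part of the original module): the helper above only
# delegates to DASMapping, so it can be called without arguments once
# das_readconfig() resolves a configuration with a reachable MappingDB.
if __name__ == '__main__':
    print(get_global_dbs_inst())  # e.g. 'prod/global'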
def init(self):
    "Takes care of MongoDB connection since DASMapping requires it"
    try:
        dasmapping = DASMapping(self.dasconfig)
        mapping = dasmapping.servicemap(self.service_name)
        self.urls = mapping[self.service_api]['services']
        self.expire = mapping[self.service_api]['expire']
    except Exception as _exp:
        # silently ignore failures, e.g. when the MappingDB is not (yet)
        # reachable; in that case self.urls/self.expire remain unset
        pass
def __init__(self, config=None):
    if not config:
        config = das_readconfig()
    self.dasmapping = DASMapping(config)
    if not self.dasmapping.check_maps():
        msg = "No DAS maps found in MappingDB"
        raise Exception(msg)
    self.dasservices = config['services']
    self.daskeysmap = self.dasmapping.daskeys()
    self.operators = list(das_operators())
    self.daskeys = list(das_special_keys())
    self.verbose = config['verbose']
    self.logger = PrintManager('QLManager', self.verbose)
    # extend the special keys with every DAS key known to the mapping
    for val in self.daskeysmap.values():
        for item in val:
            self.daskeys.append(item)
def setUp(self):
    """set up DAS core module"""
    debug = 0
    config = deepcopy(das_readconfig())
    logger = PrintManager('TestDASMongocache', verbose=debug)
    config['logger'] = logger
    config['verbose'] = debug
    dburi = config['mongodb']['dburi']
    connection = MongoClient(dburi)
    connection.drop_database('das')
    dasmapping = DASMapping(config)
    config['dasmapping'] = dasmapping
    self.dasmongocache = DASMongocache(config)
def setUp(self):
    """
    set up data used in the tests.
    setUp is called before each test function execution.
    """
    self.i1 = "find dataset, run, bfield where site = T2 and admin=VK and storage=castor"
    self.i2 = " find dataset, run where (run=1 or run=2) and storage=castor or site = T2"
    debug = 0
    config = das_readconfig()
    logger = DASLogger(verbose=debug, stdout=debug)
    config['logger'] = logger
    config['verbose'] = debug
    config['mapping_dbhost'] = 'localhost'
    config['mapping_dbport'] = 27017
    config['mapping_dbname'] = 'mapping'
    config['dasmapping'] = DASMapping(config)
    config['dasanalytics'] = DASAnalytics(config)
    self.parser = MongoParser(config)
    self.operators = [o.strip() for o in DAS_OPERATORS]
def initialize_global_dbs_mngr(update_required=False):
    """
    Gets a DBSDaemon for global DBS and fetches the data if needed.
    *Used for testing purposes only*.
    """
    from DAS.core.das_mapping_db import DASMapping
    dasconfig = das_readconfig()
    dasmapping = DASMapping(dasconfig)
    dburi = dasconfig['mongodb']['dburi']
    dbsexpire = dasconfig.get('dbs_daemon_expire', 3600)
    main_dbs_url = dasmapping.dbs_url()
    dbsmgr = DBSDaemon(main_dbs_url, dburi,
                       {'expire': dbsexpire, 'preserve_on_restart': True})
    # if we have no datasets (fresh DB), fetch them
    if update_required or not next(dbsmgr.find('*Zmm*'), False):
        print('fetching datasets from global DBS...')
        dbsmgr.update()
    return dbsmgr
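# Hedged usage sketch (not part of the original module); it assumes a running
# MongoDB and network access to global DBS. As in the check above, find()
# yields dataset names matching the given pattern.
dbsmgr = initialize_global_dbs_mngr(update_required=False)
for dataset in dbsmgr.find('*Zmm*'):
    print(dataset)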
def init_trackers():
    """Initialize an InputValuesTracker for every input-values provider in DASMapping"""
    # get list of trackers
    mapping = DASMapping(config=das_readconfig())
    for provider in mapping.inputvalues_uris():
        TRACKERS[provider['input']] = InputValuesTracker(provider)
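# Hedged usage sketch: after init_trackers() the module-level TRACKERS dict
# maps an input field name to its InputValuesTracker. 'dataset' below is an
# assumed example key; the actual keys depend on the DASMapping configuration.
init_trackers()
tracker = TRACKERS.get('dataset')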
def __init__(self, config=None, debug=0, nores=False, logger=None,
             engine=None, multitask=True):
    if config:
        dasconfig = config
    else:
        dasconfig = das_readconfig()
    verbose = dasconfig['verbose']
    self.stdout = debug
    if isinstance(debug, int) and debug:
        self.verbose = debug
        dasconfig['verbose'] = debug
    else:
        self.verbose = verbose
    das_timer('DASCore::init', self.verbose)
    self.operators = das_operators()
    self.collect_wait_time = dasconfig['das'].get('collect_wait_time', 120)

    # set noresults option
    self.noresults = False
    if nores:
        dasconfig['write_cache'] = True
        self.noresults = nores

    self.init_expire = dasconfig['das'].get('init_expire', 5 * 60)
    self.multitask = dasconfig['das'].get('multitask', True)
    if debug or self.verbose:
        self.multitask = False  # in verbose mode do not use multitask
        dasconfig['das']['multitask'] = False
    if not multitask:  # explicitly call DASCore ctor
        self.multitask = False
        dasconfig['das']['multitask'] = False
    dasconfig['engine'] = engine
    if self.multitask:
        nworkers = dasconfig['das'].get('core_workers', 5)
        # if engine:
        #     thr_name = 'DASCore:PluginTaskManager'
        #     self.taskmgr = PluginTaskManager(\
        #         engine, nworkers=nworkers, name=thr_name)
        #     self.taskmgr.subscribe()
        # else:
        #     thr_name = 'DASCore:TaskManager'
        #     self.taskmgr = TaskManager(nworkers=nworkers, name=thr_name)
        thr_name = 'DASCore:TaskManager'
        self.taskmgr = TaskManager(nworkers=nworkers, name=thr_name)
    else:
        self.taskmgr = None

    if logger:
        self.logger = logger
    else:
        self.logger = PrintManager('DASCore', self.verbose)

    # define Mapping/Analytics/Parser in this order since Parser depends
    # on first two
    dasmapping = DASMapping(dasconfig)
    dasconfig['dasmapping'] = dasmapping
    self.mapping = dasmapping

    self.keylearning = DASKeyLearning(dasconfig)
    dasconfig['keylearning'] = self.keylearning

    # init DAS cache
    self.rawcache = DASMongocache(dasconfig)
    dasconfig['rawcache'] = self.rawcache

    # plug-in architecture: loop over registered data-services in
    # dasconfig; load appropriate module/class; register data
    # service with DASCore.
    self.systems = dasmapping.list_systems()
    # pointer to the DAS top level directory
    dasroot = '/'.join(__file__.split('/')[:-3])
    for name in self.systems:
        try:
            klass = 'DAS/services/%s/%s_service.py' % (name, name)
            srvfile = os.path.join(dasroot, klass)
            with open(srvfile) as srvclass:
                for line in srvclass:
                    if line.find('(DASAbstractService)') != -1:
                        klass = line.split('(DASAbstractService)')[0]
                        klass = klass.split('class ')[-1]
                        break
            mname = 'DAS.services.%s.%s_service' % (name, name)
            module = __import__(mname, fromlist=[klass])
            obj = getattr(module, klass)(dasconfig)
            setattr(self, name, obj)
        except IOError as err:
            if debug > 1:
                # we have virtual services, so IOError can be correct
                print_exc(err)
            try:
                mname = 'DAS.services.generic_service'
                module = __import__(mname, fromlist=['GenericService'])
                obj = module.GenericService(name, dasconfig)
                setattr(self, name, obj)
            except Exception as exc:
                print_exc(exc)
                msg = "Unable to load %s data-service plugin" % name
                raise Exception(msg)
        except Exception as exc:
            print_exc(exc)
            msg = "Unable to load %s data-service plugin" % name
            raise Exception(msg)

    # loop over systems and get system keys, add mapping keys to final list
    self.service_keys = {}
    self.service_parameters = {}
    for name in self.systems:
        skeys = list(getattr(self, name).keys())
        self.service_keys[getattr(self, name).name] = skeys
        sparams = getattr(self, name).parameters()
        self.service_parameters[getattr(self, name).name] = sparams

    self.service_keys['special'] = das_special_keys()
    self.dasconfig = dasconfig
    das_timer('DASCore::init', self.verbose)
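# Hedged usage sketch for the constructor above (not part of DASCore itself);
# it assumes a reachable MongoDB with populated DAS maps. `systems` and
# `service_keys` are the attributes filled in at the end of __init__, and
# 'dbs3' is just an example of a registered data-service.
core = DASCore(debug=0, multitask=False)
print(core.systems)                    # names of registered data-services
print(core.service_keys.get('dbs3'))   # lookup keys exposed by the dbs3 plugin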
def list_dbs_instances():
    """list all DBS instances"""
    from DAS.core.das_mapping_db import DASMapping
    dasconfig = das_readconfig()
    dasmapping = DASMapping(dasconfig)
    return dasmapping.dbs_instances()
def setUp(self):
    """set up a test MappingDB with sample maps and create a DASMapping instance"""
    debug = 0
    self.db = 'test_mapping.db'
    config = deepcopy(das_readconfig())
    dburi = config['mongodb']['dburi']
    logger = PrintManager('TestDASMapping', verbose=debug)
    config['logger'] = logger
    config['verbose'] = debug
    dbname = 'test_mapping'
    collname = 'db'
    config['mappingdb'] = dict(dburi=dburi, dbname=dbname, collname=collname)
    # add some maps to mapping db
    conn = MongoClient(dburi)
    conn.drop_database(dbname)
    self.coll = conn[dbname][collname]
    self.pmap = {
        "presentation": {
            "block": [
                {"ui": "Block name", "das": "block.name"},
                {"ui": "Block size", "das": "block.size"},
            ]
        },
        "type": "presentation",
    }
    self.coll.insert(self.pmap)
    url = 'https://cmsweb.cern.ch/dbs/prod/global/DBSReader/acquisitioneras/'
    dformat = 'JSON'
    system = 'dbs3'
    expire = 100
    rec = {
        'system': system,
        'urn': 'acquisitioneras',
        'format': dformat,
        'instances': ['prod/global'],
        'url': url,
        'expire': expire,
        'lookup': 'era',
        'params': {},
        'das_map': [{"das_key": "era", "rec_key": "era.name", "api_arg": "era"}],
        'type': 'service',
    }
    self.coll.insert(rec)
    ver_token = verification_token(self.coll.find(**PYMONGO_OPTS))
    rec = {'verification_token': ver_token, 'type': 'verification_token'}
    self.coll.insert(rec)
    self.mgr = DASMapping(config)
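def tearDown(self):
    """Hypothetical clean-up counterpart to the setUp above (a hedged sketch,
    not part of the original suite): drop the test mapping database so each
    test run starts from a clean state, mirroring the MongoClient calls in setUp."""
    dburi = das_readconfig()['mongodb']['dburi']
    conn = MongoClient(dburi)
    conn.drop_database('test_mapping')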