def worker(query, expire):
    """
    Worker function which invokes DAS core to update cache for input query
    """
    dascore = DASCore()
    status = dascore.call(query)
    return status
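# Usage sketch (not part of the original module): invoking the worker above
# for a single DAS query. The query string is hypothetical, DASCore is
# assumed to be imported in the surrounding module, and the shape of the
# returned status value is an assumption.
if __name__ == '__main__':
    sample_query = 'dataset=/ZMM*/*/*'  # hypothetical DAS-QL query
    print(worker(sample_query, expire=600))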
def create(self, **kwargs):
    "Create DASCore object"
    dashash = genkey(str(kwargs))
    if dashash in self.params:
        return self.params[dashash]
    else:
        das = DASCore(**kwargs)
        self.params[dashash] = das
        return das
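# Usage sketch (illustrative only): create() above is a memoizing factory
# method, so identical construction kwargs return the same cached DASCore.
# The DASFactory holder class below is hypothetical -- only create() comes
# from the source -- and DASCore/genkey are assumed to be imported in the
# surrounding module.
class DASFactory(object):
    "Hypothetical holder providing the params cache used by create()"
    def __init__(self):
        self.params = {}
    create = create  # reuse the factory method defined above

if __name__ == '__main__':
    factory = DASFactory()
    das1 = factory.create(multitask=False)
    das2 = factory.create(multitask=False)
    assert das1 is das2  # identical kwargs hit the cache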
def setUp(self):
    """
    set up DAS core module
    """
    debug = 0
    self.das = DASCore(debug=debug, multitask=False)
    config = deepcopy(das_readconfig())
    dburi = config['mongodb']['dburi']
    connection = MongoClient(dburi)
    connection.drop_database('das')
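# Hypothetical companion to the setUp() above (not in the source): drop the
# test database again after each test so individual cases stay isolated. It
# reuses only calls already present in setUp().
def tearDown(self):
    """
    tear down DAS core module
    """
    config = deepcopy(das_readconfig())
    dburi = config['mongodb']['dburi']
    connection = MongoClient(dburi)
    connection.drop_database('das')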
def manual_tests():
    """ manual tests """
    from DAS.keywordsearch.metadata.schema_adapter_factory import get_schema
    from DAS.core.das_core import DASCore
    schema_adapter = get_schema(DASCore(multitask=False))
    fields_by_entity = schema_adapter.list_result_fields()
    ir_matcher = SimpleIREntityAttributeMatcher(fields_by_entity)

    def print_results(*args, **kwargs):
        """ run search and print results - used for testing """
        ir_matcher.search_index(*args, **kwargs)

    if False:
        print_results(
            keywords=u'files of Zmm with number of events more than 10',
            result_type=u'dataset')
        print_results(keywords=u'number events', result_type=u'dataset')
        print_results(keywords=u'number evented', result_type=u'dataset')
        print_results(keywords=u'dataset.nevents', result_type=u'dataset')
        print_results(keywords=u'dataset.numevents', result_type=u'dataset')
        # block.replica.subscribed vs block.replica.custodial
        # (the deepest name in here is the most important)
        print_results(keywords=u'replica fraction', result_type=u'block')
        print_results(keywords=u'replica fraction', result_type=u'site')
        print_results(keywords=u'custodial replica', result_type=u'block')
        print_results(keywords=u'replica_fraction', result_type=u'site')
        print('=========================================================')
        print_results(keywords=u'number', result_type=u'dataset')
        print_results(keywords=u'of', result_type=u'dataset')
        print_results(keywords=u'events', result_type=u'dataset')
        print_results(keywords=u'number of', result_type=u'dataset')
        print_results(keywords=u'of events', result_type=u'dataset')
        print_results(keywords=u'Number OF Events', result_type=u'dataset')
        print('Q: dataset_fraction')
        print_results(keywords=u'dataset_fraction', result_type=u'site')
        print('Q: dataset fraction')
        print_results(keywords=u'dataset fraction', result_type=u'site')
        print('Q: dataset part')
        print_results(keywords=u'dataset part', result_type=u'site')
    print('============================================')
    print('Q: file')
    print_results(keywords=u'file in', result_type='file', limit=4)
    print('============================================')
    print('Q: file in')
    print_results(keywords=u'file in', result_type='file', limit=4)
def __init__(cls, name, bases, d):
    type.__init__(cls, name, bases, d)
    # set up only once
    if hasattr(cls, 'global_dbs_inst') and cls.global_dbs_inst:
        return
    print('setUp in metaclass: getting dbs manager '
          '(and fetching datasets if needed)')
    cls.global_dbs_mngr = initialize_global_dbs_mngr(update_required=False)
    cls.global_dbs_inst = get_global_dbs_inst()
    cls.kws = KeywordSearch(dascore=DASCore(multitask=False))
    dasconfig = das_readconfig()
    cls.timeout = dasconfig['keyword_search']['timeout']
def __init__(self, config=None, query=None, sleep=600):
    self.dascore = DASCore(config, nores=True)
    logdir = getarg(config, 'logdir', '/tmp')
    self.pidfile = os.path.join(logdir, 'robot-%s.pid' % genkey(query))
    if hasattr(os, "devnull"):
        devnull = os.devnull
    else:
        devnull = "/dev/null"
    self.stdin = devnull  # we do not read from stdin
    self.stdout = getarg(config, 'stdout', devnull)
    self.stderr = getarg(config, 'stderr', devnull)
    self.query = query
    self.sleep = sleep
def init(self):
    """Init DAS web server, connect to DAS Core"""
    try:
        self.reqmgr = RequestManager(lifetime=self.lifetime)
        self.dasmgr = DASCore(engine=self.engine)
        self.repmgr = CMSRepresentation(self.dasconfig, self.dasmgr)
        self.daskeys = self.dasmgr.das_keys()
        self.gfs = db_gridfs(self.dburi)
        self.daskeys.sort()
        self.dasmapping = self.dasmgr.mapping
        self.dbs_url = self.dasmapping.dbs_url()
        self.dbs_global = self.dasmapping.dbs_global_instance()
        self.dbs_instances = self.dasmapping.dbs_instances()
        self.dasmapping.init_presentationcache()
        self.colors = {'das': gen_color('das')}
        for system in self.dasmgr.systems:
            self.colors[system] = gen_color(system)
        if not self.daskeyslist:
            keylist = [r for r in self.dasmapping.das_presentation_map()]
            keylist.sort(key=lambda r: r['das'])
            self.daskeyslist = keylist
    except ConnectionFailure as _err:
        tstamp = dastimestamp('')
        mythr = threading.current_thread()
        print("### MongoDB connection failure thread=%s, id=%s, time=%s"
              % (mythr.name, mythr.ident, tstamp))
    except Exception as exc:
        print_exc(exc)
        self.dasmgr = None
        self.reqmgr = None
        self.dbs_url = None
        self.dbs_global = None
        self.dbs_instances = []
        self.daskeys = []
        self.colors = {}
        self.q_rewriter = None
        return

    # KWS and Query Rewriting failures are not fatal
    try:
        # init query rewriter, if needed
        if self.dasconfig['query_rewrite']['pk_rewrite_on']:
            self.q_rewriter = CMSQueryRewrite(self.repmgr, self.templatepage)
    except Exception as exc:
        print_exc(exc)
        self.q_rewriter = None
def init(self):
    """Init DAS web server, connect to DAS Core"""
    try:
        self.dasmgr = DASCore(multitask=False)
        self.dbs_instances = self.dasmgr.mapping.dbs_instances()
        self.dbs_global = self.dasmgr.mapping.dbs_global_instance()
        if KeywordSearchHandler:
            self.kws = KeywordSearchHandler(self.dasmgr)
    except ConnectionFailure:
        tstamp = dastimestamp('')
        mythr = threading.current_thread()
        print("### MongoDB connection failure thread=%s, id=%s, time=%s"
              % (mythr.name, mythr.ident, tstamp))
    except Exception as exc:
        print_exc(exc)
        self.dasmgr = None
        self.kws = None
def __init__(self, config={}):
    DASWebManager.__init__(self, config)
    try:
        # try what is supplied from WebTools framework
        cdict = self.config.dictionary_()
        self.cachesrv = cdict.get('cache_server_url', 'http://localhost:8211')
        self.base = '/dascontrollers'
    except Exception:
        # stand-alone version
        self.cachesrv = config.get('cache_server_url', 'http://localhost:8211')
        self.base = '/das'
    self.dasmgr = DASCore()
    self.daskeys = self.dasmgr.das_keys()
    self.daskeys.sort()
    self.dasmapping = self.dasmgr.mapping
    self.daslogger = self.dasmgr.logger
    self.pageviews = ['xml', 'list', 'json', 'yuijson']
    msg = "DASSearch::init is started with base=%s" % self.base
    self.daslogger.debug(msg)
    print(msg)
def main():
    "Main function"
    optmgr = DASOptionParser()
    opts = optmgr.parser.parse_args()
    t0 = time.time()
    query = opts.query
    if 'instance' not in query:
        query = ' instance=prod/global ' + query
    debug = opts.verbose
    dascore = DASCore(debug=debug, nores=opts.noresults)
    if opts.hash:
        dasquery = DASQuery(query)
        mongo_query = dasquery.mongo_query
        service_map = dasquery.service_apis_map()
        str_query = dasquery.storage_query
        print("---------------")
        print("DAS-QL query  :", query)
        print("DAS query     :", dasquery)
        print("Mongo query   :", mongo_query)
        print("Storage query :", str_query)
        print("Services      :\n")
        for srv, val in service_map.items():
            print("%s : %s\n" % (srv, ', '.join(val)))
        sys.exit(0)
    sdict = dascore.keys()
    if opts.services:
        msg = "DAS services:"
        print(msg)
        print("-" * len(msg))
        keys = list(sdict.keys())
        keys.sort()
        for key in keys:
            print(key)
    elif opts.service:
        msg = "DAS service %s:" % opts.service
        print(msg)
        print("-" * len(msg))
        keys = sdict[opts.service]
        keys.sort()
        for key in keys:
            print(key)
    elif opts.jsfile:
        kws_js(dascore, query, opts.idx, opts.limit, opts.jsfile, debug)
        sys.exit(0)
    elif opts.kfile:
        keylearning_js(dascore, query, opts.kfile, debug)
        sys.exit(0)
    elif query:
        idx = opts.idx
        limit = opts.limit
        output = opts.nooutput
        plain = opts.plain
        qcache = opts.qcache
        if opts.profile:
            import cProfile  # python profiler
            import pstats    # profiler statistics
            cmd = 'run(dascore,query,idx,limit,output,plain)'
            cProfile.runctx(cmd, globals(), locals(), 'profile.dat')
            info = pstats.Stats('profile.dat')
            info.sort_stats('cumulative')
            info.print_stats()
        else:
            run(dascore, query, idx, limit, output, plain)
    elif opts.dasconfig:
        print(pformat(dascore.dasconfig))
    else:
        print()
        print("DAS CLI interface, no actions found,")
        print("please use --help for more options.")
    timestamp = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
    timer = get_das_timer()
    print("\nDAS execution time:\n")
    if debug:
        timelist = []
        for _, timerdict in timer.items():
            counter = timerdict['counter']
            tag = timerdict['tag']
            exetime = timerdict['time']
            timelist.append((counter, tag, exetime))
        timelist.sort()
        for _, tag, exetime in timelist:
            print("%s %s sec" % (tag, round(exetime, 2)))
    print("Total %s sec, %s" % (round(time.time()-t0, 2), timestamp))
def __init__(self, config):
    self.config = config
    DASWebManager.__init__(self, config)
    self.version = __version__
    self.methods = {}
    self.methods['GET'] = {
        'request': {
            'args': ['idx', 'limit', 'query', 'skey', 'order'],
            'call': self.request, 'version': __version__},
        'nresults': {
            'args': ['query'],
            'call': self.nresults, 'version': __version__},
        'records': {
            'args': ['query', 'count', 'collection'],
            'call': self.records, 'version': __version__},
        'status': {
            'args': ['query'],
            'call': self.status, 'version': __version__},
    }
    self.methods['POST'] = {
        'create': {
            'args': ['query', 'expire'],
            'call': self.create, 'version': __version__},
    }
    self.methods['PUT'] = {
        'replace': {
            'args': ['query', 'expire'],
            'call': self.replace, 'version': __version__},
    }
    self.methods['DELETE'] = {
        'delete': {
            'args': ['query'],
            'call': self.delete, 'version': __version__},
    }
    try:
        # WMCore/WebTools
        rest = RESTModel(config)
        rest.methods = self.methods    # set RESTModel methods
        self.model = self              # re-reference model to my class
        self.model.handler = rest.handler  # reference handler to RESTModel
        cdict = self.config.dictionary_()
        self.base = '/rest'
    except Exception:
        # stand-alone version
        cdict = {}
        self.base = ''
    self.dascore = DASCore()
    dbhost = self.dascore.dasconfig['mongocache_dbhost']
    dbport = self.dascore.dasconfig['mongocache_dbport']
    capped_size = self.dascore.dasconfig['mongocache_capped_size']
    self.con = Connection(dbhost, dbport)
    if 'logging' not in self.con.database_names():
        db = self.con['logging']
        options = {'capped': True, 'size': capped_size}
        db.create_collection('db', options)
        self.warning('Created logging.db, size=%s' % capped_size)
    self.col = self.con['logging']['db']
    sleep = cdict.get('sleep', 2)
    verbose = cdict.get('verbose', None)
    iconfig = {'sleep': sleep, 'verbose': verbose,
               'logger': self.dascore.logger}
    self.cachemgr = DASCacheMgr(iconfig)
    # hand the module-level worker function to the cache manager thread
    thread.start_new_thread(self.cachemgr.worker, (worker, ))
    msg = 'DASCacheMode::init, host=%s, port=%s, capped_size=%s' \
          % (dbhost, dbport, capped_size)
    self.dascore.logger.debug(msg)
    print(msg)
        print('entity_names')
        pprint.pprint(self.entity_names)
        print('search_field_names')
        pprint.pprint(self._lookup_keys)
        # print('ENTITY FIELDS (BY LOOKUP):')
        # pprint.pprint(dict(self._fields_dict))
        print('ENTITY FIELDS (BY LOOKUP MULTI ENTITY):')
        pprint.pprint([
            "{0}: {1}".format(lookup, self._fields_dict[lookup].keys())
            for lookup in self._fields_dict.keys()
            if ',' in lookup])


if __name__ == '__main__':
    from DAS.core.das_core import DASCore
    s = DasSchemaAdapter(DASCore(multitask=False))
    # pprint.pprint(s.list_result_fields())
    print('validate input params():',
          s.validate_input_params(set(), entity='dataset.name'))
    print('validate input params(dataset.name):',
          s.validate_input_params(set(['dataset.name']), entity='dataset.name'))
    print('validate input params run(dataset.name):',
          s.validate_input_params(set(['dataset.name']),
                                  entity='run.run_number',
                                  final_step=True))
    # non related entity in input
    print(
#
# main
#
if __name__ == '__main__':
    optManager = DASOptionParser()
    (opts, args) = optManager.getOpt()
    if not len([val for val in opts.__dict__.values() if val]):
        print("Run with --help for more options")
        sys.exit(0)
    t0 = time.time()
    if opts.verbose:
        debug = opts.verbose
    else:
        debug = 0
    MGR = DASCore(debug=debug)
    DAS = DASCouchDB(MGR)
    if opts.listviews:
        for viewname, viewmap in DAS.views.items():
            print()
            print("DAS view:", viewname)
            print(viewmap['map'])
        sys.exit(0)
    if opts.delete:
        if opts.system:
            msg = "Delete '%s' docs in '%s' couch DB" \
                  % (opts.system, opts.delete)
            DAS.delete(opts.delete, opts.system)
        else:
            msg = "Delete '%s' couch DB" % opts.delete