Example #1
0
def list_dbs_instances():
    """Return the list of all DBS instances known to the DAS mapping DB."""
    from DAS.core.das_mapping_db import DASMapping

    mapping = DASMapping(das_readconfig())
    return mapping.dbs_instances()
Example #2
0
 def init(self):
     """
     Takes care of MongoDB connection.

     Creates lookup indexes on the cache collection, resolves the
     combined-service URL map via DASMapping, and starts (once) the
     background worker thread that refreshes the dbs/phedex DB.
     On any failure the service is left in a disabled state
     (urls=None, short expire, no worker thread).
     """
     try:
         # ensure lookup indexes exist on the cache collection
         indexes = [('dataset', DESCENDING), ('site', DESCENDING),
                    ('ts', DESCENDING)]
         for index in indexes:
             create_indexes(self.coll, [index])
         dasmapping   = DASMapping(self.dasconfig)
         service_name = self.config.get('name', 'combined')
         service_api  = self.config.get('api', 'dataset4site_release')
         mapping      = dasmapping.servicemap(service_name)
         self.urls    = mapping[service_api]['services']
         self.expire  = mapping[service_api]['expire']
         # NOTE: removed unused local `services` (was read from
         # self.dasconfig['services'] but never used)
         if  not self.wthr:
             # Worker thread which updates dbs/phedex DB
             self.wthr = start_new_thread('dbs_phedex_worker', worker, \
                  (self.urls, which_dbs, self.uri, \
                  self.dbname, self.collname, self.expire))
         msg = "### DBSPhedexService:init started"
         print(msg)
     except Exception as exc:
         print("### Fail DBSPhedexService:init\n", str(exc))
         # fall back to a disabled state; short expire allows quick retry
         self.urls       = None
         self.expire     = 60
         self.wthr       = None
Example #3
0
def get_global_dbs_inst():
    """Return the name of the global DBS instance."""
    from DAS.core.das_mapping_db import DASMapping
    mappings = DASMapping(das_readconfig())
    return mappings.dbs_global_instance()
Example #4
0
 def init(self):
     """Resolve service URLs/expire via DASMapping; needs a live MongoDB."""
     try:
         smap = DASMapping(self.dasconfig).servicemap(self.service_name)
         entry = smap[self.service_api]
         self.urls = entry['services']
         self.expire = entry['expire']
     except Exception:
         # best effort: leave attributes untouched when the DB is down
         pass
Example #5
0
def get_global_dbs_inst():
    """Return the name of the global DBS instance."""
    from DAS.core.das_mapping_db import DASMapping

    return DASMapping(das_readconfig()).dbs_global_instance()
Example #6
0
def check_mappings_readiness():
    """
    Return whether DASMaps are initialized.

    Probes the mapping DB and, when the maps check out, instantiates a
    DASCore to confirm the system is usable. Any failure is reported and
    treated as "not ready".
    """
    print('db alive. checking it\'s state...')
    try:
        dasmapping = DASMapping(das_readconfig())
        if dasmapping.check_maps():
            DASCore(multitask=False)
            return True
    except Exception as exc:
        # bug fix: the two py2-only print statements below made this
        # function a syntax error under py3 (the function already used
        # the py3 print() call above)
        print(exc)
    print('no DAS mappings present...')
    return False
Example #7
0
    def setUp(self):
        """Prepare a fresh test mapping DB and a DASMapping manager."""
        debug = 0
        self.db = "test_mapping.db"
        cfg = deepcopy(das_readconfig())
        dburi = cfg["mongodb"]["dburi"]
        cfg["logger"] = PrintManager("TestDASMapping", verbose=debug)
        cfg["verbose"] = debug
        dbname, collname = "test_mapping", "db"
        cfg["mappingdb"] = dict(dburi=dburi, dbname=dbname, collname=collname)
        # drop any leftover DB and seed it with a presentation map
        client = MongoClient(dburi)
        client.drop_database(dbname)
        self.coll = client[dbname][collname]
        self.pmap = {
            "presentation": {
                "block": [{"ui": "Block name", "das": "block.name"}, {"ui": "Block size", "das": "block.size"}]
            },
            "type": "presentation",
        }
        self.coll.insert(self.pmap)
        # store the verification token so DASMapping accepts the maps
        token = verification_token(self.coll.find(exhaust=True))
        self.coll.insert({"verification_token": token, "type": "verification_token"})

        self.mgr = DASMapping(cfg)
Example #8
0
 def __init__(self, config=None):
     """Build QL manager state from the DAS configuration."""
     cfg = config or das_readconfig()
     self.dasmapping = DASMapping(cfg)
     if not self.dasmapping.check_maps():
         raise Exception("No DAS maps found in MappingDB")
     self.dasservices = cfg['services']
     self.daskeysmap = self.dasmapping.daskeys()
     self.operators = list(das_operators())
     self.daskeys = list(das_special_keys())
     self.verbose = cfg['verbose']
     self.logger = PrintManager('QLManger', self.verbose)
     # flatten per-service DAS keys into the overall key list
     for keys in self.daskeysmap.values():
         self.daskeys.extend(keys)
Example #9
0
    def setUp(self):
        """
        set up DAS core module

        Builds a DAS configuration that points at test collections and a
        local test data service, registers fake service maps, then starts
        both the DAS core and the test HTTP server.
        """
        debug = 0

        # read DAS config and make fake Mapping DB entry
        collname      = 'test_collection'
        self.dasmerge = 'test_merge'
        self.dascache = 'test_cache'
        self.dasmr    = 'test_mapreduce'
        self.collname = collname
        # (removed commented-out deepcopy variant of the line below)
        config        = das_readconfig()
        dburi         = config['mongodb']['dburi']
        self.dburi    = dburi
        logger        = PrintManager('TestCMSFakeDataServices', verbose=debug)
        self.base     = 'http://localhost:8080' # URL of DASTestDataService
        self.expire   = 100
        config['logger']    = logger
        config['loglevel']  = debug
        config['verbose']   = debug
        config['mappingdb'] = dict(dburi=dburi, dbname='mapping', collname=collname)
        config['analyticsdb'] = dict(dbname='analytics', collname=collname, history=100)
        config['dasdb'] = {'dbname': 'das',
                           'cachecollection': self.dascache,
                           'mrcollection': self.dasmr,
                           'mergecollection': self.dasmerge}
        config['keylearningdb'] = {'collname': collname, 'dbname': 'keylearning'}
        config['parserdb'] = {'collname': collname, 'dbname': 'parser',
                                'enable': True, 'sizecap': 10000}
        config['services'] = ['dbs', 'phedex', 'sitedb', 'google_maps', 'ip']

        # mongo parser
        self.mongoparser = ql_manager(config)
        config['mongoparser'] = self.mongoparser

        # setup DAS mapper
        self.mgr = DASMapping(config)

        # create fresh DB
        self.clear_collections()
        self.mgr.delete_db_collection()
        self.mgr.create_db()

        # Add fake mapping records
        self.add_service('ip', 'ip.yml')
        self.add_service('google_maps', 'google_maps.yml')
        self.add_service('dbs', 'dbs.yml')
        self.add_service('phedex', 'phedex.yml')
        self.add_service('sitedb', 'sitedb.yml')

        # create DAS handler
        self.das = DASCore(config)

        # start TestDataService
        self.server = Root(config)
        self.server.start()
Example #10
0
    def setUp(self):
        """
        set up DAS core module

        Builds a DAS config pointing at test collections and a local test
        data service (http://127.0.0.1:8080), loads fake service maps,
        then starts DASCore and the test HTTP server.
        """
        debug = 0

        # read DAS config and make fake Mapping DB entry
        collname      = 'test_collection'
        self.dasmerge = 'test_merge'
        self.dascache = 'test_cache'
        self.dasmr    = 'test_mapreduce'
        self.collname = collname
        config        = das_readconfig()
        dburi         = config['mongodb']['dburi']
        self.dburi    = dburi
        logger        = PrintManager('TestCMSFakeDataServices', verbose=debug)
        self.base     = 'http://127.0.0.1:8080' # URL of DASTestDataService
        self.expire   = 100
        config['logger']    = logger
        config['loglevel']  = debug
        config['verbose']   = debug
        config['mappingdb'] = dict(dburi=dburi, dbname='mapping', collname=collname)
        config['analyticsdb'] = dict(dbname='analytics', collname=collname, history=100)
        config['dasdb'] = {'dbname': 'das',
                           'cachecollection': self.dascache,
                           'mrcollection': self.dasmr,
                           'mergecollection': self.dasmerge}
        config['keylearningdb'] = {'collname': collname, 'dbname': 'keylearning'}
        config['parserdb'] = {'collname': collname, 'dbname': 'parser', 
                                'enable': True, 'sizecap': 10000}
        config['services'] = ['dbs3', 'phedex', 'google_maps', 'ip']
        # Do not perform DAS map test, since we overwrite system and urls.
        # This is done to use standard DAS maps, but use local URLs, which
        # cause DAS hash map to be be wrong during a test
        config['map_test'] = False

        # Add fake mapping records
        # NOTE(review): maps are loaded before DASMapping is constructed —
        # presumably DASMapping reads them at init; verify before reordering
        self.clear_collections()
        self.add_service('ip', 'ip.yml')
        self.add_service('google_maps', 'google_maps.yml')
        self.add_service('dbs3', 'dbs3.yml')
        self.add_service('phedex', 'phedex.yml')

        # setup DAS mapper
        self.mgr = DASMapping(config)

        # mongo parser
        self.mongoparser = ql_manager(config)
        config['mongoparser'] = self.mongoparser

        # create DAS handler
        self.das = DASCore(config)

        # start TestDataService
        self.server = Root(config)
        self.server.start()
Example #11
0
def initialize_global_dbs_mngr(update_required=False):
    """
    Gets a DBSDaemon for global DBS and fetches the data if needed.
    *Used for testing purposes only*.

    :param update_required: force a dataset re-fetch even when the cache
        already holds data
    :return: configured DBSDaemon instance
    """
    from DAS.core.das_mapping_db import DASMapping

    dasconfig = das_readconfig()
    dasmapping = DASMapping(dasconfig)

    dburi = dasconfig["mongodb"]["dburi"]
    dbsexpire = dasconfig.get("dbs_daemon_expire", 3600)
    main_dbs_url = dasmapping.dbs_url()
    dbsmgr = DBSDaemon(main_dbs_url, dburi, {"expire": dbsexpire, "preserve_on_restart": True})

    # if we have no datasets (fresh DB, fetch them)
    if update_required or not next(dbsmgr.find("*Zmm*"), False):
        # bug fix: was a py2-only print statement; print() is valid on
        # both py2 and py3 for a single argument
        print("fetching datasets from global DBS...")
        dbsmgr.update()
    return dbsmgr
Example #12
0
def initialize_global_dbs_mngr(update_required=False):
    """
    Return a DBSDaemon for the global DBS instance, populating its
    dataset cache when the cache is empty or an update is requested.
    *Used for testing purposes only*.
    """
    from DAS.core.das_mapping_db import DASMapping

    dasconfig = das_readconfig()
    mapping = DASMapping(dasconfig)
    daemon = DBSDaemon(mapping.dbs_url(),
                       dasconfig['mongodb']['dburi'],
                       {'expire': dasconfig.get('dbs_daemon_expire', 3600),
                        'preserve_on_restart': True})
    # fresh DB (no datasets) or explicit request -> (re)fetch datasets
    if update_required or not next(daemon.find('*Zmm*'), False):
        print('fetching datasets from global DBS...')
        daemon.update()
    return daemon
Example #13
0
 def __init__(self, config=None):
     """Set up mapping, service list and DAS keys from *config*."""
     config = config or das_readconfig()
     self.dasmapping = DASMapping(config)
     if not self.dasmapping.check_maps():
         raise Exception("No DAS maps found in MappingDB")
     self.dasservices = config['services']
     self.daskeysmap = self.dasmapping.daskeys()
     self.operators = list(das_operators())
     self.daskeys = list(das_special_keys())
     self.verbose = config['verbose']
     self.logger = PrintManager('QLManger', self.verbose)
     # special keys first, then every per-service DAS key
     for service_keys in self.daskeysmap.values():
         for daskey in service_keys:
             self.daskeys.append(daskey)
Example #14
0
    def setUp(self):
        """Drop the 'das' DB and build a fresh DASMongocache instance."""
        debug = 0
        cfg = deepcopy(das_readconfig())
        cfg['logger'] = PrintManager('TestDASMongocache', verbose=debug)
        cfg['verbose'] = debug
        dburi = cfg['mongodb']['dburi']

        # start from a clean 'das' database
        MongoClient(dburi).drop_database('das')
        cfg['dasmapping'] = DASMapping(cfg)
        self.dasmongocache = DASMongocache(cfg)
Example #15
0
    def setUp(self):
        """
        set up DAS core module

        Creates a fresh test mapping DB seeded with a presentation map,
        one dbs3 service record and a verification token, then builds the
        DASMapping manager under test.
        """
        debug    = 0
        self.db  = 'test_mapping.db'
        config   = deepcopy(das_readconfig())
        dburi    = config['mongodb']['dburi']
        logger   = PrintManager('TestDASMapping', verbose=debug)
        config['logger']  = logger
        config['verbose'] = debug
        dbname   = 'test_mapping'
        collname = 'db'
        config['mappingdb'] = dict(dburi=dburi, dbname=dbname, collname=collname)
        # add some maps to mapping db
        conn = MongoClient(dburi)
        conn.drop_database(dbname)
        self.coll = conn[dbname][collname]
        self.pmap = {"presentation": {"block":[{"ui": "Block name", "das": "block.name"},
            {"ui": "Block size", "das": "block.size"}]}, "type": "presentation"}
        self.coll.insert(self.pmap)

        # service record for the dbs3 'acquisitioneras' API
        url     = 'https://cmsweb.cern.ch/dbs/prod/global/DBSReader/acquisitioneras/'
        dformat = 'JSON'
        system  = 'dbs3'
        expire  = 100
        rec = {'system':system, 'urn': 'acquisitioneras', 'format':dformat,
            'instances': ['prod/global'],
            'url':url, 'expire': expire, 'lookup': 'era',
            'params' : {},
             'das_map': [
                 {"das_key": "era", "rec_key":"era.name", "api_arg":"era"}
                 ],
             'type': 'service'
        }
        self.coll.insert(rec)

        # token over all records lets DASMapping verify map completeness
        ver_token = verification_token(self.coll.find(**PYMONGO_OPTS))
        rec = {'verification_token':ver_token, 'type':'verification_token'}
        self.coll.insert(rec)

        self.mgr = DASMapping(config)
Example #16
0
    def setUp(self):
        """
        Prepare parser fixtures used by the tests.
        setUp is called before each test function execution.
        """
        self.i1 = "find dataset, run, bfield where site = T2 and admin=VK and storage=castor"
        self.i2 = "  find dataset, run where (run=1 or run=2) and storage=castor or site = T2"

        debug = 0
        cfg = das_readconfig()
        cfg['logger'] = DASLogger(verbose=debug, stdout=debug)
        cfg['verbose'] = debug
        cfg['mapping_dbhost'] = 'localhost'
        cfg['mapping_dbport'] = 27017
        cfg['mapping_dbname'] = 'mapping'
        cfg['dasmapping'] = DASMapping(cfg)
        cfg['dasanalytics'] = DASAnalytics(cfg)
        self.parser = MongoParser(cfg)
        self.operators = [oper.strip() for oper in DAS_OPERATORS]
Example #17
0
    def setUp(self):
        """Create a fresh test mapping DB seeded with a presentation map."""
        debug = 0
        self.db = 'test_mapping.db'
        cfg = deepcopy(das_readconfig())
        dburi = cfg['mongodb']['dburi']
        cfg['logger'] = PrintManager('TestDASMapping', verbose=debug)
        cfg['verbose'] = debug
        dbname = 'test_mapping'
        collname = 'db'
        cfg['mappingdb'] = dict(dburi=dburi, dbname=dbname, collname=collname)
        # start from an empty DB and insert the presentation record
        conn = Connection(dburi)
        conn.drop_database(dbname)
        coll = conn[dbname][collname]
        self.pmap = {"presentation": {"block":[{"ui": "Block name", "das": "block.name"},
        {"ui": "Block size", "das": "block.size"}]}}
        coll.insert(self.pmap)

        self.mgr = DASMapping(cfg)
Example #18
0
class QLManager(object):
    """
    DAS QL manager.

    Parses DAS-QL queries into MongoDB query dicts (optionally caching
    parsed queries in a parser DB) and maps queries onto the DAS data
    services/APIs able to answer them.

    NOTE(review): this implementation uses Python 2-only constructs
    (print statement, dict.iteritems, basestring).
    """
    def __init__(self, config=None):
        # fall back to the default DAS configuration when none is given
        if  not config:
            config = das_readconfig()
        self.dasmapping  = DASMapping(config)
        if  not self.dasmapping.check_maps():
            msg = "No DAS maps found in MappingDB"
            raise Exception(msg)
        self.analytics   = DASAnalytics(config)
        self.dasservices = config['services']
        self.daskeysmap  = self.dasmapping.daskeys()
        self.operators   = list(das_operators())
        self.daskeys     = list(das_special_keys())
        self.verbose     = config['verbose']
        self.logger      = PrintManager('QLManger', self.verbose)
        # flatten per-service DAS keys into the overall daskeys list
        for val in self.daskeysmap.values():
            for item in val:
                self.daskeys.append(item)
        parserdir   = config['das']['parserdir']
        self.parserdir = parserdir

        # optional parser-result cache backed by DASParserDB
        self.enabledb = config['parserdb']['enable']
        if  self.enabledb:
            self.parserdb = DASParserDB(config)

    def parse(self, query):
        """
        Parse input query and return query in MongoDB form.
        Optionally parsed query can be written into analytics DB.
        """
        mongo_query = self.mongo_query(query)
        self.convert2skeys(mongo_query)
        return mongo_query

    def add_to_analytics(self, query, mongo_query):
        "Add DAS query to analytics DB"
        self.analytics.add_query(query, mongo_query)

    def get_ply_query(self, query):
        """
        Get ply object for given query. Since we rely on PLY package and it may
        fail under the load we use couple of trials.
        """
        ply_query = ply_parse_query(query, self.daskeys, self.dasservices,
                    self.parserdir, self.verbose)
        return ply_query

    def mongo_query(self, query):
        """
        Return mongo query for provided input query
        """
        mongo_query = None
        if  self.verbose:
            ply_output(query, self.daskeys, self.dasservices,
                    self.parserdir, self.verbose)
        parse_again = True
        if  self.enabledb:
            # consult the parser cache first
            status, value = self.parserdb.lookup_query(query)
            # a cached result is reused only when the query carries no
            # "last" key pattern (those are time-dependent)
            if status == PARSERCACHE_VALID and \
                len(last_key_pattern.findall(query)) == 0:
                mongo_query = value
                parse_again = False
            elif status == PARSERCACHE_INVALID:
                # we unable to find query in parserdb, so will parse again
                parse_again = True
            else:
                ply_query = self.get_ply_query(query)
                if  ply_query:
                    try:
                        mongo_query = ply2mongo(ply_query)
                        parse_again = False
                    except Exception as exc:
                        msg = "Fail in ply2mongo, query=%s, ply_query=%s" \
                                % (query, ply_query)
                        print msg
                    # NOTE(review): mongo_query may still be None here when
                    # ply2mongo failed; confirm parserdb tolerates that
                    try:
                        self.parserdb.insert_valid_query(query, mongo_query)
                    except Exception as exc:
                        msg = "Fail to insert into parserdb, exception=%s" \
                                % str(exc)
                        print_exc(msg, print_traceback=True)
        if  parse_again:
            try:
                ply_query   = self.get_ply_query(query)
                mongo_query = ply2mongo(ply_query)
            except Exception as exc:
                msg = "Fail to parse query='%s'" % query
                print_exc(msg, print_traceback=False)
                raise exc
        # a valid MongoDB query must carry both 'fields' and 'spec'
        if  set(mongo_query.keys()) & set(['fields', 'spec']) != \
                set(['fields', 'spec']):
            raise Exception('Invalid MongoDB query %s' % mongo_query)
        if  not mongo_query['fields'] and len(mongo_query['spec'].keys()) > 1:
            raise Exception(ambiguous_msg(query, mongo_query['spec'].keys()))
        for key, val in mongo_query['spec'].iteritems():
            if  isinstance(val, list):
                raise Exception(ambiguos_val_msg(query, key, val))
        return mongo_query

    def convert2skeys(self, mongo_query):
        """
        Convert DAS input keys into DAS selection keys.
        """
        if  not mongo_query['spec']:
            # no conditions: map every selected field to a wildcard spec
            for key in mongo_query['fields']:
                for system in self.dasmapping.list_systems():
                    mapkey = self.dasmapping.find_mapkey(system, key)
                    if  mapkey:
                        mongo_query['spec'][mapkey] = '*'
            return
        spec = mongo_query['spec']
        to_replace = []
        for key, val in spec.iteritems():
            for system in self.dasmapping.list_systems():
                mapkey = self.dasmapping.find_mapkey(system, key, val)
                if  mapkey and mapkey != key and \
                    key in mongo_query['spec']:
                    to_replace.append((key, mapkey))
                    continue
        # rename keys after iteration to avoid mutating spec while looping
        for key, mapkey in to_replace:
            if  key in mongo_query['spec']:
                mongo_query['spec'][mapkey] = mongo_query['spec'][key]
                del mongo_query['spec'][key]

    def services(self, query):
        """Find out DAS services to use for provided query"""
        skeys, cond = decompose(query)
        if  not skeys:
            skeys = []
        if  isinstance(skeys, str):
            skeys = [skeys]
        slist = []
        # look-up services from Mapping DB
        for key in skeys + [i for i in cond.keys()]:
            for service, keys in self.daskeysmap.iteritems():
                if  service not in self.dasservices:
                    continue
                value = cond.get(key, None)
                daskeys = self.dasmapping.find_daskey(service, key, value)
                if  set(keys) & set(daskeys) and service not in slist:
                    slist.append(service)
        # look-up special key condition
        requested_system = query.get('system', None)
        if  requested_system:
            if  isinstance(requested_system, basestring):
                requested_system = [requested_system]
            return list( set(slist) & set(requested_system) )
        return slist

    def service_apis_map(self, query):
        """
        Find out which APIs correspond to provided query.
        Return a map of found services and their apis.
        """
        skeys, cond = decompose(query)
        if  not skeys:
            skeys = []
        if  isinstance(skeys, str):
            skeys = [skeys]
        adict = {}
        # condition keys minus DAS special keys are the ones an API must map
        mapkeys = [key for key in cond.keys() if key not in das_special_keys()]
        services = self.services(query)
        for srv in services:
            alist = self.dasmapping.list_apis(srv)
            for api in alist:
                daskeys = self.dasmapping.api_info(srv, api)['das_map']
                maps = [r['rec_key'] for r in daskeys]
                if  set(mapkeys) & set(maps) == set(mapkeys): 
                    if  srv in adict:
                        new_list = adict[srv] + [api]
                        adict[srv] = list( set(new_list) )
                    else:
                        adict[srv] = [api]
        return adict

    def params(self, query):
        """
        Return dictionary of parameters to be used in DAS Core:
        selection keys, conditions and services.
        """
        skeys, cond = decompose(query)
        services = []
        for srv in self.services(query):
            if  srv not in services:
                services.append(srv)
        return dict(selkeys=skeys, conditions=cond, services=services)
Example #19
0
class QLManager(object):
    """
    DAS QL manager.

    Parses DAS-QL queries into MongoDB query dicts and determines which
    DAS services/APIs are able to answer a given query.
    """
    def __init__(self, config=None):
        """
        Initialize mapping, service list and DAS keys from *config*
        (reads the default DAS config when None).

        :raises Exception: when no DAS maps are found in MappingDB
        """
        if not config:
            config = das_readconfig()
        self.dasmapping = DASMapping(config)
        if not self.dasmapping.check_maps():
            msg = "No DAS maps found in MappingDB"
            raise Exception(msg)
        self.dasservices = config['services']
        self.daskeysmap = self.dasmapping.daskeys()
        self.operators = list(das_operators())
        self.daskeys = list(das_special_keys())
        self.verbose = config['verbose']
        self.logger = PrintManager('QLManger', self.verbose)
        # flatten per-service DAS keys into the overall key list
        for val in self.daskeysmap.values():
            for item in val:
                self.daskeys.append(item)

    def parse(self, query):
        """
        Parse input query and return query in MongoDB form.
        """
        mongo_query = self.mongo_query(query)
        self.convert2skeys(mongo_query)
        return mongo_query

    def mongo_query(self, query):
        """
        Return mongo query for provided input query.

        :raises Exception: for queries lacking 'fields'/'spec', or for
            ambiguous selection keys or list-valued conditions
        """
        mongo_query = parse_query(query, self.daskeys, self.dasservices,
                                  self.verbose)
        # a valid MongoDB query must carry both 'fields' and 'spec'
        if  set(mongo_query.keys()) & set(['fields', 'spec']) != \
                set(['fields', 'spec']):
            raise Exception('Invalid MongoDB query %s' % mongo_query)
        if not mongo_query['fields'] and len(mongo_query['spec'].keys()) > 1:
            raise Exception(ambiguous_msg(query, mongo_query['spec'].keys()))
        for key, val in mongo_query['spec'].items():
            if isinstance(val, list):
                raise Exception(ambiguos_val_msg(query, key, val))
        return mongo_query

    def convert2skeys(self, mongo_query):
        """
        Convert DAS input keys into DAS selection keys.
        """
        if not mongo_query['spec']:
            # no conditions: map every selected field to a wildcard spec
            for key in mongo_query['fields']:
                for system in self.dasmapping.list_systems():
                    mapkey = self.dasmapping.find_mapkey(system, key)
                    if mapkey:
                        mongo_query['spec'][mapkey] = '*'
            return
        spec = mongo_query['spec']
        to_replace = []
        for key, val in spec.items():
            for system in self.dasmapping.list_systems():
                mapkey = self.dasmapping.find_mapkey(system, key, val)
                if  mapkey and mapkey != key and \
                    key in mongo_query['spec']:
                    to_replace.append((key, mapkey))
                    continue
        # rename keys after iteration to avoid mutating spec while looping
        for key, mapkey in to_replace:
            if key in mongo_query['spec']:
                mongo_query['spec'][mapkey] = mongo_query['spec'][key]
                del mongo_query['spec'][key]

    def services(self, query):
        """Find out DAS services to use for provided query"""
        skeys, cond = decompose(query)
        if not skeys:
            skeys = []
        if isinstance(skeys, str):
            skeys = [skeys]
        slist = []
        # look-up services from Mapping DB
        for key in skeys + [i for i in cond.keys()]:
            for service, keys in self.daskeysmap.items():
                if service not in self.dasservices:
                    continue
                value = cond.get(key, None)
                daskeys = self.dasmapping.find_daskey(service, key, value)
                if set(keys) & set(daskeys) and service not in slist:
                    slist.append(service)
        # look-up special key condition
        requested_system = query.get('system', None)
        if requested_system:
            # bug fix: `basestring` is undefined on Python 3 (this class
            # otherwise uses py3 idioms), so test against `str` instead
            if isinstance(requested_system, str):
                requested_system = [requested_system]
            return list(set(slist) & set(requested_system))
        return slist

    def service_apis_map(self, query):
        """
        Find out which APIs correspond to provided query.
        Return a map of found services and their apis.
        """
        skeys, cond = decompose(query)
        if not skeys:
            skeys = []
        if isinstance(skeys, str):
            skeys = [skeys]
        adict = {}
        # condition keys minus DAS special keys are the ones an API must map
        mapkeys = [
            key for key in list(cond.keys()) if key not in das_special_keys()
        ]
        services = self.services(query)
        for srv in services:
            alist = self.dasmapping.list_apis(srv)
            for api in alist:
                daskeys = self.dasmapping.api_info(srv, api)['das_map']
                maps = [r['rec_key'] for r in daskeys]
                if set(mapkeys) & set(maps) == set(mapkeys):
                    if srv in adict:
                        new_list = adict[srv] + [api]
                        adict[srv] = list(set(new_list))
                    else:
                        adict[srv] = [api]
        return adict

    def params(self, query):
        """
        Return dictionary of parameters to be used in DAS Core:
        selection keys, conditions and services.
        """
        skeys, cond = decompose(query)
        services = []
        for srv in self.services(query):
            if srv not in services:
                services.append(srv)
        return dict(selkeys=skeys, conditions=cond, services=services)
Example #20
0
class testDASMapping(unittest.TestCase):
    """
    Unit tests for the DAS mappingdb manager (DASMapping): API records,
    notations, presentation maps and the derived per-system service map.
    """

    def setUp(self):
        """
        Build a throw-away `test_mapping` MongoDB database seeded with a
        presentation map plus its verification token, then construct the
        DASMapping manager under test.
        """
        debug = 0
        self.db = "test_mapping.db"
        config = deepcopy(das_readconfig())
        dburi = config["mongodb"]["dburi"]
        logger = PrintManager("TestDASMapping", verbose=debug)
        config["logger"] = logger
        config["verbose"] = debug
        dbname = "test_mapping"
        collname = "db"
        config["mappingdb"] = dict(dburi=dburi, dbname=dbname, collname=collname)
        # add some maps to mapping db; drop leftovers from previous runs first
        conn = MongoClient(dburi)
        conn.drop_database(dbname)
        self.coll = conn[dbname][collname]
        self.pmap = {
            "presentation": {
                "block": [{"ui": "Block name", "das": "block.name"}, {"ui": "Block size", "das": "block.size"}]
            },
            "type": "presentation",
        }
        # NOTE(review): pymongo-2 style insert(); insert_one() in pymongo >= 3
        # -- confirm the driver version the project pins
        self.coll.insert(self.pmap)
        # the token record lets DASMapping verify the map collection is intact
        ver_token = verification_token(self.coll.find(exhaust=True))
        rec = {"verification_token": ver_token, "type": "verification_token"}
        self.coll.insert(rec)

        self.mgr = DASMapping(config)

    def tearDown(self):
        """Invoke after each test: drop the test mapping DB."""
        self.mgr.delete_db()

    def test_api(self):
        """test methods for api table"""
        # start from a clean, freshly initialized mapping DB
        self.mgr.delete_db()
        self.mgr.init()

        apiversion = "DBS_2_0_8"
        url = "http://a.com"
        dformat = "JSON"
        expire = 100

        # first API record: listRuns, looked up through the 'run' das key
        api = "listRuns"
        params = {"apiversion": apiversion, "path": "required", "api": api}
        rec = {
            "system": "dbs",
            "urn": api,
            "format": dformat,
            "url": url,
            "params": params,
            "expire": expire,
            "lookup": "run",
            "wild_card": "*",
            "das_map": [dict(das_key="run", rec_key="run.run_number", api_arg="path")],
            "type": "service",
        }
        self.mgr.add(rec)
        # expected servicemap entry for listRuns; note: 'apiversion' is
        # stripped from the record params and folded into the smap params
        smap = {
            api: {
                "url": url,
                "expire": expire,
                "keys": ["run"],
                "format": dformat,
                "wild_card": "*",
                "cert": None,
                "ckey": None,
                "services": "",
                "lookup": "run",
                "params": {"path": "required", "api": api, "apiversion": "DBS_2_0_8"},
            }
        }

        # second API record: listBlocks, with a pattern-constrained site key
        rec = {
            "system": "dbs",
            "urn": "listBlocks",
            "format": dformat,
            "url": url,
            "expire": expire,
            "lookup": "block",
            "params": {
                "apiversion": apiversion,
                "api": "listBlocks",
                "block_name": "*",
                "storage_element_name": "*",
                "user_type": "NORMAL",
            },
            "das_map": [
                {"das_key": "block", "rec_key": "block.name", "api_arg": "block_name"},
                {
                    "das_key": "site",
                    "rec_key": "site.se",
                    "api_arg": "storage_element_name",
                    # NOTE(review): "\." inside a non-raw string is an invalid
                    # escape (DeprecationWarning on py3.6+); consider r"..."
                    "pattern": "re.compile('([a-zA-Z0-9]+\.){2}')",
                },
            ],
            "type": "service",
        }
        self.mgr.add(rec)

        system = "dbs"
        api = "listBlocks"
        daskey = "block"
        rec_key = "block.name"
        api_input = "block_name"

        res = self.mgr.list_systems()
        self.assertEqual(["dbs"], res)

        # both APIs registered under the dbs system
        res = self.mgr.list_apis()
        #        self.assertEqual([api], res)
        res.sort()
        self.assertEqual(["listBlocks", "listRuns"], res)

        res = self.mgr.lookup_keys(system, api, daskey)
        self.assertEqual([rec_key], res)

        # das-to-api argument translation for the block record key
        value = ""
        res = self.mgr.das2api(system, api, rec_key, value)
        self.assertEqual([api_input], res)

        # adding another params which default is None
        res = self.mgr.das2api(system, api, rec_key, value)
        self.assertEqual([api_input], res)

        # reverse translation: api argument back to das key
        res = self.mgr.api2das(system, api_input)
        self.assertEqual([daskey], res)

        # adding notations
        notations = {
            "system": system,
            "type": "notation",
            "notations": [
                {"api_output": "storage_element_name", "rec_key": "se", "api": ""},
                {"api_output": "number_of_events", "rec_key": "nevents", "api": ""},
            ],
        }
        self.mgr.add(notations)

        res = self.mgr.notation2das(system, "number_of_events")
        self.assertEqual("nevents", res)

        # API keys
        res = self.mgr.api2daskey(system, api)
        self.assertEqual(["block", "site"], res)

        # build service map: extend expectation with the listBlocks entry
        smap.update(
            {
                api: {
                    "url": url,
                    "expire": expire,
                    "cert": None,
                    "ckey": None,
                    "keys": ["block", "site"],
                    "format": dformat,
                    "wild_card": "*",
                    "services": "",
                    "lookup": daskey,
                    "params": {
                        "storage_element_name": "*",
                        "api": api,
                        "block_name": "*",
                        "user_type": "NORMAL",
                        "apiversion": "DBS_2_0_8",
                    },
                }
            }
        )
        res = self.mgr.servicemap(system)
        self.assertEqual(smap, res)

    def test_presentation(self):
        """test presentation method: returns the UI map seeded in setUp"""
        self.mgr.init()
        expect = self.pmap["presentation"]["block"]
        result = self.mgr.presentation("block")
        self.assertEqual(expect, result)

    def test_notations(self):
        """test notations method: notation records round-trip per system"""
        self.mgr.init()
        system = "test"
        rec = {
            "notations": [
                {"api_output": "site.resource_element.cms_name", "rec_key": "site.name", "api": ""},
                {"api_output": "site.resource_pledge.cms_name", "rec_key": "site.name", "api": ""},
                {"api_output": "admin.contacts.cms_name", "rec_key": "site.name", "api": ""},
            ],
            "system": system,
            "type": "notation",
        }
        self.mgr.add(rec)
        expect = rec["notations"]
        result = self.mgr.notations(system)[system]
        self.assertEqual(expect, result)
Example #21
0
    def setUp(self):
        """
        Prepare a disposable `test_mapping` MongoDB database seeded with a
        presentation map, one dbs3 API record and a verification token,
        then instantiate the DASMapping manager under test.
        """
        debug = 0
        self.db = 'test_mapping.db'
        config = deepcopy(das_readconfig())
        dburi = config['mongodb']['dburi']
        config['logger'] = PrintManager('TestDASMapping', verbose=debug)
        config['verbose'] = debug
        dbname = 'test_mapping'
        collname = 'db'
        config['mappingdb'] = dict(dburi=dburi,
                                   dbname=dbname,
                                   collname=collname)
        # start from a clean database on every run
        conn = MongoClient(dburi)
        conn.drop_database(dbname)
        self.coll = conn[dbname][collname]

        # presentation map: UI labels for block attributes
        block_maps = [
            {"ui": "Block name", "das": "block.name"},
            {"ui": "Block size", "das": "block.size"},
        ]
        self.pmap = {
            "presentation": {"block": block_maps},
            "type": "presentation",
        }
        self.coll.insert(self.pmap)

        # one dbs3 API record (acquisitioneras) for the prod/global instance
        rec = {
            'system': 'dbs3',
            'urn': 'acquisitioneras',
            'format': 'JSON',
            'instances': ['prod/global'],
            'url': 'https://cmsweb.cern.ch/dbs/prod/global/DBSReader/acquisitioneras/',
            'expire': 100,
            'lookup': 'era',
            'params': {},
            'das_map': [{'das_key': 'era',
                         'rec_key': 'era.name',
                         'api_arg': 'era'}],
            'type': 'service',
        }
        self.coll.insert(rec)

        # the token record lets DASMapping verify the maps are intact
        ver_token = verification_token(self.coll.find(**PYMONGO_OPTS))
        self.coll.insert({'verification_token': ver_token,
                          'type': 'verification_token'})

        self.mgr = DASMapping(config)
Example #22
0
class testDASMapping(unittest.TestCase):
    """
    Unit tests for the DAS mappingdb class, written against the older
    record schema (daskeys/das2api) and the pymongo-2 Connection driver.
    """
    def setUp(self):
        """
        Build a throw-away `test_mapping` MongoDB database seeded with a
        presentation map, then construct the DASMapping manager under test.
        """
        debug    = 0
        self.db  = 'test_mapping.db'
        config   = deepcopy(das_readconfig())
        dburi    = config['mongodb']['dburi']
        logger   = PrintManager('TestDASMapping', verbose=debug)
        config['logger']  = logger
        config['verbose'] = debug
        dbname   = 'test_mapping'
        collname = 'db'
        config['mappingdb'] = dict(dburi=dburi, dbname=dbname, collname=collname)
        # add some maps to mapping db; drop leftovers from previous runs first
        # NOTE(review): pymongo-2 Connection class; renamed MongoClient in
        # pymongo >= 3 -- confirm the driver version used here
        conn = Connection(dburi)
        conn.drop_database(dbname)
        coll = conn[dbname][collname]
        self.pmap = {"presentation": {"block":[{"ui": "Block name", "das": "block.name"}, 
        {"ui": "Block size", "das": "block.size"}]}}
        coll.insert(self.pmap)

        self.mgr = DASMapping(config)

    def tearDown(self):
        """Invoke after each test: drop the test mapping DB."""
        self.mgr.delete_db()

    def test_api(self):
        """test methods for api table"""
        # start from a clean, freshly created mapping DB
        self.mgr.delete_db()
        self.mgr.create_db()

        apiversion = 'DBS_2_0_8'
        url     = 'http://a.com'
        dformat = 'JSON'
        expire  = 100

        # first API record: listRuns, looked up through the 'run' das key
        api = 'listRuns'
        params = { 'apiversion':apiversion, 'path' : 'required', 'api':api}
        rec = {'system' : 'dbs', 'urn':api, 'format':dformat, 'url':url,
            'params': params, 'expire':expire, "wild_card": "*",
            'daskeys' : [dict(key='run', map='run.run_number', pattern='')],
            'das2api' : [
                    dict(api_param='path', das_key='dataset', pattern=""),
            ]
        }
        self.mgr.add(rec)
        # check_dasmap: only the mapped primary key should resolve
        res = self.mgr.check_dasmap('dbs', api, 'run.bfield')
        self.assertEqual(False, res)
        res = self.mgr.check_dasmap('dbs', api, 'run.run_number')
        self.assertEqual(True, res)
        # expected servicemap entry for listRuns
        smap = {api: {'url':url, 'expire':expire, 'keys': ['run'], 
                'format': dformat, "wild_card":"*", 'cert':None, 'ckey': None,
                'params': {'path': 'required', 'api': api, 
                           'apiversion': 'DBS_2_0_8'}
                     }
        }

        # second API record: listBlocks, with pattern-constrained site args
        # NOTE(review): "\." inside non-raw strings is an invalid escape
        # (DeprecationWarning on py3.6+); consider raw strings
        rec = {'system':'dbs', 'urn': 'listBlocks', 'format':dformat,
          'url':url, 'expire': expire,
          'params' : {'apiversion': apiversion, 'api': 'listBlocks',
                      'block_name':'*', 'storage_element_name':'*',
                      'user_type':'NORMAL'},
          'daskeys': [
                 {'key':'block', 'map':'block.name', 'pattern':''},
                 ],
          'das2api': [
                 {'api_param':'storage_element_name', 
                  'das_key':'site', 
                  'pattern':"re.compile('([a-zA-Z0-9]+\.){2}')"},
                 {'api_param':'storage_element_name', 
                  'das_key':'site.se', 
                  'pattern':"re.compile('([a-zA-Z0-9]+\.){2}')"},
                 {'api_param':'block_name', 
                  'das_key':'block', 
                  'pattern':""},
                 {'api_param':'block_name', 
                  'das_key':'block.name', 
                  'pattern':""},
                 ]
        } 
        self.mgr.add(rec)


        system = 'dbs'
        api = 'listBlocks'
        daskey = 'block'
        primkey = 'block.name'
        api_input = 'block_name'

        res = self.mgr.list_systems()
        self.assertEqual(['dbs'], res)

        # both APIs registered under the dbs system
        res = self.mgr.list_apis()
#        self.assertEqual([api], res)
        res.sort()
        self.assertEqual(['listBlocks', 'listRuns'], res)

        res = self.mgr.lookup_keys(system, daskey)
        self.assertEqual([primkey], res)

        # das-to-api argument translation for the block das key
        value = ''
        res = self.mgr.das2api(system, daskey, value)
        self.assertEqual([api_input], res)

        # adding another params which default is None
        res = self.mgr.das2api(system, daskey, value, api)
        self.assertEqual([api_input], res)

        # reverse translation: api argument back to das keys
        res = self.mgr.api2das(system, api_input)
        self.assertEqual([daskey, primkey], res)

        # adding notations
        notations = {'system':system, 
            'notations':[
                    {'notation':'storage_element_name', 'map':'se', 'api':''},
                    {'notation':'number_of_events', 'map':'nevents', 'api':''},
                        ]
        }
        self.mgr.add(notations)

        res = self.mgr.notation2das(system, 'number_of_events')
        self.assertEqual('nevents', res)

        # API keys
        res = self.mgr.api2daskey(system, api)
        self.assertEqual([daskey], res)

        # build service map: extend expectation with the listBlocks entry
        smap.update({api: {'url':url, 'expire':expire, 'cert':None, 'ckey': None,
                'keys': ['block'], 'format':dformat, "wild_card": "*",
                'params': {'storage_element_name': '*', 'api':api, 
                           'block_name': '*', 'user_type': 'NORMAL', 
                           'apiversion': 'DBS_2_0_8'}
                     }
        })
        res = self.mgr.servicemap(system)
        self.assertEqual(smap, res)

    def test_presentation(self):
        """test presentation method: returns the UI map seeded in setUp"""
        self.mgr.create_db()
#        rec = {'presentation':{'block':['block.name', 'block.size'], 'size':['size.name']}}
#        self.mgr.add(rec)
        expect = self.pmap['presentation']['block']
        result = self.mgr.presentation('block')
        self.assertEqual(expect, result)

    def test_notations(self):
        """test notations method: notation records round-trip per system"""
        self.mgr.create_db()
        system = "test"
        rec = {'notations': [
        {"notation": "site.resource_element.cms_name", "map": "site.name", "api": ""},
        {"notation": "site.resource_pledge.cms_name", "map": "site.name", "api": ""},
        {"notation": "admin.contacts.cms_name", "map":"site.name", "api":""}
        ], "system": system}
        self.mgr.add(rec)
        expect = rec['notations']
        result = self.mgr.notations(system)[system]
        self.assertEqual(expect, result)
Example #23
0
    def __init__(self,
                 config=None,
                 debug=0,
                 nores=False,
                 logger=None,
                 engine=None,
                 multitask=True):
        """
        Initialize the DAS core.

        Reads the DAS configuration (or uses the supplied one), sets up
        verbosity/logging, an optional TaskManager for multitask mode,
        the mapping/keylearning/rawcache layers, and dynamically loads
        one data-service plugin per system registered in the mapping DB.

        :param config: DAS configuration dict; read via das_readconfig()
            when not given
        :param debug: non-zero int enables verbose mode (and disables
            multitask)
        :param nores: when True, enable write_cache and record no results
        :param logger: external logger; a PrintManager is created otherwise
        :param engine: stored into the config under 'engine'
        :param multitask: set False to force single-task operation
        """
        if config:
            dasconfig = config
        else:
            dasconfig = das_readconfig()
        verbose = dasconfig['verbose']
        self.stdout = debug
        if isinstance(debug, int) and debug:
            self.verbose = debug
            dasconfig['verbose'] = debug
        else:
            self.verbose = verbose
        das_timer('DASCore::init', self.verbose)
        self.operators = das_operators()
        self.collect_wait_time = dasconfig['das'].get('collect_wait_time', 120)

        # set noresults option
        self.noresults = False
        if nores:
            dasconfig['write_cache'] = True
            self.noresults = nores

        self.init_expire = dasconfig['das'].get('init_expire', 5 * 60)
        self.multitask = dasconfig['das'].get('multitask', True)
        if debug or self.verbose:
            self.multitask = False  # in verbose mode do not use multitask
            dasconfig['das']['multitask'] = False
        if not multitask:  # explicitly call DASCore ctor
            self.multitask = False
            dasconfig['das']['multitask'] = False
        dasconfig['engine'] = engine
        if self.multitask:
            nworkers = dasconfig['das'].get('core_workers', 5)
            #             if  engine:
            #                 thr_name = 'DASCore:PluginTaskManager'
            #                 self.taskmgr = PluginTaskManager(\
            #                         engine, nworkers=nworkers, name=thr_name)
            #                 self.taskmgr.subscribe()
            #             else:
            #                 thr_name = 'DASCore:TaskManager'
            #                 self.taskmgr = TaskManager(nworkers=nworkers, name=thr_name)
            thr_name = 'DASCore:TaskManager'
            self.taskmgr = TaskManager(nworkers=nworkers, name=thr_name)
        else:
            self.taskmgr = None

        if logger:
            self.logger = logger
        else:
            self.logger = PrintManager('DASCore', self.verbose)

        # define Mapping/Analytics/Parser in this order since Parser depends
        # on first two
        dasmapping = DASMapping(dasconfig)
        dasconfig['dasmapping'] = dasmapping
        self.mapping = dasmapping

        self.keylearning = DASKeyLearning(dasconfig)
        dasconfig['keylearning'] = self.keylearning

        # init DAS cache
        self.rawcache = DASMongocache(dasconfig)
        dasconfig['rawcache'] = self.rawcache

        # plug-in architecture: loop over registered data-services in
        # dasconfig; load appropriate module/class; register data
        # service with DASCore.
        self.systems = dasmapping.list_systems()
        # pointer to the DAS top level directory
        dasroot = '/'.join(__file__.split('/')[:-3])
        for name in self.systems:
            try:
                klass  = 'DAS/services/%s/%s_service.py' \
                    % (name, name)
                srvfile = os.path.join(dasroot, klass)
                # scan the service module's text for its class name: the
                # one that subclasses DASAbstractService
                with open(srvfile) as srvclass:
                    for line in srvclass:
                        if line.find('(DASAbstractService)') != -1:
                            klass = line.split('(DASAbstractService)')[0]
                            klass = klass.split('class ')[-1]
                            break
                mname = 'DAS.services.%s.%s_service' % (name, name)
                module = __import__(mname, fromlist=[klass])
                # instantiate the plugin and attach it as self.<system>
                obj = getattr(module, klass)(dasconfig)
                setattr(self, name, obj)
            except IOError as err:
                if debug > 1:
                    # we have virtual services, so IOError can be correct
                    print_exc(err)
                # no service module on disk: fall back to GenericService
                try:
                    mname = 'DAS.services.generic_service'
                    module = __import__(mname, fromlist=['GenericService'])
                    obj = module.GenericService(name, dasconfig)
                    setattr(self, name, obj)
                except Exception as exc:
                    print_exc(exc)
                    msg = "Unable to load %s data-service plugin" % name
                    raise Exception(msg)
            except Exception as exc:
                print_exc(exc)
                msg = "Unable to load %s data-service plugin" % name
                raise Exception(msg)

        # loop over systems and get system keys, add mapping keys to final list
        self.service_keys = {}
        self.service_parameters = {}
        for name in self.systems:
            skeys = list(getattr(self, name).keys())
            self.service_keys[getattr(self, name).name] = skeys
            sparams = getattr(self, name).parameters()
            self.service_parameters[getattr(self, name).name] = sparams

        self.service_keys['special'] = das_special_keys()
        self.dasconfig = dasconfig
        das_timer('DASCore::init', self.verbose)
Example #24
0
class testDASMapping(unittest.TestCase):
    """
    Unit tests for the DAS mappingdb class against the dbs3 data-service
    schema (das_map records with instances and verification token).
    """
    def setUp(self):
        """
        Build a throw-away `test_mapping` MongoDB database seeded with a
        presentation map, one dbs3 API record and a verification token,
        then construct the DASMapping manager under test.
        """
        debug    = 0
        self.db  = 'test_mapping.db'
        config   = deepcopy(das_readconfig())
        dburi    = config['mongodb']['dburi']
        logger   = PrintManager('TestDASMapping', verbose=debug)
        config['logger']  = logger
        config['verbose'] = debug
        dbname   = 'test_mapping'
        collname = 'db'
        config['mappingdb'] = dict(dburi=dburi, dbname=dbname, collname=collname)
        # add some maps to mapping db; drop leftovers from previous runs first
        conn = MongoClient(dburi)
        conn.drop_database(dbname)
        self.coll = conn[dbname][collname]
        self.pmap = {"presentation": {"block":[{"ui": "Block name", "das": "block.name"},
            {"ui": "Block size", "das": "block.size"}]}, "type": "presentation"}
        # NOTE(review): pymongo-2 style insert(); insert_one() in pymongo >= 3
        self.coll.insert(self.pmap)

        # one dbs3 API record (acquisitioneras) for the prod/global instance
        url     = 'https://cmsweb.cern.ch/dbs/prod/global/DBSReader/acquisitioneras/'
        dformat = 'JSON'
        system  = 'dbs3'
        expire  = 100
        rec = {'system':system, 'urn': 'acquisitioneras', 'format':dformat,
            'instances': ['prod/global'],
            'url':url, 'expire': expire, 'lookup': 'era',
            'params' : {},
             'das_map': [
                 {"das_key": "era", "rec_key":"era.name", "api_arg":"era"}
                 ],
             'type': 'service'
        }
        self.coll.insert(rec)

        # the token record lets DASMapping verify the maps are intact
        ver_token = verification_token(self.coll.find(**PYMONGO_OPTS))
        rec = {'verification_token':ver_token, 'type':'verification_token'}
        self.coll.insert(rec)

        self.mgr = DASMapping(config)

    def tearDown(self):
        """Invoke after each test: drop the test mapping DB."""
        self.mgr.delete_db()

    def test_api(self):
        """test methods for api table"""
        # start from a clean mapping DB
        self.mgr.delete_db()

        system  = 'dbs3'
        url     = 'https://cmsweb.cern.ch/dbs/prod/global/DBSReader'
        dformat = 'JSON'
        expire  = 100
        instances = ["prod/global", "prod/phys01"]

        # first API record: primarydatasets
        api = 'primarydatasets'
        params = {"primary_ds_name":"*"}
        rec = {'system':system, 'urn':api, 'format':dformat, 'url':url,
            'instances': instances,
            'params': params, 'expire':expire, 'lookup': 'primary_dataset', 'wild_card':'*',
            'das_map' : [dict(das_key='primary_dataset',
                              rec_key='primary_dataset.name',
                              api_arg='primary_dataset')],
            'type': 'service'
        }
        self.mgr.add(rec)
        # expected servicemap entry for primarydatasets
        smap = {api: {'url':url, 'expire':expire, 'keys': ['primary_dataset'],
                'format': dformat, 'wild_card':'*', 'cert':None, 'ckey': None,
                'services': '', 'lookup': 'primary_dataset',
                'params': params }
        }

        # second API record: datasetaccesstypes, looked up via 'status'
        rec = {'system':system, 'urn': 'datasetaccesstypes', 'format':dformat,
            'instances': instances,
            'url':url, 'expire': expire, 'lookup': 'status',
            'params' : {'status':'*'},
             'das_map': [
                 {"das_key": "status", "rec_key":"status.name", "api_arg":"status"}
                 ],
             'type': 'service'
        }
        self.mgr.add(rec)


        api = 'datasetaccesstypes'
        daskey = 'status'
        rec_key = 'status.name'
        api_input = 'status'

        res = self.mgr.list_systems()
        self.assertEqual([system], res)

        # both APIs registered under the dbs3 system
        res = self.mgr.list_apis()
        res.sort()
        self.assertEqual(['datasetaccesstypes', 'primarydatasets'], res)

        res = self.mgr.lookup_keys(system, api, daskey)
        self.assertEqual([rec_key], res)

        # das-to-api argument translation for the status record key
        value = ''
        res = self.mgr.das2api(system, api, rec_key, value)
        self.assertEqual([api_input], res)

        # adding another params which default is None
        res = self.mgr.das2api(system, api, rec_key, value)
        self.assertEqual([api_input], res)

        # reverse translation: api argument back to das key
        res = self.mgr.api2das(system, api_input)
        self.assertEqual([daskey], res)

        # adding notations
        notations = {'system':system, 'type': 'notation',
            'notations':[
                    {'api_output':'storage_element_name', 'rec_key':'se', 'api':''},
                    {'api_output':'number_of_events', 'rec_key':'nevents', 'api':''},
                        ]
        }
        self.mgr.add(notations)

        res = self.mgr.notation2das(system, 'number_of_events')
        self.assertEqual('nevents', res)

        # API keys
        res = self.mgr.api2daskey(system, api)
        self.assertEqual(['status'], res)

        # build service map: extend expectation with datasetaccesstypes
        smap.update({api: {'url':url, 'expire':expire, 'cert':None, 'ckey': None,
                'keys': ['status'], 'format':dformat, 'wild_card':'*',
                'services': '', 'lookup': daskey,
                'params': {"status": "*"}
                     }
        })
        res = self.mgr.servicemap(system)
        self.assertEqual(smap, res)

    def test_presentation(self):
        """test presentation method: returns the UI map seeded in setUp"""
        expect = self.pmap['presentation']['block']
        result = self.mgr.presentation('block')
        self.assertEqual(expect, result)

    def test_notations(self):
        """test notations method: notation records round-trip per system"""
        system = "test"
        rec = {'notations': [
        {"api_output": "site.resource_element.cms_name", "rec_key": "site.name", "api": ""},
        {"api_output": "site.resource_pledge.cms_name", "rec_key": "site.name", "api": ""},
        {"api_output": "admin.contacts.cms_name", "rec_key":"site.name", "api":""}
        ], "system": system, "type": "notation"}
        self.mgr.add(rec)
        expect = rec['notations']
        result = self.mgr.notations(system)[system]
        self.assertEqual(expect, result)
Example #25
0
def init_trackers():
    """
    Populate the global TRACKERS registry: one InputValuesTracker per
    input-values provider known to the DAS mapping DB.
    """
    dasmapping = DASMapping(config=das_readconfig())
    for prov in dasmapping.inputvalues_uris():
        TRACKERS[prov['input']] = InputValuesTracker(prov)
Example #26
0
class QLManager(object):
    """
    DAS QL manager: parses DAS query-language input into MongoDB-style
    queries and resolves which data-services/APIs can answer them.
    """
    def __init__(self, config=None):
        """
        Read the DAS configuration (unless supplied), require a populated
        MappingDB, and build the combined list of known das keys
        (special keys plus every key of every mapped service).
        """
        if  not config:
            config = das_readconfig()
        self.dasmapping  = DASMapping(config)
        if  not self.dasmapping.check_maps():
            msg = "No DAS maps found in MappingDB"
            raise Exception(msg)
        self.dasservices = config['services']
        self.daskeysmap  = self.dasmapping.daskeys()
        self.operators   = list(das_operators())
        self.daskeys     = list(das_special_keys())
        self.verbose     = config['verbose']
        # NOTE(review): logger name 'QLManger' looks like a typo of
        # 'QLManager' -- confirm before changing (it is only a log label)
        self.logger      = PrintManager('QLManger', self.verbose)
        # flatten per-service key lists into the single daskeys list
        for val in self.daskeysmap.values():
            for item in val:
                self.daskeys.append(item)

    def parse(self, query):
        """
        Parse input query and return query in MongoDB form.
        """
        mongo_query = self.mongo_query(query)
        self.convert2skeys(mongo_query)
        return mongo_query

    def mongo_query(self, query):
        """
        Return mongo query for provided input query.
        Raises on queries lacking fields/spec, on ambiguous selections
        and on list-valued conditions.
        """
        mongo_query = parse_query(query, self.daskeys, self.dasservices, self.verbose)
        # a well-formed result must carry both 'fields' and 'spec'
        if  set(mongo_query.keys()) & set(['fields', 'spec']) != \
                set(['fields', 'spec']):
            raise Exception('Invalid MongoDB query %s' % mongo_query)
        if  not mongo_query['fields'] and len(mongo_query['spec'].keys()) > 1:
            raise Exception(ambiguous_msg(query, mongo_query['spec'].keys()))
        for key, val in mongo_query['spec'].items():
            # NOTE(review): helper spelled 'ambiguos_val_msg' -- presumably
            # defined elsewhere in this module under that name; confirm
            if  isinstance(val, list):
                raise Exception(ambiguos_val_msg(query, key, val))
        return mongo_query

    def convert2skeys(self, mongo_query):
        """
        Convert DAS input keys into DAS selection keys.
        Mutates mongo_query['spec'] in place, replacing das keys with
        the mapped record keys found via the mapping DB.
        """
        if  not mongo_query['spec']:
            # no conditions: wildcard every field's mapped key
            for key in mongo_query['fields']:
                for system in self.dasmapping.list_systems():
                    mapkey = self.dasmapping.find_mapkey(system, key)
                    if  mapkey:
                        mongo_query['spec'][mapkey] = '*'
            return
        spec = mongo_query['spec']
        # collect replacements first; do not mutate spec while iterating
        to_replace = []
        for key, val in spec.items():
            for system in self.dasmapping.list_systems():
                mapkey = self.dasmapping.find_mapkey(system, key, val)
                if  mapkey and mapkey != key and \
                    key in mongo_query['spec']:
                    to_replace.append((key, mapkey))
                    continue
        for key, mapkey in to_replace:
            if  key in mongo_query['spec']:
                mongo_query['spec'][mapkey] = mongo_query['spec'][key]
                del mongo_query['spec'][key]
        
    def services(self, query):
        """Find out DAS services to use for provided query"""
        skeys, cond = decompose(query)
        if  not skeys:
            skeys = []
        if  isinstance(skeys, str):
            skeys = [skeys]
        slist = []
        # look-up services from Mapping DB
        for key in skeys + [i for i in cond.keys()]:
            for service, keys in self.daskeysmap.items():
                if  service not in self.dasservices:
                    continue
                value = cond.get(key, None)
                daskeys = self.dasmapping.find_daskey(service, key, value)
                if  set(keys) & set(daskeys) and service not in slist:
                    slist.append(service)
        # look-up special key condition
        # NOTE(review): basestring is Python 2 only -- this module appears
        # py2-era; confirm before porting
        requested_system = query.get('system', None)
        if  requested_system:
            if  isinstance(requested_system, basestring):
                requested_system = [requested_system]
            return list( set(slist) & set(requested_system) )
        return slist

    def service_apis_map(self, query):
        """
        Find out which APIs correspond to provided query.
        Return a map of found services and their apis.
        """
        skeys, cond = decompose(query)
        if  not skeys:
            skeys = []
        if  isinstance(skeys, str):
            skeys = [skeys]
        adict = {}
        # condition keys that the API's das_map must cover
        mapkeys = [key for key in cond.keys() if key not in das_special_keys()]
        services = self.services(query)
        for srv in services:
            alist = self.dasmapping.list_apis(srv)
            for api in alist:
                daskeys = self.dasmapping.api_info(srv, api)['das_map']
                maps = [r['rec_key'] for r in daskeys]
                # subset test: every mapkey appears among the API's rec keys
                if  set(mapkeys) & set(maps) == set(mapkeys): 
                    if  srv in adict:
                        new_list = adict[srv] + [api]
                        adict[srv] = list( set(new_list) )
                    else:
                        adict[srv] = [api]
        return adict

    def params(self, query):
        """
        Return dictionary of parameters to be used in DAS Core:
        selection keys, conditions and services.
        """
        skeys, cond = decompose(query)
        services = []
        for srv in self.services(query):
            if  srv not in services:
                services.append(srv)
        return dict(selkeys=skeys, conditions=cond, services=services)
Example #27
0
class testCMSFakeDataServices(unittest.TestCase):
    """
    A test class for the DAS core module.

    It sets up a local test data service (DASTestDataService) behind
    http://localhost:8080 together with a fake Mapping DB, then runs
    end-to-end queries through DASCore against the fake services.
    """
    def setUp(self):
        """
        set up DAS core module
        """
        debug = 0

        # read DAS config and make fake Mapping DB entry
        collname      = 'test_collection'
        self.dasmerge = 'test_merge'
        self.dascache = 'test_cache'
        self.dasmr    = 'test_mapreduce'
        self.collname = collname
        config        = das_readconfig()
        dburi         = config['mongodb']['dburi']
        self.dburi    = dburi
        logger        = PrintManager('TestCMSFakeDataServices', verbose=debug)
        self.base     = 'http://localhost:8080' # URL of DASTestDataService
        self.expire   = 100
        # point every DAS sub-database at dedicated test collections so
        # the real DAS databases are never touched
        config['logger']    = logger
        config['loglevel']  = debug
        config['verbose']   = debug
        config['mappingdb'] = dict(dburi=dburi, dbname='mapping', collname=collname)
        config['analyticsdb'] = dict(dbname='analytics', collname=collname, history=100)
        config['dasdb'] = {'dbname': 'das',
                           'cachecollection': self.dascache,
                           'mrcollection': self.dasmr,
                           'mergecollection': self.dasmerge}
        config['keylearningdb'] = {'collname': collname, 'dbname': 'keylearning'}
        config['parserdb'] = {'collname': collname, 'dbname': 'parser', 
                                'enable': True, 'sizecap': 10000}
        config['services'] = ['dbs', 'phedex', 'sitedb', 'google_maps', 'ip']

        # mongo parser
        self.mongoparser = ql_manager(config)
        config['mongoparser'] = self.mongoparser

        # setup DAS mapper
        self.mgr = DASMapping(config)

        # create fresh DB
        self.clear_collections()
        self.mgr.delete_db_collection()
        self.mgr.create_db()

        # Add fake mapping records
        self.add_service('ip', 'ip.yml')
        self.add_service('google_maps', 'google_maps.yml')
        self.add_service('dbs', 'dbs.yml')
        self.add_service('phedex', 'phedex.yml')
        self.add_service('sitedb', 'sitedb.yml')

        # create DAS handler
        self.das = DASCore(config)

        # start TestDataService
        self.server = Root(config)
        self.server.start()

    def add_service(self, system, ymlfile):
        """
        Add Fake data service mapping records. We provide system name
        which match corresponding name in DASTestDataService and
        associated with this system YML map file.
        """
        fname  = os.path.join(DASPATH, 'services/maps/%s' % ymlfile)
        url    = self.base + '/%s' % system
        # rewrite the service URL to point at the local test server
        for record in read_service_map(fname):
            record['url'] = url
            record['system'] = system
            self.mgr.add(record)
        for record in read_service_map(fname, 'notations'):
            record['system'] = system
            self.mgr.add(record)

    def clear_collections(self):
        """clean-up test collections"""
        conn = Connection(host=self.dburi)
        for dbname in ['mapping', 'analytics', 'das', 'parser', 'keylearning']:
            db = conn[dbname]
            if  dbname != 'das':
                db.drop_collection(self.collname)
            else:
                # the 'das' database holds three separate test collections
                db.drop_collection(self.dascache)
                db.drop_collection(self.dasmerge)
                db.drop_collection(self.dasmr)

    def tearDown(self):
        """Invoke after each test"""
        self.server.stop()

    def testDBSService(self):
        """test DASCore with test DBS service"""
        query  = "primary_dataset=abc" # invoke query to fill DAS cache
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)

        query  = "primary_dataset=abc" # invoke query to get results from DAS cache
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        result = DotDict(result[0]).get('primary_dataset.name')
        expect = 'abc'
        self.assertEqual(expect, result)

    def testPhedexAndSiteDBServices(self):
        """test DASCore with test PhEDEx and SiteDB services"""
        query  = "site=T3_US_Cornell" # invoke query to fill DAS cache
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)

        query  = "site=T3_US_Cornell | grep site.name" # invoke query to get results from DAS cache
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        expect = 'T3_US_Cornell'
        self.assertEqual(expect, DotDict(result[0]).get('site.name'))
        expect = ['_id', 'das_id', 'site', 'cache_id', 'das', 'qhash']
        expect.sort()
        # sorted() instead of keys()+sort(): works for both list and view keys
        rkeys = sorted(result[0].keys())
        self.assertEqual(expect, rkeys)

    def testAggregators(self):
        """test DASCore aggregators via zip service"""
        query  = "zip=1000"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)

        query  = "zip=1000 | count(zip.place.city)"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        expect = {"function": "count", "result": {"value": 2}, 
                  "key": "zip.place.city", "_id":0}
        self.assertEqual(expect, result[0])

    def testIPService(self):
        """test DASCore with IP service"""
        query  = "ip=137.138.141.145"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)

        query  = "ip=137.138.141.145 | grep ip.address"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        result = DotDict(result[0]).get('ip.address')
        expect = '137.138.141.145'
        self.assertEqual(expect, result)

    def testRecords(self):
        """test records DAS keyword with all services"""
        query  = "ip=137.138.141.145"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)

        query  = "site=T3_US_Cornell"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)

        query  = "records | grep ip.address"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        result = DotDict(result[0]).get('ip.address')
        expect = '137.138.141.145'
        self.assertEqual(expect, result)

        query  = "records | grep site.name"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        expect = 'T3_US_Cornell'
        self.assertEqual(expect, DotDict(result[0]).get('site.name'))

        query  = "records"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        res    = []
        # rows may carry either an 'ip' or a 'site' record; collect both.
        # Use the 'in' operator instead of deprecated dict.has_key()
        for row in result:
            if  'ip' in row:
                res.append(DotDict(row).get('ip.address'))
            if  'site' in row:
                for item in row['site']:
                    if  'name' in item and item['name'] not in res:
                        res.append(item['name'])
        res.sort()
        expect = ['137.138.141.145', 'T3_US_Cornell']
        self.assertEqual(expect, res)
Example #28
0
def list_dbs_instances():
    """Return the list of all DBS instances known to the mapping DB."""
    # imported locally to avoid a circular dependency at module load time
    from DAS.core.das_mapping_db import DASMapping
    mapping = DASMapping(das_readconfig())
    return mapping.dbs_instances()
Example #29
0
def init_trackers():
    """Populate the TRACKERS registry with one tracker per input provider."""
    dasmapping = DASMapping(config=das_readconfig())
    # one InputValuesTracker per provider, keyed by its DAS input name
    for entry in dasmapping.inputvalues_uris():
        TRACKERS[entry['input']] = InputValuesTracker(entry)
Example #30
0
def main():
    "Main function"
    optmgr = DASOptionParser()
    (opts, _args) = optmgr.getOpt()

    dburi     = 'mongodb://%s:%s' % (opts.host, opts.port)
    dasconfig = das_readconfig()
    dbname, colname = opts.db.split('.')
    mongodb   = dict(dburi=dburi)
    mappingdb = dict(dbname=dbname, collname=colname)
    config    = dict(verbose=opts.debug, mappingdb=mappingdb,
                mongodb=mongodb, services=dasconfig['das'].get('services', []))

    mgr = DASMapping(config)

    if  opts.listapis:
        apis = mgr.list_apis(opts.system)
        print apis
        sys.exit(0)

    if  opts.listkeys:
        keys = mgr.daskeys(opts.system)
        print keys
        sys.exit(0)

    if  opts.umap:
        for rec in read_service_map(opts.umap, field='uri'):
            if  opts.debug:
                print rec
            spec = {'url':rec['url'], 'urn':rec['urn']}
            mgr.remove(spec) # remove previous record
            mgr.add(rec)

    if  opts.nmap:
        for rec in read_service_map(opts.nmap, field='notations'):
            if  opts.debug:
                print rec
            system = rec['system']
            spec = {'notations':{'$exists':True}, 'system':system}
            mgr.remove(spec) # remove previous record
            mgr.add(rec)

    if  opts.pmap:
        for rec in read_service_map(opts.pmap, field='presentation'):
            if  opts.debug:
                print rec
            spec = {'presentation':{'$exists':True}}
            mgr.remove(spec) # remove previous record
            mgr.add(rec)

    if  opts.clean:
        mgr.delete_db()
        mgr.create_db()
        # I need to clear DAS cache/merge since I don't know
        # a-priory what kind of changes new maps will bring
        conn   = db_connection(dburi)
        dbname = dasconfig['dasdb']['dbname']
        cache  = conn[dbname][dasconfig['dasdb']['cachecollection']]
        cache.remove({})
        merge  = conn[dbname][dasconfig['dasdb']['mergecollection']]
        merge.remove({})

    if  opts.remove:
        mgr.remove(opts.remove)
Example #31
0
File: das_core.py Project: ktf/DAS
    def __init__(self, config=None, debug=0,
                nores=False, logger=None, engine=None, multitask=True):
        """
        Initialize DAS core: read configuration, set up logging and the
        task manager, create Mapping/Analytics/KeyLearning managers and
        the raw cache, then load every registered data-service plugin.

        :param config: pre-built DAS configuration dict; when None the
            configuration is read from disk via das_readconfig()
        :param debug: integer verbosity level; also stored as stdout flag
        :param nores: when True, enable write_cache and suppress results
        :param logger: external logger; a PrintManager is created otherwise
        :param engine: optional engine used to build a PluginTaskManager
        :param multitask: allow a multi-threaded task manager
        """
        if  config:
            dasconfig = config
        else:
            dasconfig = das_readconfig()
        verbose       = dasconfig['verbose']
        self.stdout   = debug
        # an integer debug level overrides the configured verbosity
        if  isinstance(debug, int):
            self.verbose = debug
            dasconfig['verbose'] = debug
        else:
            self.verbose = verbose
        das_timer('DASCore::init', self.verbose)
        self.operators = das_operators()

        # set noresults option
        self.noresults = False
        if  nores:
            dasconfig['write_cache'] = True
            self.noresults = nores

        self.multitask = dasconfig['das'].get('multitask', True)
        if  debug or self.verbose:
            self.multitask = False # in verbose mode do not use multitask
            dasconfig['das']['multitask'] = False
        if  not multitask: # explicitly call DASCore ctor, e.g. in analytics
            self.multitask = False
            dasconfig['das']['multitask'] = False
        dasconfig['engine'] = engine
        # choose task manager flavor: plugin-based when an engine is given,
        # plain TaskManager otherwise; None when multitasking is disabled
        if  self.multitask:
            nworkers = dasconfig['das'].get('core_workers', 5)
            if  engine:
                thr_name = 'DASCore:PluginTaskManager'
                self.taskmgr = PluginTaskManager(\
                        engine, nworkers=nworkers, name=thr_name)
                self.taskmgr.subscribe()
            else:
                thr_name = 'DASCore:TaskManager'
                self.taskmgr = TaskManager(nworkers=nworkers, name=thr_name)
        else:
            self.taskmgr = None

        if  logger:
            self.logger = logger
        else:
            self.logger = PrintManager('DASCore', self.verbose)

        # define Mapping/Analytics/Parser in this order since Parser depends
        # on first two
        dasmapping = DASMapping(dasconfig)
        dasconfig['dasmapping'] = dasmapping
        self.mapping = dasmapping

        self.analytics = DASAnalytics(dasconfig)
        dasconfig['dasanalytics'] = self.analytics

        self.keylearning = DASKeyLearning(dasconfig)
        dasconfig['keylearning'] = self.keylearning

        # init DAS cache
        self.rawcache = DASMongocache(dasconfig)
        dasconfig['rawcache'] = self.rawcache

        # plug-in architecture: loop over registered data-services in
        # dasconfig; load appropriate module/class; register data
        # service with DASCore.
        self.systems = dasmapping.list_systems()
        # pointer to the DAS top level directory
        dasroot = '/'.join(__file__.split('/')[:-3])
        for name in self.systems:
            try:
                # scan the service module's source text to discover the
                # name of the DASAbstractService subclass it defines
                klass  = 'DAS/services/%s/%s_service.py' \
                    % (name, name)
                srvfile = os.path.join(dasroot, klass)
                with file(srvfile) as srvclass:
                    for line in srvclass:
                        if  line.find('(DASAbstractService)') != -1:
                            klass = line.split('(DASAbstractService)')[0]
                            klass = klass.split('class ')[-1] 
                            break
                mname  = 'DAS.services.%s.%s_service' % (name, name)
                module = __import__(mname, fromlist=[klass])
                obj = getattr(module, klass)(dasconfig)
                setattr(self, name, obj)
                SERVICES[name] = obj
            except IOError as err:
                if  debug > 1:
                    # we have virtual services, so IOError can be correct
                    print_exc(err)
                # no module file on disk: fall back to the generic service
                try:
                    mname  = 'DAS.services.generic_service'
                    module = __import__(mname, fromlist=['GenericService'])
                    obj    = module.GenericService(name, dasconfig)
                    setattr(self, name, obj)
                except Exception as exc:
                    print_exc(exc)
                    msg = "Unable to load %s data-service plugin" % name
                    raise Exception(msg)
            except Exception as exc:
                print_exc(exc)
                msg = "Unable to load %s data-service plugin" % name
                raise Exception(msg)

        # loop over systems and get system keys, add mapping keys to final list
        self.service_keys = {}
        self.service_parameters = {}
        for name in self.systems: 
            skeys = getattr(self, name).keys()
            self.service_keys[getattr(self, name).name] = skeys
            sparams = getattr(self, name).parameters()
            self.service_parameters[getattr(self, name).name] = sparams

        self.service_keys['special'] = das_special_keys()
        self.dasconfig = dasconfig
        das_timer('DASCore::init', self.verbose)
Example #32
0
class testDASMapping(unittest.TestCase):
    """
    A test class for the DAS mappingdb class
    """
    def setUp(self):
        """
        set up DAS core module
        """
        debug = 0
        self.db = 'test_mapping.db'
        config = deepcopy(das_readconfig())
        dburi = config['mongodb']['dburi']
        config['logger'] = PrintManager('TestDASMapping', verbose=debug)
        config['verbose'] = debug
        dbname = 'test_mapping'
        collname = 'db'
        config['mappingdb'] = dict(dburi=dburi, dbname=dbname,
                                   collname=collname)
        # start from a clean mapping database
        conn = MongoClient(dburi)
        conn.drop_database(dbname)
        self.coll = conn[dbname][collname]
        # presentation map consumed by test_presentation
        self.pmap = {"presentation":
                         {"block": [{"ui": "Block name",
                                     "das": "block.name"},
                                    {"ui": "Block size",
                                     "das": "block.size"}]},
                     "type": "presentation"}
        self.coll.insert(self.pmap)

        # minimal DBS3 service record
        url = 'https://cmsweb.cern.ch/dbs/prod/global/DBSReader/acquisitioneras/'
        rec = {'system': 'dbs3',
               'urn': 'acquisitioneras',
               'format': 'JSON',
               'instances': ['prod/global'],
               'url': url,
               'expire': 100,
               'lookup': 'era',
               'params': {},
               'das_map': [{"das_key": "era", "rec_key": "era.name",
                            "api_arg": "era"}],
               'type': 'service'}
        self.coll.insert(rec)

        # verification token so DASMapping accepts the seeded map collection
        ver_token = verification_token(self.coll.find(**PYMONGO_OPTS))
        self.coll.insert({'verification_token': ver_token,
                          'type': 'verification_token'})

        self.mgr = DASMapping(config)

    def tearDown(self):
        """Invoke after each test"""
        self.mgr.delete_db()

    def test_api(self):
        """test methods for api table"""
        self.mgr.delete_db()

        system = 'dbs3'
        url = 'https://cmsweb.cern.ch/dbs/prod/global/DBSReader'
        dformat = 'JSON'
        expire = 100
        instances = ["prod/global", "prod/phys01"]

        # first API record: primarydatasets
        api = 'primarydatasets'
        params = {"primary_ds_name": "*"}
        rec = {'system': system,
               'urn': api,
               'format': dformat,
               'url': url,
               'instances': instances,
               'params': params,
               'expire': expire,
               'lookup': 'primary_dataset',
               'wild_card': '*',
               'das_map': [dict(das_key='primary_dataset',
                                rec_key='primary_dataset.name',
                                api_arg='primary_dataset')],
               'type': 'service'}
        self.mgr.add(rec)
        # expected servicemap entry for the record above
        smap = {api: {'url': url,
                      'expire': expire,
                      'keys': ['primary_dataset'],
                      'format': dformat,
                      'wild_card': '*',
                      'cert': None,
                      'ckey': None,
                      'services': '',
                      'lookup': 'primary_dataset',
                      'params': params}}

        # second API record: datasetaccesstypes
        rec = {'system': system,
               'urn': 'datasetaccesstypes',
               'format': dformat,
               'instances': instances,
               'url': url,
               'expire': expire,
               'lookup': 'status',
               'params': {'status': '*'},
               'das_map': [{"das_key": "status", "rec_key": "status.name",
                            "api_arg": "status"}],
               'type': 'service'}
        self.mgr.add(rec)

        api = 'datasetaccesstypes'
        daskey = 'status'
        rec_key = 'status.name'
        api_input = 'status'

        # system and API discovery
        res = self.mgr.list_systems()
        self.assertEqual([system], res)

        res = self.mgr.list_apis()
        res.sort()
        self.assertEqual(['datasetaccesstypes', 'primarydatasets'], res)

        res = self.mgr.lookup_keys(system, api, daskey)
        self.assertEqual([rec_key], res)

        # DAS key <-> API argument translation
        value = ''
        res = self.mgr.das2api(system, api, rec_key, value)
        self.assertEqual([api_input], res)

        # adding another params which default is None
        res = self.mgr.das2api(system, api, rec_key, value)
        self.assertEqual([api_input], res)

        res = self.mgr.api2das(system, api_input)
        self.assertEqual([daskey], res)

        # adding notations
        notations = {'system': system,
                     'type': 'notation',
                     'notations': [{'api_output': 'storage_element_name',
                                    'rec_key': 'se',
                                    'api': ''},
                                   {'api_output': 'number_of_events',
                                    'rec_key': 'nevents',
                                    'api': ''}]}
        self.mgr.add(notations)

        res = self.mgr.notation2das(system, 'number_of_events')
        self.assertEqual('nevents', res)

        # API keys
        res = self.mgr.api2daskey(system, api)
        self.assertEqual(['status'], res)

        # build service map
        smap.update({api: {'url': url,
                           'expire': expire,
                           'cert': None,
                           'ckey': None,
                           'keys': ['status'],
                           'format': dformat,
                           'wild_card': '*',
                           'services': '',
                           'lookup': daskey,
                           'params': {"status": "*"}}})
        res = self.mgr.servicemap(system)
        self.assertEqual(smap, res)

    def test_presentation(self):
        """test presentation method"""
        expect = self.pmap['presentation']['block']
        result = self.mgr.presentation('block')
        self.assertEqual(expect, result)

    def test_notations(self):
        """test notations method"""
        system = "test"
        rec = {'notations': [{"api_output": "site.resource_element.cms_name",
                              "rec_key": "site.name",
                              "api": ""},
                             {"api_output": "site.resource_pledge.cms_name",
                              "rec_key": "site.name",
                              "api": ""},
                             {"api_output": "admin.contacts.cms_name",
                              "rec_key": "site.name",
                              "api": ""}],
               "system": system,
               "type": "notation"}
        self.mgr.add(rec)
        expect = rec['notations']
        result = self.mgr.notations(system)[system]
        self.assertEqual(expect, result)