Example #1
    def __init__(self, config=None):
        if  not config:
            config = das_readconfig()
        if  not config.has_key('dasmapping'):
            config['dasmapping'] = DASMapping(config)
        if  not config.has_key('dasanalytics'):
            config['dasanalytics'] = DASAnalytics(config)
        if  not config['dasmapping'].check_maps():
            msg = "No DAS maps found in MappingDB"
            raise Exception(msg)
        self.map         = config['dasmapping']
        self.analytics   = config['dasanalytics']
        self.dasservices = config['services']
        self.daskeysmap  = self.map.daskeys()
        self.operators   = list(das_operators())
        self.daskeys     = list(das_special_keys())
        self.verbose     = config['verbose']
        self.logger      = PrintManager('QLManger', self.verbose)
        for val in self.daskeysmap.values():
            for item in val:
                self.daskeys.append(item)
        parserdir   = config['das']['parserdir']
        self.dasply = DASPLY(parserdir, self.daskeys, self.dasservices, 
                verbose=self.verbose)

        self.enabledb = config['parserdb']['enable']
        if  self.enabledb:
            self.parserdb = DASParserDB(config)
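The constructor above backfills missing entries of the configuration dictionary (dasmapping, dasanalytics) before validating the maps. A minimal, self-contained sketch of that backfill-then-validate pattern in Python 3, where the 'key' in config test replaces the Python 2 has_key call; DummyMapping and init_config are made-up stand-ins for illustration, not part of DAS:

    class DummyMapping:
        """Hypothetical stand-in for DASMapping, used only for illustration."""
        def __init__(self, config):
            self.config = config
        def check_maps(self):
            return True

    def init_config(config=None):
        """Backfill missing configuration entries, then validate the maps."""
        config = config or {}
        # create the entry only when it is absent, mirroring the
        # has_key check followed by an assignment in the constructor above
        if 'dasmapping' not in config:
            config['dasmapping'] = DummyMapping(config)
        if not config['dasmapping'].check_maps():
            raise Exception("No DAS maps found in MappingDB")
        return config

    cfg = init_config()
    print(sorted(cfg))  # ['dasmapping']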
Example #2
 def service_apis_map(self, query):
     """
     Find out which APIs correspond to the provided query.
     Return a map of the found services and their APIs.
     """
     skeys, cond = decompose(query)
     if not skeys:
         skeys = []
     if isinstance(skeys, str):
         skeys = [skeys]
     adict = {}
     mapkeys = [
         key for key in list(cond.keys()) if key not in das_special_keys()
     ]
     services = self.services(query)
     for srv in services:
         alist = self.dasmapping.list_apis(srv)
         for api in alist:
             daskeys = self.dasmapping.api_info(srv, api)['das_map']
             maps = [r['rec_key'] for r in daskeys]
             if set(mapkeys) & set(maps) == set(mapkeys):
                 if srv in adict:
                     new_list = adict[srv] + [api]
                     adict[srv] = list(set(new_list))
                 else:
                     adict[srv] = [api]
     return adict
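The acceptance test above, set(mapkeys) & set(maps) == set(mapkeys), keeps an API only when every non-special condition key appears in that API's das_map, i.e. when mapkeys is a subset of maps. A tiny standalone illustration of that subset test (the key names below are made up for the example):

    mapkeys = ['dataset', 'site']          # non-special condition keys from the query
    maps    = ['dataset', 'site', 'run']   # rec_key values exposed by one API

    # intersection equal to mapkeys is the same as a subset test
    accepted = set(mapkeys) & set(maps) == set(mapkeys)
    same     = set(mapkeys).issubset(maps)
    print(accepted, same)  # True True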
Example #3
 def service_apis_map(self, query):
     """
     Find out which APIs correspond to the provided query.
     Return a map of the found services and their APIs.
     """
     skeys, cond = decompose(query)
     if  not skeys:
         skeys = []
     if  isinstance(skeys, str):
         skeys = [skeys]
     adict = {}
     mapkeys = [key for key in cond.keys() if key not in das_special_keys()]
     services = self.services(query)
     for srv in services:
         alist = self.dasmapping.list_apis(srv)
         for api in alist:
             daskeys = self.dasmapping.api_info(srv, api)['das_map']
             maps = [r['rec_key'] for r in daskeys]
             if  set(mapkeys) & set(maps) == set(mapkeys): 
                 if  srv in adict:
                     new_list = adict[srv] + [api]
                     adict[srv] = list( set(new_list) )
                 else:
                     adict[srv] = [api]
     return adict
Example #4
    def __init__(self, parserdir, daskeys, dassystems,
                 operators=None, specials=None, filters=None,
                 aggregators=None, mapreduces=None, verbose=0):
        self.daskeys = daskeys
        self.verbose = verbose
        self.lexer   = None # defined at run time using self.build()
        self.parser  = None # defined at run time using self.build()

        self.dassystems = dassystems
        self.parsertab_dir = parserdir
        if  not os.path.isdir(self.parsertab_dir):
            msg = 'Directory %s does not exist' % self.parsertab_dir
            raise Exception(msg)
        
        # test if we have been given a list of desired operators/filters
        # /aggregators, if not get the lists from das_ql
        operators = operators if operators else das_operators()
        filters = filters if filters else das_filters()
        aggregators = aggregators if aggregators else das_aggregators()
        mapreduces = mapreduces if mapreduces else das_mapreduces()
        specials = specials if specials else das_special_keys()
        
        # build a map of token->token.type which we can use in the
        # enlarged VALUE rule
        self.tokenmap = {}
        for o in operators:
            self.tokenmap[o] = 'OPERATOR'
        for f in filters:
            self.tokenmap[f] = 'FILTER'
        for a in aggregators:
            self.tokenmap[a] = 'AGGREGATOR'
        for m in mapreduces:
            self.tokenmap[m] = 'MAPREDUCE'
        for s in specials:
            self.tokenmap[s] = 'SPECIALKEY'
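The five loops above simply tag every known token with its type before the lexer is built. A behavior-equivalent way to build the same map with a dictionary comprehension; the token lists below are illustrative placeholders, not the actual values returned by das_ql:

    operators   = ['=', 'in', 'between']
    filters     = ['grep', 'unique']
    aggregators = ['sum', 'count']
    mapreduces  = []
    specials    = ['date', 'system']

    # one pass over (token list, token type) pairs instead of five loops
    tokenmap = {tok: ttype
                for toks, ttype in [(operators, 'OPERATOR'),
                                    (filters, 'FILTER'),
                                    (aggregators, 'AGGREGATOR'),
                                    (mapreduces, 'MAPREDUCE'),
                                    (specials, 'SPECIALKEY')]
                for tok in toks}
    print(tokenmap['grep'])  # FILTER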
Example #5
 def __init__(self,
              daskeys,
              dassystems,
              operators=None,
              specials=None,
              filters=None,
              aggregators=None,
              mapreduces=None,
              verbose=0):
     super(DASQueryParser, self).__init__()
     self.daskeys = daskeys + ['queries', 'records']
     self.verbose = verbose
     self.dassystems = dassystems
     # test if we have been given a list of desired operators/filters
     # /aggregators, if not get the lists from das_ql
     self.operators = operators if operators else das_operators()
     self.filters = filters if filters else das_filters()
     self.aggregators = aggregators if aggregators else das_aggregators()
     self.mapreduces = mapreduces if mapreduces else das_mapreduces()
     self.specials = specials if specials else das_special_keys()
     if self.verbose:
         print("operators", self.operators)
         print('filters', self.filters)
         print('mapreduces', self.mapreduces)
         print('specials', self.specials)
     self.daskeys += self.specials
Example #6
 def __init__(self, config=None):
     if not config:
         config = das_readconfig()
     self.dasmapping = DASMapping(config)
     if not self.dasmapping.check_maps():
         msg = "No DAS maps found in MappingDB"
         raise Exception(msg)
     self.dasservices = config['services']
     self.daskeysmap = self.dasmapping.daskeys()
     self.operators = list(das_operators())
     self.daskeys = list(das_special_keys())
     self.verbose = config['verbose']
     self.logger = PrintManager('QLManger', self.verbose)
     for val in self.daskeysmap.values():
         for item in val:
             self.daskeys.append(item)
Example #7
 def __init__(self, config=None):
     if  not config:
         config = das_readconfig()
     self.dasmapping  = DASMapping(config)
     if  not self.dasmapping.check_maps():
         msg = "No DAS maps found in MappingDB"
         raise Exception(msg)
     self.dasservices = config['services']
     self.daskeysmap  = self.dasmapping.daskeys()
     self.operators   = list(das_operators())
     self.daskeys     = list(das_special_keys())
     self.verbose     = config['verbose']
     self.logger      = PrintManager('QLManger', self.verbose)
     for val in self.daskeysmap.values():
         for item in val:
             self.daskeys.append(item)
Example #8
 def __init__(self, daskeys, dassystems,
              operators=None, specials=None, filters=None,
              aggregators=None, mapreduces=None, verbose=0):
     super(DASQueryParser, self).__init__()
     self.daskeys = daskeys + ['queries', 'records']
     self.verbose = verbose
     self.dassystems = dassystems
     # test if we have been given a list of desired operators/filters
     # /aggregators, if not get the lists from das_ql
     self.operators = operators if operators else das_operators()
     self.filters = filters if filters else das_filters()
     self.aggregators = aggregators if aggregators else das_aggregators()
     self.mapreduces = mapreduces if mapreduces else das_mapreduces()
     self.specials = specials if specials else das_special_keys()
     if  self.verbose:
         print("operators", self.operators)
         print('filters', self.filters)
         print('mapreduces', self.mapreduces)
         print('specials', self.specials)
     self.daskeys += self.specials
Example #9
    def __init__(self, config=None, debug=0,
                nores=False, logger=None, engine=None, multitask=True):
        if  config:
            dasconfig = config
        else:
            dasconfig = das_readconfig()
        verbose       = dasconfig['verbose']
        self.stdout   = debug
        if  isinstance(debug, int):
            self.verbose = debug
            dasconfig['verbose'] = debug
        else:
            self.verbose = verbose
        das_timer('DASCore::init', self.verbose)
        self.operators = das_operators()

        # set noresults option
        self.noresults = False
        if  nores:
            dasconfig['write_cache'] = True
            self.noresults = nores

        self.multitask = dasconfig['das'].get('multitask', True)
        if  debug or self.verbose:
            self.multitask = False # in verbose mode do not use multitask
            dasconfig['das']['multitask'] = False
        if  not multitask: # explicitly call DASCore ctor, e.g. in analytics
            self.multitask = False
            dasconfig['das']['multitask'] = False
        dasconfig['engine'] = engine
        if  self.multitask:
            nworkers = dasconfig['das'].get('core_workers', 5)
            if  engine:
                thr_name = 'DASCore:PluginTaskManager'
                self.taskmgr = PluginTaskManager(\
                        engine, nworkers=nworkers, name=thr_name)
                self.taskmgr.subscribe()
            else:
                thr_name = 'DASCore:TaskManager'
                self.taskmgr = TaskManager(nworkers=nworkers, name=thr_name)
        else:
            self.taskmgr = None

        if  logger:
            self.logger = logger
        else:
            self.logger = PrintManager('DASCore', self.verbose)

        # define Mapping/Analytics/Parser in this order since Parser depends
        # on first two
        dasmapping = DASMapping(dasconfig)
        dasconfig['dasmapping'] = dasmapping
        self.mapping = dasmapping

        self.analytics = DASAnalytics(dasconfig)
        dasconfig['dasanalytics'] = self.analytics

        self.keylearning = DASKeyLearning(dasconfig)
        dasconfig['keylearning'] = self.keylearning

        # init DAS cache
        self.rawcache = DASMongocache(dasconfig)
        dasconfig['rawcache'] = self.rawcache

        # plug-in architecture: loop over registered data-services in
        # dasconfig; load appropriate module/class; register data
        # service with DASCore.
        self.systems = dasmapping.list_systems()
        # pointer to the DAS top level directory
        dasroot = '/'.join(__file__.split('/')[:-3])
        for name in self.systems:
            try:
                klass  = 'DAS/services/%s/%s_service.py' \
                    % (name, name)
                srvfile = os.path.join(dasroot, klass)
                with file(srvfile) as srvclass:
                    for line in srvclass:
                        if  line.find('(DASAbstractService)') != -1:
                            klass = line.split('(DASAbstractService)')[0]
                            klass = klass.split('class ')[-1] 
                            break
                mname  = 'DAS.services.%s.%s_service' % (name, name)
                module = __import__(mname, fromlist=[klass])
                obj = getattr(module, klass)(dasconfig)
                setattr(self, name, obj)
                SERVICES[name] = obj
            except IOError as err:
                if  debug > 1:
                    # we have virtual services, so IOError can be correct
                    print_exc(err)
                try:
                    mname  = 'DAS.services.generic_service'
                    module = __import__(mname, fromlist=['GenericService'])
                    obj    = module.GenericService(name, dasconfig)
                    setattr(self, name, obj)
                except Exception as exc:
                    print_exc(exc)
                    msg = "Unable to load %s data-service plugin" % name
                    raise Exception(msg)
            except Exception as exc:
                print_exc(exc)
                msg = "Unable to load %s data-service plugin" % name
                raise Exception(msg)

        # loop over systems and get system keys, add mapping keys to final list
        self.service_keys = {}
        self.service_parameters = {}
        for name in self.systems: 
            skeys = getattr(self, name).keys()
            self.service_keys[getattr(self, name).name] = skeys
            sparams = getattr(self, name).parameters()
            self.service_parameters[getattr(self, name).name] = sparams

        self.service_keys['special'] = das_special_keys()
        self.dasconfig = dasconfig
        das_timer('DASCore::init', self.verbose)
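The plug-in loop above discovers the service class name by scanning the module source for '(DASAbstractService)' and then imports and instantiates it with __import__. A minimal sketch of the dynamic-import step alone, using importlib from the standard library; the module and class names in the commented usage are hypothetical and do not exist in a stock installation:

    import importlib

    def load_service(mname, klass, config):
        """Import module mname and instantiate its class klass with config."""
        module = importlib.import_module(mname)
        return getattr(module, klass)(config)

    # hypothetical usage, mirroring the loop above:
    # obj = load_service('DAS.services.foo.foo_service', 'FooService', dasconfig)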
Example #10
    def __init__(self,
                 config=None,
                 debug=0,
                 nores=False,
                 logger=None,
                 engine=None,
                 multitask=True):
        if config:
            dasconfig = config
        else:
            dasconfig = das_readconfig()
        verbose = dasconfig['verbose']
        self.stdout = debug
        if isinstance(debug, int) and debug:
            self.verbose = debug
            dasconfig['verbose'] = debug
        else:
            self.verbose = verbose
        das_timer('DASCore::init', self.verbose)
        self.operators = das_operators()
        self.collect_wait_time = dasconfig['das'].get('collect_wait_time', 120)

        # set noresults option
        self.noresults = False
        if nores:
            dasconfig['write_cache'] = True
            self.noresults = nores

        self.init_expire = dasconfig['das'].get('init_expire', 5 * 60)
        self.multitask = dasconfig['das'].get('multitask', True)
        if debug or self.verbose:
            self.multitask = False  # in verbose mode do not use multitask
            dasconfig['das']['multitask'] = False
        if not multitask:  # explicitly call DASCore ctor
            self.multitask = False
            dasconfig['das']['multitask'] = False
        dasconfig['engine'] = engine
        if self.multitask:
            nworkers = dasconfig['das'].get('core_workers', 5)
            #             if  engine:
            #                 thr_name = 'DASCore:PluginTaskManager'
            #                 self.taskmgr = PluginTaskManager(\
            #                         engine, nworkers=nworkers, name=thr_name)
            #                 self.taskmgr.subscribe()
            #             else:
            #                 thr_name = 'DASCore:TaskManager'
            #                 self.taskmgr = TaskManager(nworkers=nworkers, name=thr_name)
            thr_name = 'DASCore:TaskManager'
            self.taskmgr = TaskManager(nworkers=nworkers, name=thr_name)
        else:
            self.taskmgr = None

        if logger:
            self.logger = logger
        else:
            self.logger = PrintManager('DASCore', self.verbose)

        # define Mapping/Analytics/Parser in this order since Parser depends
        # on first two
        dasmapping = DASMapping(dasconfig)
        dasconfig['dasmapping'] = dasmapping
        self.mapping = dasmapping

        self.keylearning = DASKeyLearning(dasconfig)
        dasconfig['keylearning'] = self.keylearning

        # init DAS cache
        self.rawcache = DASMongocache(dasconfig)
        dasconfig['rawcache'] = self.rawcache

        # plug-in architecture: loop over registered data-services in
        # dasconfig; load appropriate module/class; register data
        # service with DASCore.
        self.systems = dasmapping.list_systems()
        # pointer to the DAS top level directory
        dasroot = '/'.join(__file__.split('/')[:-3])
        for name in self.systems:
            try:
                klass  = 'DAS/services/%s/%s_service.py' \
                    % (name, name)
                srvfile = os.path.join(dasroot, klass)
                with open(srvfile) as srvclass:
                    for line in srvclass:
                        if line.find('(DASAbstractService)') != -1:
                            klass = line.split('(DASAbstractService)')[0]
                            klass = klass.split('class ')[-1]
                            break
                mname = 'DAS.services.%s.%s_service' % (name, name)
                module = __import__(mname, fromlist=[klass])
                obj = getattr(module, klass)(dasconfig)
                setattr(self, name, obj)
            except IOError as err:
                if debug > 1:
                    # we have virtual services, so IOError can be correct
                    print_exc(err)
                try:
                    mname = 'DAS.services.generic_service'
                    module = __import__(mname, fromlist=['GenericService'])
                    obj = module.GenericService(name, dasconfig)
                    setattr(self, name, obj)
                except Exception as exc:
                    print_exc(exc)
                    msg = "Unable to load %s data-service plugin" % name
                    raise Exception(msg)
            except Exception as exc:
                print_exc(exc)
                msg = "Unable to load %s data-service plugin" % name
                raise Exception(msg)

        # loop over systems and get system keys, add mapping keys to final list
        self.service_keys = {}
        self.service_parameters = {}
        for name in self.systems:
            skeys = list(getattr(self, name).keys())
            self.service_keys[getattr(self, name).name] = skeys
            sparams = getattr(self, name).parameters()
            self.service_parameters[getattr(self, name).name] = sparams

        self.service_keys['special'] = das_special_keys()
        self.dasconfig = dasconfig
        das_timer('DASCore::init', self.verbose)
Example #11
    def apimap(self, dasquery):
        """
        Analyze input query and yield url, api, args, format, expire
        for further processing.
        """
        cond  = getarg(dasquery.mongo_query, 'spec', {})
        instance = dasquery.mongo_query.get('instance', self.dbs_global)
        skeys = getarg(dasquery.mongo_query, 'fields', [])
        if  not skeys:
            skeys = []
        self.logger.info("\n")
        for api, value in self.map.iteritems():
            expire = value['expire']
            iformat = value['format']
            url    = self.adjust_url(value['url'], instance)
            args   = dict(value['params']) # make new copy, since we'll adjust
            wild   = value.get('wild_card', '*')
            found  = 0
            for key, val in cond.iteritems():
                # check if key is a special one
                if  key in das_special_keys():
                    found += 1
                # check if keys from conditions are accepted by API.
                if  self.dasmapping.check_dasmap(self.name, api, key, val):
                    # need to convert key (which is daskeys.map) into
                    # input api parameter
                    for apiparam in \
                        self.dasmapping.das2api(self.name, key, val, api):
                        if  args.has_key(apiparam):
                            args[apiparam] = val
                            found += 1
                else:
                    found = 0
                    break # condition key does not map into API params
            self.adjust_params(api, args, instance)
            if  not found:
                msg = "--- rejects API %s, parameters don't match" % api
                self.logger.info(msg)
                msg = 'args=%s' % args
                self.logger.debug(msg)
                continue
            # delete args keys whose value is optional
            delete_keys(args, 'optional')
            # check that there is no "required" parameter left in args,
            # since such api will not work
            if 'required' in args.values():
                msg = '--- rejects API %s, parameter is required' % api
                self.logger.info(msg)
                msg = 'args=%s' % args
                self.logger.debug(msg)
                continue
            # adjust pattern symbols in arguments
            if  wild != '*':
                for key, val in args.iteritems():
                    if  isinstance(val, str) or isinstance(val, unicode):
                        val   = val.replace('*', wild)
                    args[key] = val

            prim_key = self.dasmapping.primary_key(self.name, api)
            if  prim_key not in skeys:
                msg = "--- rejects API %s, primary_key %s is not selected"\
                        % (api, prim_key)
                self.logger.info(msg)
                continue

            msg = '+++ %s passes API %s' % (self.name, api)
            self.logger.info(msg)
            msg = 'args=%s' % args
            self.logger.debug(msg)

            msg  = "yield "
            msg += "system ***%s***, url=%s, api=%s, args=%s, format=%s, " \
                % (self.name, url, api, args, iformat)
            msg += "expire=%s, wild_card=%s" \
                % (expire, wild)
            self.logger.debug(msg)

            yield url, api, args, iformat, expire
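Since apimap is a generator, a caller iterates over it and issues one data-service call per yielded (url, api, args, format, expire) tuple. A self-contained usage sketch with a toy generator that only imitates the yield shape; nothing here is a real DAS API:

    def fake_apimap():
        """Toy generator with the same yield shape as apimap, for illustration."""
        yield ('https://example.org/api', 'listDatasets',
               {'dataset': '/A/B/C'}, 'JSON', 3600)

    for url, api, args, iformat, expire in fake_apimap():
        # a real service would build and issue the HTTP request here
        print(api, url, args, iformat, expire)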