Code example #1
File: runregistry_service.py  Project: dmwm/DAS
 def apicall(self, dasquery, url, api, args, dformat, expire):
     """
     A service worker. It parses the input query, invokes the service API
     and returns the results as a list of rows.
     """
     _query  = ''
     _table  = 'runsummary'
     if  api == 'rr_xmlrpc_lumis':
         _table = 'runlumis'
     for key, val in dasquery.mongo_query['spec'].items():
         if  key == 'run.run_number':
             if  isinstance(val, int):
                 _query = {'runNumber': '%s' % val}
             elif isinstance(val, dict):
                 minrun = 0
                 maxrun = 0
                 for kkk, vvv in val.items():
                     if  kkk == '$in':
                         runs = ' or '.join([str(r) for r in vvv])
                         _query = {'runNumber': runs}
                     elif kkk == '$lte':
                         maxrun = vvv
                     elif kkk == '$gte':
                         minrun = vvv
                 if  minrun and maxrun:
                     _query = {'runNumber': '>= %s and < %s' % (minrun, maxrun)}
         elif key == 'date':
             if  isinstance(val, dict):
                 if  '$in' in val:
                     value = val['$in']
                 elif '$lte' in val and '$gte' in val:
                     value = (val['$gte'], val['$lte'])
                 else:
                     msg = 'Unable to get the value from %s=%s' \
                             % (key, val) 
                     raise Exception(msg)
                 try:
                     date1 = convert_datetime(value[0])
                     date2 = convert_datetime(value[-1])
                 except:
                     msg = 'Unable to convert to datetime format, %s' \
                         % value
                     raise Exception(msg)
             elif  isinstance(val, str) or isinstance(val, unicode):
                 date1, date2 = convert2date(val)
                 date1 = rr_date(date1)
                 date2 = rr_date(date2)
             else:
                 date1 = convert_datetime(val)
                 date2 = convert_datetime(val + 24*60*60)
             run_time = '>= %s and < %s' % (date1, date2)
             _query = {'runStartTime': run_time}
         else:
             msg  = 'RunRegistryService::api\n\n'
             msg += "--- %s reject API %s, parameters don't match, args=%s" \
                     % (self.name, api, args)
             self.logger.info(msg)
             return
     if  not _query:
         msg = 'Unable to match input parameters with input query'
         raise Exception(msg)
     if  'run' in args and isinstance(args['run'], dict):
         args['run'] = str(args['run'])
     msg = "DASAbstractService:RunRegistry, query=%s" % _query
     self.logger.info(msg)
     time0   = time.time()
     rawrows = rr_worker(url, _query, _table)
     genrows = self.translator(api, rawrows)
     if  _table == 'runsummary':
         dasrows = run_duration(genrows)
     else:
         dasrows = collect_lumis(genrows)
     ctime   = time.time() - time0
     try:
         self.write_to_cache(\
             dasquery, expire, url, api, args, dasrows, ctime)
     except Exception as exc:
         print_exc(exc)
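
The loop above translates the MongoDB-style conditions in dasquery.mongo_query['spec'] into a RunRegistry filter. Below is a minimal, self-contained sketch of just the run.run_number part of that translation; the helper name run_number_filter is hypothetical, since the real apicall builds the same strings inline.

def run_number_filter(val):
    """Translate a MongoDB-style run spec into a RunRegistry filter dict.

    Illustrative sketch only, not part of the DAS code base.
    """
    if isinstance(val, int):
        return {'runNumber': '%s' % val}
    if isinstance(val, dict):
        if '$in' in val:
            # an explicit run list becomes an OR'ed expression
            return {'runNumber': ' or '.join(str(r) for r in val['$in'])}
        minrun = val.get('$gte', 0)
        maxrun = val.get('$lte', 0)
        if minrun and maxrun:
            return {'runNumber': '>= %s and < %s' % (minrun, maxrun)}
    raise ValueError('unsupported run spec: %r' % val)

if __name__ == '__main__':
    print(run_number_filter(160915))                            # single run
    print(run_number_filter({'$in': [160915, 160916]}))         # run list
    print(run_number_filter({'$gte': 160915, '$lte': 160920}))  # run range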
Code example #2
File: dbs_service.py  Project: zdenekmaxa/DAS
    def adjust_params(self, api, kwds, inst=None):
        """
        Adjust DBS2 parameters for specific query requests
        """
        if  api == 'fakeRun4Block':
            val = kwds['block']
            if  val != 'required':
                kwds['query'] = 'find run where block=%s' % val
            else:
                kwds['query'] = 'required'
            kwds.pop('block')
        if  api == 'fakeStatus':
            val = kwds['status']
            if  val:
                kwds['query'] = \
                'find dataset.status where dataset.status=%s' % val
            else:
                kwds['query'] = 'find dataset.status'
            val = kwds['dataset']
            if  val:
                if  kwds['query'].find(' where ') != -1:
                    kwds['query'] += ' and dataset=%s' % val
                else:
                    kwds['query'] += ' where dataset=%s' % val
            kwds.pop('status')
        if  api == 'listPrimaryDatasets':
            pat = kwds['pattern']
            if  pat[0] == '/':
                kwds['pattern'] = pat.split('/')[1]
        if  api == 'listProcessedDatasets':
            pat = kwds['processed_datatset_name_pattern']
            if  pat[0] == '/':
                try:
                    kwds['processed_datatset_name_pattern'] = pat.split('/')[2]
                except:
                    pass
        if  api == 'fakeReleases':
            val = kwds['release']
            if  val != 'required':
                kwds['query'] = 'find release where release=%s' % val
            else:
                kwds['query'] = 'required'
            kwds.pop('release')
        if  api == 'fakeRelease4File':
            val = kwds['file']
            if  val != 'required':
                kwds['query'] = 'find release where file=%s' % val
            else:
                kwds['query'] = 'required'
            kwds.pop('file')
        if  api == 'fakeRelease4Dataset':
            val = kwds['dataset']
            if  val != 'required':
                kwds['query'] = 'find release where dataset=%s' % val
            else:
                kwds['query'] = 'required'
            kwds.pop('dataset')
        if  api == 'fakeConfig':
            val = kwds['dataset']
            sel = 'config.name, config.content, config.version, config.type, \
 config.annotation, config.createdate, config.createby, config.moddate, \
 config.modby'
            if  val != 'required':
                kwds['query'] = 'find %s where dataset=%s' % (sel, val)
            else:
                kwds['query'] = 'required'
            kwds.pop('dataset')
        if  api == 'fakeSite4Dataset' and inst and inst != self.prim_instance:
            val = kwds['dataset']
            if  val != 'required':
                kwds['query'] = "find site where dataset=%s" % val
            else:
                kwds['query'] = 'required'
            kwds.pop('dataset')
        if  api == 'fakeListDataset4File':
            val = kwds['file']
            if  val != 'required':
                kwds['query'] = "find dataset, count(block), count(file.size), \
  sum(block.size), sum(block.numfiles), sum(block.numevents), dataset.status \
  where file=%s" % val
            else:
                kwds['query'] = 'required'
            kwds.pop('file')
        if  api == 'fakeListDataset4Block':
            val = kwds['block']
            if  val != 'required':
                kwds['query'] = "find dataset, count(block), count(file.size), \
  sum(block.size), sum(block.numfiles), sum(block.numevents) \
  where block=%s" % val
            else:
                kwds['query'] = 'required'
            kwds.pop('block')
        if  api == 'fakeRun4Run': # runregistry doesn't support 'in'
            val = kwds['run']
            if  val != 'required':
                if  isinstance(val, dict):
                    min_run = 0
                    max_run = 0
                    if  val.has_key('$lte'):
                        max_run = val['$lte']
                    if  val.has_key('$gte'):
                        min_run = val['$gte']
                    if  min_run and max_run:
                        val = "run >=%s and run <= %s" % (min_run, max_run)
                    elif val.has_key('$in'):
                        arr = [r for r in val['$in']]
                        val = "run >=%s and run <= %s" % (arr[0], arr[-1])
                elif isinstance(val, int):
                    val = "run = %d" % val
                kwds['query'] = "find run where %s" % val
            else:
                kwds['query'] = 'required'
            kwds.pop('run')
        if  api == 'fakeBlock4DatasetRun':
            dataset = kwds.get('dataset', 'required')
            run = kwds.get('run', 'required')
            if  dataset != 'required' and run != 'required':
                kwds['query'] = 'find block.name where dataset=%s and run=%s'\
                        % (dataset, run)
            else:
                kwds['query'] = 'required'
        if  api == 'fakeGroup4Dataset':
            val = kwds['dataset']
            if  val != 'required':
                val = "dataset = %s" % val
                kwds['query'] = "find phygrp where %s" % val
            else:
                kwds['query'] = 'required'
            kwds.pop('dataset')
        if  api == 'fakeChild4File':
            val = kwds['file']
            if  val != 'required':
                val = "file = %s" % val
                kwds['query'] = "find file.child where %s" % val
            else:
                kwds['query'] = 'required'
            kwds.pop('file')
        if  api == 'fakeChild4Dataset':
            val = kwds['dataset']
            if  val != 'required':
                val = "dataset = %s" % val
                kwds['query'] = "find dataset.child where %s" % val
            else:
                kwds['query'] = 'required'
            kwds.pop('dataset')
        if  api == 'fakeDataset4Run': # runregistry doesn't support 'in'
            val = kwds['run']
            qlist = []
            if  val != 'required':
                if  isinstance(val, dict):
                    min_run = 0
                    max_run = 0
                    if  val.has_key('$lte'):
                        max_run = val['$lte']
                    if  val.has_key('$gte'):
                        min_run = val['$gte']
                    if  min_run and max_run:
                        val = "run >=%s and run <= %s" % (min_run, max_run)
                    elif val.has_key('$in'):
                        arr = [r for r in val['$in']]
                        val = "run >=%s and run <= %s" % (arr[0], arr[-1])
                elif isinstance(val, int):
                    val = "run = %d" % val
                if  kwds.has_key('dataset') and kwds['dataset']:
                    val += ' and dataset=%s' % kwds['dataset']
                kwds['query'] = \
                "find dataset where %s and dataset.status like VALID*" % val
            else:
                kwds['query'] = 'required'
            kwds.pop('run')
            kwds.pop('dataset')
        if  api == 'fakeRun4File':
            val = kwds['file']
            if  val != 'required':
                kwds['query'] = "find run where file = %s" % val
            else:
                kwds['query'] = 'required'
            kwds.pop('file')
        if  api == 'fakeFiles4DatasetRunLumis':
            cond = ""
            val = kwds['dataset']
            if  val and val != 'required':
                cond = " and dataset=%s" % val
                kwds.pop('dataset')
            val = kwds['run']
            if  val and val != 'required':
                cond += " and run=%s" % val
                kwds.pop('run')
            val = kwds['lumi']
            if  val and val != 'required':
                cond += " and lumi=%s" % val
                kwds.pop('lumi')
            if  cond:
                kwds['query'] = "find file.name where %s" % cond[4:]
            else:
                kwds['query'] = 'required'
        if  api == 'fakeDatasetSummary':
            value = ""
            for key, val in kwds.iteritems():
                if  key == 'dataset' and val:
                    value += ' and dataset=%s' % val
                if  key == 'primary_dataset' and val:
                    value += ' and primds=%s' % val
                if  key == 'release' and val:
                    value += ' and release=%s' % val
                if  key == 'tier' and val:
                    value += ' and tier=%s' % val
                if  key == 'phygrp' and val:
                    value += ' and phygrp=%s' % val
                if  key == 'datatype' and val:
                    value += ' and datatype=%s' % val
                if  key == 'status':
                    if  val:
                        value += ' and dataset.status=%s' % val
                    else:
                        value += ' and dataset.status like VALID*'
            keys = ['dataset', 'release', 'primary_dataset', 'tier', \
                'phygrp', 'datatype', 'status']
            for key in keys:
                try:
                    del kwds[key]
                except:
                    pass
            if  value:
                kwds['query'] = "find dataset, datatype, dataset.status, \
dataset.tag, \
procds.createdate, procds.createby, procds.moddate, procds.modby, \
sum(block.numfiles), sum(block.numevents), count(block), sum(block.size) \
where %s" % value[4:]
            else:
                kwds['query'] = 'required'
        if  api == 'fakeListDatasetbyDate':
            value = ''
            if  kwds['status']:
                value = ' and dataset.status=%s' % kwds['status']
            else:
                value = ' and dataset.status like VALID*'
#           20110126/{'$lte': 20110126}/{'$lte': 20110126, '$gte': 20110124} 
            query_for_single = "find dataset, datatype, dataset.status, \
  dataset.tag, \
  count(block), sum(block.size), sum(block.numfiles), sum(block.numevents), \
  dataset.createdate where dataset.createdate %s %s " + value
            query_for_double = "find dataset, datatype, dataset.status, \
  dataset.tag, \
  count(block), sum(block.size), sum(block.numfiles), sum(block.numevents), \
  dataset.createdate where dataset.createdate %s %s \
  and dataset.createdate %s %s " + value
            val = kwds['date']
            qlist = []
            query = ""
            if val != "required":
                if isinstance(val, dict):
                    for opt in val:
                        nopt = dbsql_opt_map(opt)
                        if nopt == ('in'):
                            self.logger.debug(val[opt])
                            nval = [convert_datetime(x) for x in val[opt]]
                        else:
                            nval = convert_datetime(val[opt])
                        qlist.append(nopt)
                        qlist.append(nval)
                    if len(qlist) == 4:
                        query = query_for_double % tuple(qlist)
                    else:
                        msg = "dbs_services::fakeListDatasetbyDate \
 wrong params get, IN date is not support by DBS2 QL"
                        self.logger.info(msg)
                elif isinstance(val, int):
                    val = convert_datetime(val)
                    query = query_for_single % ('=', val)
                kwds['query'] = query
            else:
                kwds['query'] = 'required'
            kwds.pop('date')
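
Every fake* branch above follows the same pattern: when the mandatory keyword still holds the 'required' placeholder, kwds['query'] is set to 'required' so the caller can reject the request; otherwise a DBS-QL 'find ... where ...' string is assembled and the consumed keyword is popped. A minimal sketch of one such branch (fakeRelease4Dataset), written as a hypothetical standalone function:

def adjust_release4dataset(kwds):
    """Mirror the fakeRelease4Dataset branch of adjust_params (sketch only)."""
    val = kwds['dataset']
    if val != 'required':
        # build the DBS-QL query from the supplied dataset value
        kwds['query'] = 'find release where dataset=%s' % val
    else:
        # keep the sentinel so the caller knows a mandatory argument is missing
        kwds['query'] = 'required'
    kwds.pop('dataset')
    return kwds

if __name__ == '__main__':
    print(adjust_release4dataset({'dataset': '/A/B/RECO'}))
    # {'query': 'find release where dataset=/A/B/RECO'}
    print(adjust_release4dataset({'dataset': 'required'}))
    # {'query': 'required'}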
Code example #3
File: dbs_service.py  Project: ktf/DAS
    def adjust_params(self, api, kwds, inst=None):
        """
        Adjust DBS2 parameters for specific query requests
        To mimic DBS3 behavior we only allow dataset summary information
        for the fakeDatasetSummary and fakeListDataset4Block APIs, which use
        the full dataset and block name, respectively.
        """
        sitedb = SERVICES.get('sitedb2', None) # SiteDB from global scope
        if  api == 'fakeRun4Block':
            val = kwds['block']
            if  val != 'required':
                kwds['query'] = 'find run where block=%s' % val
            else:
                kwds['query'] = 'required'
            kwds.pop('block')
        if  api == 'fakeStatus':
            val = kwds['status']
            if  val:
                kwds['query'] = \
                'find dataset.status where dataset.status=%s' % val.upper()
            else:
                kwds['query'] = 'find dataset.status'
            val = kwds['dataset']
            if  val:
                if  kwds['query'].find(' where ') != -1:
                    kwds['query'] += ' and dataset=%s' % val
                else:
                    kwds['query'] += ' where dataset=%s' % val
            kwds.pop('status')
        if  api == 'listPrimaryDatasets':
            pat = kwds['pattern']
            if  pat[0] == '/':
                kwds['pattern'] = pat.split('/')[1]
        if  api == 'listProcessedDatasets':
            pat = kwds['processed_datatset_name_pattern']
            if  pat[0] == '/':
                try:
                    kwds['processed_datatset_name_pattern'] = pat.split('/')[2]
                except:
                    pass
        if  api == 'fakeReleases':
            val = kwds['release']
            if  val != 'required':
                kwds['query'] = 'find release where release=%s' % val
            else:
                kwds['query'] = 'required'
            kwds.pop('release')
        if  api == 'fakeRelease4File':
            val = kwds['file']
            if  val != 'required':
                kwds['query'] = 'find release where file=%s' % val
            else:
                kwds['query'] = 'required'
            kwds.pop('file')
        if  api == 'fakeRelease4Dataset':
            val = kwds['dataset']
            if  val != 'required':
                kwds['query'] = 'find release where dataset=%s' % val
            else:
                kwds['query'] = 'required'
            kwds.pop('dataset')
        if  api == 'fakeConfig':
            val = kwds['dataset']
            sel = 'config.name, config.content, config.version, config.type, \
 config.annotation, config.createdate, config.createby, config.moddate, \
 config.modby'
            if  val != 'required':
                kwds['query'] = 'find %s where dataset=%s' % (sel, val)
            else:
                kwds['query'] = 'required'
            kwds.pop('dataset')
        if  api == 'fakeSite4Dataset' and inst and inst != self.prim_instance:
            val = kwds['dataset']
            if  val != 'required':
                kwds['query'] = "find site where dataset=%s" % val
            else:
                kwds['query'] = 'required'
            kwds.pop('dataset')
        if  api == 'fakeDataset4Site' and inst and inst != self.prim_instance:
            val = kwds['site']
            if  val != 'required':
                sinfo = sitedb.site_info(val)
                if  sinfo and 'resources' in sinfo:
                    for row in sinfo['resources']:
                        if  row['type'] == 'SE' and 'fqdn' in row:
                            sename = row['fqdn']
                            kwds['query'] = \
                                    "find dataset,site where site=%s" % sename
                            break
            else:
                kwds['query'] = 'required'
            kwds.pop('site')
        if  api == 'fakeListDataset4File':
            val = kwds['file']
            if  val != 'required':
                kwds['query'] = "find dataset, count(block), count(file.size) \
  where file=%s" % val
            else:
                kwds['query'] = 'required'
            kwds.pop('file')
        if  api == 'fakeListDataset4Block':
            val = kwds['block']
            if  val != 'required':
                kwds['query'] = "find dataset, count(block), \
  sum(block.size), sum(block.numfiles), sum(block.numevents) \
  where block=%s" % val
            else:
                kwds['query'] = 'required'
            kwds.pop('block')
        if  api == 'fakeRun4Run':
            val = kwds['run']
            if  val != 'required':
                if  isinstance(val, dict):
                    min_run = 0
                    max_run = 0
                    if  '$lte' in val:
                        max_run = val['$lte']
                    if  '$gte' in val:
                        min_run = val['$gte']
                    if  min_run and max_run:
                        val = "run >=%s and run <= %s" % (min_run, max_run)
                    elif '$in' in val:
                        val = ' or '.join(['run=%s' % r for r in val['$in']])
                        val = '(%s)' % val
                elif isinstance(val, int):
                    val = "run = %d" % val
                kwds['query'] = "find run where %s" % val
            else:
                kwds['query'] = 'required'
            kwds.pop('run')
        if  api == 'fakeBlock4file':
            lfn = kwds.get('file', 'required')
            if  lfn != 'required':
                kwds['query'] = 'find block.name where file=%s' % lfn
            else:
                kwds['query'] = 'required'
        if  api == 'fakeLumis4block':
            block = kwds.get('block', 'required')
            if  block != 'required':
                kwds['query'] = \
                'find lumi.number, run.number, file.name where block=%s' % block
                kwds.pop('block')
            else:
                kwds['query'] = 'required'
        if  api == 'fakeLumis4FileRun':
            query = kwds.get('query', 'required')
            lfn = kwds.get('lfn', 'required')
            if  lfn != 'required':
                query = \
                'find lumi.number, run.number where file=%s' % lfn
                kwds.pop('lfn')
            run = kwds.get('run', 'optional')
            if  run != 'optional':
                query += ' and run=%s' % run
                kwds.pop('run')
            kwds['query'] = query
        if  api == 'fakeBlock4DatasetRun':
            dataset = kwds.get('dataset', 'required')
            if  dataset != 'required':
                kwds['query'] = 'find block.name where dataset=%s'\
                        % dataset
            else:
                kwds['query'] = 'required'
            val = kwds.get('run', 'required')
            if  val != 'required':
                if  isinstance(val, dict):
                    min_run = 0
                    max_run = 0
                    if  '$lte' in val:
                        max_run = val['$lte']
                    if  '$gte' in val:
                        min_run = val['$gte']
                    if  min_run and max_run:
                        val = "run >=%s and run <= %s" % (min_run, max_run)
                    elif '$in' in val:
                        val = ' or '.join(['run=%s' % r for r in val['$in']])
                        val = '(%s)' % val
                elif isinstance(val, int):
                    val = "run = %d" % val
                kwds['query'] += ' and ' + val
                kwds.pop('dataset')
                kwds.pop('run')
            else:
                kwds['query'] = 'required'
        if  api == 'fakeGroup4Dataset':
            val = kwds['dataset']
            if  val != 'required':
                val = "dataset = %s" % val
                kwds['query'] = "find phygrp where %s" % val
            else:
                kwds['query'] = 'required'
            kwds.pop('dataset')
        if  api == 'fakeChild4File':
            val = kwds['file']
            if  val != 'required':
                val = "file = %s" % val
                kwds['query'] = "find file.child where %s" % val
            else:
                kwds['query'] = 'required'
            kwds.pop('file')
        if  api == 'fakeChild4Dataset':
            val = kwds['dataset']
            if  val != 'required':
                val = "dataset = %s" % val
                kwds['query'] = "find dataset.child where %s" % val
            else:
                kwds['query'] = 'required'
            kwds.pop('dataset')
        if  api == 'fakeDataset4Run':
            val = kwds['run']
            qlist = []
            if  val != 'required':
                if  isinstance(val, dict):
                    min_run = 0
                    max_run = 0
                    if  '$lte' in val:
                        max_run = val['$lte']
                    if  '$gte' in val:
                        min_run = val['$gte']
                    if  min_run and max_run:
                        val = "run >=%s and run <= %s" % (min_run, max_run)
                    elif '$in' in val:
                        val = ' or '.join(['run=%s' % r for r in val['$in']])
                        val = '(%s)' % val
                elif isinstance(val, int):
                    val = "run = %d" % val
                if  'dataset' in kwds and kwds['dataset']:
                    val += ' and dataset=%s' % kwds['dataset']
                kwds['query'] = \
                "find dataset where %s and dataset.status like VALID*" % val
            else:
                kwds['query'] = 'required'
            kwds.pop('run')
            kwds.pop('dataset')
        if  api == 'fakeDataset4User':
            user = kwds['user']
            if  user == 'required':
                kwds['query'] = 'required'
            else:
                val = sitedb.user_dn(kwds['user'])
                if  val:
                    # DBS-QL does not allow = or spaces, so we'll tweak the DN
                    val = val.replace('=', '*').replace(' ', '*')
                    kwds['query'] = "find dataset, dataset.createby " + \
                            "where dataset.createby=%s" % val
                    if  'dataset' in kwds and kwds['dataset']:
                        kwds['query'] += ' and dataset=%s' % kwds['dataset']
                else:
                    kwds['query'] = 'required'
            kwds.pop('user')
            kwds.pop('dataset')
        if  api == 'fakeRun4File':
            val = kwds['file']
            if  val != 'required':
                kwds['query'] = "find run where file = %s" % val
            else:
                kwds['query'] = 'required'
            kwds.pop('file')
        if  api == 'fakeFiles4DatasetRunLumis':
            cond = ""
            val = kwds['dataset']
            if  val and val != 'required':
                cond = " and dataset=%s" % val
                kwds.pop('dataset')
            val = kwds['run']
            if  val and val != 'required':
                cond += " and run=%s" % val
                kwds.pop('run')
            val = kwds['lumi']
            if  val and val != 'required':
                cond += " and lumi=%s" % val
                kwds.pop('lumi')
            if  cond:
                kwds['query'] = "find file.name where %s" % cond[4:]
            else:
                kwds['query'] = 'required'
        if  api == 'fakeDatasetSummary' or api == 'fakeDatasetPattern':
            value = ""
            path = False
            for key, val in kwds.iteritems():
                if  key == 'dataset' and val:
                    value += ' and dataset=%s' % val
                    if  len(val.split('/')) == 4: # /a/b/c -> ['','a','b','c']
                        if  val.find('*') == -1:
                            path = True
                if  key == 'primary_dataset' and val:
                    value += ' and primds=%s' % val
                if  key == 'release' and val:
                    value += ' and release=%s' % val
                if  key == 'tier' and val:
                    value += ' and tier=%s' % val
                if  key == 'phygrp' and val:
                    value += ' and phygrp=%s' % val
                if  key == 'datatype' and val:
                    value += ' and datatype=%s' % val
                if  api == 'fakeDatasetPattern':
                    if  key == 'status':
                        if  val:
                            value += ' and dataset.status=%s' % val.upper()
                        else:
                            value += ' and dataset.status like VALID*'
            keys = ['dataset', 'release', 'primary_dataset', 'tier', \
                'phygrp', 'datatype', 'status']
            for key in keys:
                try:
                    del kwds[key]
                except:
                    pass
            if  value:
                query  = "find dataset, datatype, dataset.status, dataset.tag"
                query += ", procds.createdate, procds.createby, procds.moddate"
                query += ", procds.modby"
                if  path: # we have full path, ask for summary information
                    query += ", sum(block.numfiles), sum(block.numevents)"
                    query += ", count(block), sum(block.size)"
                query += " where %s" % value[4:]
                kwds['query'] = query
            else:
                kwds['query'] = 'required'
        if  api == 'fakeListDatasetbyDate':
            value = ''
            if  kwds['status']:
                value = ' and dataset.status=%s' % kwds['status'].upper()
            else:
                value = ' and dataset.status like VALID*'
#           20110126/{'$lte': 20110126}/{'$lte': 20110126, '$gte': 20110124}
            query_for_single = "find dataset, datatype, dataset.status, \
  dataset.tag, \
  dataset.createdate where dataset.createdate %s %s " + value
            query_for_double = "find dataset, datatype, dataset.status, \
  dataset.tag, \
  dataset.createdate where dataset.createdate %s %s \
  and dataset.createdate %s %s " + value
            val = kwds['date']
            qlist = []
            query = ""
            if val != "required":
                if isinstance(val, dict):
                    for opt in val:
                        nopt = dbsql_opt_map(opt)
                        if nopt == ('in'):
                            self.logger.debug(val[opt])
                            nval = [convert_datetime(x) for x in val[opt]]
                        else:
                            nval = convert_datetime(val[opt])
                        qlist.append(nopt)
                        qlist.append(nval)
                    if len(qlist) == 4:
                        query = query_for_double % tuple(qlist)
                    else:
                        msg = "dbs_services::fakeListDatasetbyDate \
 wrong params get, IN date is not support by DBS2 QL"
                        self.logger.info(msg)
                elif isinstance(val, int):
                    val = convert_datetime(val)
                    query = query_for_single % ('=', val)
                kwds['query'] = query
            else:
                kwds['query'] = 'required'
            kwds.pop('date')
        if  api == 'listFiles':
            val = kwds.get('run_number', None)
            if  isinstance(val, dict):
                # listFiles does not support run range, see
                # fakeFiles4DatasetRun API
                kwds['run_number'] = 'required'
            if  not kwds['path'] and not kwds['block_name'] and \
                not kwds['pattern_lfn']:
                kwds['path'] = 'required'
        if  api == 'fakeFiles4DatasetRun' or api == 'fakeFiles4BlockRun':
            cond = ""
            entity = 'dataset'
            if  api == 'fakeFiles4BlockRun':
                entity = 'block'
            val = kwds[entity]
            if  val and val != 'required':
                cond = " and %s=%s" % (entity, val)
                kwds.pop(entity)
            val = kwds['run']
            if  val and val != 'required':
                if  isinstance(val, dict):
                    min_run = 0
                    max_run = 0
                    if  '$lte' in val:
                        max_run = val['$lte']
                    if  '$gte' in val:
                        min_run = val['$gte']
                    if  min_run and max_run:
                        val = "run >=%s and run <= %s" % (min_run, max_run)
                    elif '$in' in val:
                        val = ' or '.join(['run=%s' % r for r in val['$in']])
                        val = '(%s)' % val
                elif isinstance(val, int):
                    val = "run = %d" % val
                cond += " and %s" % val
                kwds.pop('run')
            if  cond:
                kwds['query'] = "find file.name where %s" % cond[4:]
            else:
                kwds['query'] = 'required'
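
Compared with code example #2, this version expands an explicit '$in' run list into OR'ed run=... clauses instead of approximating it with a range, and repeats that block in fakeRun4Run, fakeBlock4DatasetRun, fakeDataset4Run and fakeFiles4DatasetRun/fakeFiles4BlockRun. A self-contained sketch of that shared translation; the helper name run_condition is hypothetical:

def run_condition(val):
    """Translate a run spec into a DBS-QL condition string (sketch only)."""
    if isinstance(val, dict):
        min_run = val.get('$gte', 0)
        max_run = val.get('$lte', 0)
        if min_run and max_run:
            return "run >=%s and run <= %s" % (min_run, max_run)
        if '$in' in val:
            # OR together the individual runs, as in the code above
            return '(%s)' % ' or '.join('run=%s' % r for r in val['$in'])
    elif isinstance(val, int):
        return "run = %d" % val
    raise ValueError('unsupported run spec: %r' % val)

if __name__ == '__main__':
    print(run_condition({'$gte': 160915, '$lte': 160920}))   # run range
    print(run_condition({'$in': [160915, 160917, 160919]}))  # OR'ed run list
    print(run_condition(160915))                             # single run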
Code example #4
File: runregistry_service.py  Project: zdenekmaxa/DAS
 def apicall(self, dasquery, url, api, args, dformat, expire):
     """
     A service worker. It parses the input query, invokes the service API
     and returns the results as a list of rows.
     """
     _query  = ""
     for key, val in dasquery.mongo_query['spec'].iteritems():
         if  key == 'run.run_number':
             if  isinstance(val, int):
                 _query = {'runNumber': '%s' % val}
             elif isinstance(val, dict):
                 minrun = 0
                 maxrun = 0
                 for kkk, vvv in val.iteritems():
                     if  kkk == '$in':
                         if len(vvv) == 2:
                             minrun, maxrun = vvv
                         else: # in[1, 2, 3]
                             msg = "runregistry can not deal with 'in'"
                             self.logger.info(msg)
                             continue
                     elif kkk == '$lte':
                         maxrun = vvv
                     elif kkk == '$gte':
                         minrun = vvv
                 _query = {'runNumber': '>= %s and < %s' % (minrun, maxrun)}
         elif key == 'date':
             if  isinstance(val, dict):
                 if  val.has_key('$in'):
                     value = val['$in']
                 elif val.has_key('$lte') and val.has_key('$gte'):
                     value = (val['$gte'], val['$lte'])
                 else:
                     msg = 'Unable to get the value from %s=%s' \
                             % (key, val) 
                     raise Exception(msg)
                 try:
                     date1 = convert_datetime(value[0])
                     date2 = convert_datetime(value[-1])
                 except:
                     msg = 'Unable to convert to datetime format, %s' \
                         % value
                     raise Exception(msg)
             elif  isinstance(val, str) or isinstance(val, unicode):
                 date1, date2 = convert2date(val)
                 date1 = rr_date(date1)
                 date2 = rr_date(date2)
             else:
                 date1 = convert_datetime(val)
                 date2 = convert_datetime(val + 24*60*60)
             run_time = '>= %s and < %s' % (date1, date2)
             _query = {'runStartTime': run_time}
         else:
             msg  = 'RunRegistryService::api\n\n'
             msg += "--- %s reject API %s, parameters don't match, args=%s" \
                     % (self.name, api, args)
             self.logger.info(msg)
             return
     if  not _query:
         msg = 'Unable to match input parameters with input query'
         raise Exception(msg)
     if  args.has_key('run') and isinstance(args['run'], dict):
         args['run'] = str(args['run'])
     msg = "DASAbstractService:RunRegistry, query=%s" % _query
     self.logger.info(msg)
     time0   = time.time()
     api_ver = 3 # API version for RunRegistry, v2 is xmlrpc, v3 is REST
     rawrows = worker(url, _query, api_ver)
     genrows = self.translator(api, rawrows)
     dasrows = self.set_misses(dasquery, api, run_duration(genrows, api_ver))
     ctime   = time.time() - time0
     try:
         self.write_to_cache(\
             dasquery, expire, url, api, args, dasrows, ctime)
     except Exception as exc:
         print_exc(exc)
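
All apicall variants share the same date branch: a dictionary spec supplies both edges of the range, while a single integer timestamp is extended by 24 hours, and the result becomes a runStartTime filter (the string-valued case handled via convert2date and rr_date is omitted here). A minimal sketch of that logic; convert_datetime below is a simplified stand-in for the DAS utility of the same name, so its exact output format may differ.

import time

def convert_datetime(tstamp):
    """Stand-in for the DAS helper: render a UNIX timestamp as text."""
    return time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(tstamp))

def run_start_time_filter(val):
    """Translate a date spec into a RunRegistry runStartTime filter (sketch)."""
    if isinstance(val, dict):
        if '$in' in val:
            value = val['$in']
        else:
            value = (val['$gte'], val['$lte'])
        date1 = convert_datetime(value[0])
        date2 = convert_datetime(value[-1])
    else:  # a single timestamp covers the following 24 hours
        date1 = convert_datetime(val)
        date2 = convert_datetime(val + 24 * 60 * 60)
    return {'runStartTime': '>= %s and < %s' % (date1, date2)}

if __name__ == '__main__':
    print(run_start_time_filter(1303862400))
    print(run_start_time_filter({'$gte': 1303862400, '$lte': 1303948800}))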
Code example #5
File: runregistry_service.py  Project: perrozzi/DAS
 def apicall(self, dasquery, url, api, args, dformat, expire):
     """
     A service worker. It parses the input query, invokes the service API
     and returns the results as a list of rows.
     """
     _query = ''
     _table = 'runsummary'
     if api == 'rr_xmlrpc_lumis':
         _table = 'runlumis'
     for key, val in dasquery.mongo_query['spec'].items():
         if key == 'run.run_number':
             if isinstance(val, int):
                 _query = {'runNumber': '%s' % val}
             elif isinstance(val, dict):
                 minrun = 0
                 maxrun = 0
                 for kkk, vvv in val.items():
                     if kkk == '$in':
                         runs = ' or '.join([str(r) for r in vvv])
                         _query = {'runNumber': runs}
                     elif kkk == '$lte':
                         maxrun = vvv
                     elif kkk == '$gte':
                         minrun = vvv
                 if minrun and maxrun:
                     _query = {
                         'runNumber': '>= %s and < %s' % (minrun, maxrun)
                     }
         elif key == 'date':
             if isinstance(val, dict):
                 if '$in' in val:
                     value = val['$in']
                 elif '$lte' in val and '$gte' in val:
                     value = (val['$gte'], val['$lte'])
                 else:
                     msg = 'Unable to get the value from %s=%s' \
                             % (key, val)
                     raise Exception(msg)
                 try:
                     date1 = convert_datetime(value[0])
                     date2 = convert_datetime(value[-1])
                 except:
                     msg = 'Unable to convert to datetime format, %s' \
                         % value
                     raise Exception(msg)
             elif isinstance(val, str) or isinstance(val, unicode):
                 date1, date2 = convert2date(val)
                 date1 = rr_date(date1)
                 date2 = rr_date(date2)
             else:
                 date1 = convert_datetime(val)
                 date2 = convert_datetime(val + 24 * 60 * 60)
             run_time = '>= %s and < %s' % (date1, date2)
             _query = {'runStartTime': run_time}
         else:
             msg = 'RunRegistryService::api\n\n'
             msg += "--- %s reject API %s, parameters don't match, args=%s" \
                     % (self.name, api, args)
             self.logger.info(msg)
             return
     if not _query:
         msg = 'Unable to match input parameters with input query'
         raise Exception(msg)
     if 'run' in args and isinstance(args['run'], dict):
         args['run'] = str(args['run'])
     msg = "DASAbstractService:RunRegistry, query=%s" % _query
     self.logger.info(msg)
     time0 = time.time()
     rawrows = rr_worker(url, _query, _table)
     genrows = self.translator(api, rawrows)
     if _table == 'runsummary':
         dasrows = run_duration(genrows)
     else:
         dasrows = collect_lumis(genrows)
     ctime = time.time() - time0
     try:
         self.write_to_cache(\
             dasquery, expire, url, api, args, dasrows, ctime)
     except Exception as exc:
         print_exc(exc)