Example #1
File: task.py Project: ktf/DAS
 def __init__(self, name, classname, master_id, kwargs,
              index, interval, parent):
     self.classname = classname
     self.kwargs = deepcopy(kwargs)
     self.name = name
     self.index = index
     self.interval = interval
     self.parent = deepcopy(parent)
     self.master_id = master_id
     self.logger = None
Example #2
    def pattern_query(self):
        """
        Pattern property for DAS query whose spec is modified
        to regexes and $exists keys
        """
        if not self._pattern_query:
            nmq = deepcopy(self.loose_query)

            def edit_dict(old):
                """
                Inner recursive dictionary manipulator
                """
                result = {}
                for key, val in old.items():
                    if isinstance(val, basestring):
                        if '*' in val:
                            if len(val) == 1:
                                result[key] = {'$exists': True}
                            else:
                                result[key] = \
                                    re.compile('^%s' % val.replace('*', '.*'))
                    elif isinstance(val, dict):
                        result[key] = edit_dict(val)
                    else:
                        result[key] = val
                return result

            nmq['spec'] = edit_dict(nmq['spec'])
            self._pattern_query = nmq
        return self._pattern_query
Example #3
 def pattern_query(self):
     """
     Pattern property for DAS query whose spec is modified
     to regexes and $exists keys
     """
     if  not self._pattern_query:
         nmq = deepcopy(self.loose_query)
         
         def edit_dict(old):
             """
             Inner recursive dictionary manipulator
             """
             result = {}
             for key, val in old.items():
                 if isinstance(val, basestring):
                     if  '*' in val:
                         if len(val) == 1:
                             result[key] = {'$exists': True}
                         else:
                             result[key] = \
                                 re.compile('^%s' % val.replace('*', '.*'))
                 elif isinstance(val, dict):
                     result[key] = edit_dict(val)
                 else:
                     result[key] = val
             return result
         
         nmq['spec'] = edit_dict(nmq['spec'])    
         self._pattern_query = nmq
     return self._pattern_query
Example #4
    def setUp(self):
        """
        set up DAS core module
        """
        debug = 0
        self.db = "test_mapping.db"
        config = deepcopy(das_readconfig())
        dburi = config["mongodb"]["dburi"]
        logger = PrintManager("TestDASMapping", verbose=debug)
        config["logger"] = logger
        config["verbose"] = debug
        dbname = "test_mapping"
        collname = "db"
        config["mappingdb"] = dict(dburi=dburi, dbname=dbname, collname=collname)
        # add some maps to mapping db
        conn = MongoClient(dburi)
        conn.drop_database(dbname)
        self.coll = conn[dbname][collname]
        self.pmap = {
            "presentation": {
                "block": [{"ui": "Block name", "das": "block.name"}, {"ui": "Block size", "das": "block.size"}]
            },
            "type": "presentation",
        }
        self.coll.insert(self.pmap)
        ver_token = verification_token(self.coll.find(exhaust=True))
        rec = {"verification_token": ver_token, "type": "verification_token"}
        self.coll.insert(rec)

        self.mgr = DASMapping(config)
Example #5
 def mongo_query(self):
     """
     Read only mongo query, generated on demand.
     """
     system = self._mongo_query.get('system', [])
     filters = self._mongo_query.get('filters', {})
     aggregators = self._mongo_query.get('aggregators', [])
     if  not self._mongo_query:
         self._mongo_query = deepcopy(self.storage_query)
         for key, val in self._mongo_query.items():
             if  key not in ['fields', 'spec']:
                 setattr(self, '_%s' % key, val)
         spec = {}
         for item in self._mongo_query.pop('spec'):
             val = json.loads(item['value'])
             if  'pattern' in item:
                 val = re.compile(val)
             spec.update({item['key'] : val})
         self._mongo_query['spec'] = spec
     # special case when user asks for all records
     fields = self._mongo_query.get('fields', None)
     if  fields and fields == ['records']:
         self._mongo_query['fields'] = None
         spec = {}
         for key, val in self._mongo_query['spec'].items():
             if  key != 'records':
                 spec[key] = val
         self._mongo_query = dict(fields=None, spec=spec)
     if  filters:
         self._mongo_query.update({'filters':filters})
     if  aggregators:
         self._mongo_query.update({'aggregators':aggregators})
     if  system:
         self._mongo_query.update({'system':system})
     return self._mongo_query
Example #6
 def pagination(self, total, incache, kwds):
     """
     Construct the pagination part of the page. It accepts the total
     number of results as well as a dict of kwargs which contains
     idx/limit/query/input parameters and other parameters used in
     the URL by the end-user.
     """
     kwargs  = deepcopy(kwds)
     if  kwargs.has_key('dasquery'):
         del kwargs['dasquery'] # we don't need it
     idx     = getarg(kwargs, 'idx', 0)
     limit   = getarg(kwargs, 'limit', 10)
     uinput  = getarg(kwargs, 'input', '')
     page    = ''
     if  total > 0:
         params = {} # will keep everything except idx/limit
         for key, val in kwargs.iteritems():
             if  key != 'idx' and key != 'limit' and key != 'query':
                 params[key] = val
         url   = "%s/request?%s" \
                 % (self.base, urllib.urlencode(params, doseq=True))
         page += self.templatepage('das_pagination', \
             nrows=total, idx=idx, limit=limit, url=url)
     else:
         page = self.templatepage('das_noresults', query=uinput,
                     incache=incache)
     return page
Example #7
 def mongo_query(self):
     """
     Read only mongo query, generated on demand.
     """
     system = self._mongo_query.get('system', [])
     filters = self._mongo_query.get('filters', {})
     aggregators = self._mongo_query.get('aggregators', [])
     if not self._mongo_query:
         self._mongo_query = deepcopy(self.storage_query)
         for key, val in self._mongo_query.items():
             if key not in ['fields', 'spec']:
                 setattr(self, '_%s' % key, val)
         spec = {}
         for item in self._mongo_query.pop('spec'):
             val = json.loads(item['value'])
             if 'pattern' in item:
                 val = re.compile(val)
             spec.update({item['key']: val})
         self._mongo_query['spec'] = spec
     # special case when user asks for all records
     fields = self._mongo_query.get('fields', None)
     if fields and fields == ['records']:
         self._mongo_query['fields'] = None
         spec = {}
         for key, val in self._mongo_query['spec'].items():
             if key != 'records':
                 spec[key] = val
         self._mongo_query = dict(fields=None, spec=spec)
     if filters:
         self._mongo_query.update({'filters': filters})
     if aggregators:
         self._mongo_query.update({'aggregators': aggregators})
     if system:
         self._mongo_query.update({'system': system})
     return self._mongo_query
Example #8
 def to_mongo_id(self):
     """
     Return a new DASQuery where the _id has been converted
     (as adjust_id) to a mongo ObjectId
     """
     nmq = deepcopy(self.mongo_query)
     if self.has_id():
         val = self.mongo_query['spec']['_id']
         if isinstance(val, str):
             nmq['spec']['_id'] = ObjectId(val)
         elif isinstance(val, unicode):
             nmq['spec']['_id'] = ObjectId(unicode.encode(val))
         elif isinstance(val, list):
             result = []
             for item in val:
                 if isinstance(item, str):
                     result.append(ObjectId(item))
                 elif isinstance(item, unicode):
                     result.append(ObjectId(unicode.encode(item)))
                 else:
                     raise Exception("non str|unicode _id.child")
             nmq['spec']['_id'] = result
         else:
             raise Exception("non str|unicode|list _id")
     return DASQuery(nmq, **self._flags)
Example #9
 def to_mongo_id(self):
     """
     Return a new DASQuery where the _id has been converted
     (as adjust_id) to a mongo ObjectId
     """
     nmq = deepcopy(self.mongo_query)
     if self.has_id():
         val = self.mongo_query['spec']['_id']
         if isinstance(val, str):
             nmq['spec']['_id'] = ObjectId(val)
         elif isinstance(val, unicode):
             nmq['spec']['_id'] = ObjectId(unicode.encode(val))
         elif isinstance(val, list):
             result = []
             for item in val:
                 if isinstance(item, str):
                     result.append(ObjectId(item))
                 elif isinstance(item, unicode):
                     result.append(ObjectId(unicode.encode(item)))
                 else:
                     raise Exception("non str|unicode _id.child")
             nmq['spec']['_id'] = result
         else:
             raise Exception("non str|unicode|list _id")
     return DASQuery(nmq, **self._flags)
Example #10
File: utils_t.py Project: ktf/DAS
    def test_deepcopy(self):
        """Test deepcopy function"""
        query = {'fields': ['release'], 
                 'spec': {u'release.name': 'CMSSW_2_0_8',
                 'das.primary_key': {'$in': [u'release.name']},
                 'das.condition_keys': [u'release.name']}}
        obj = deepcopy(query)
        self.assertEqual(query, obj)
        del query['spec']['das.primary_key']
        self.assertNotEqual(query, obj)

        obj = 1
        self.assertEqual(deepcopy(obj), obj)
        obj = (1,2)
        self.assertEqual(deepcopy(obj), obj)
        obj = [1,2]
        self.assertEqual(deepcopy(obj), obj)
Example #11
 def setUp(self):
     """
     set up stuff
     """
     self.debug  = 0
     dasconfig   = deepcopy(das_readconfig())
     self.dburi  = dasconfig['mongodb']['dburi']
     self.reqmgr = RequestManager(self.dburi)
Example #12
 def setUp(self):
     """
     set up DAS core module
     """
     self.debug  = 0
     dasconfig   = deepcopy(das_readconfig())
     self.dburi  = dasconfig['mongodb']['dburi']
     self.dbhost = 'localhost'
     self.dbport = 27017
Example #13
 def setUp(self):
     """
     set up DAS core module
     """
     debug    = 0
     config   = deepcopy(das_readconfig())
     logger   = PrintManager('TestDASCache', verbose=debug)
     config['logger']  = logger
     config['verbose'] = debug
Example #14
def get_mongo_query(query):
    "Get DAS query in MongoDB format and remove DAS look-up keys from it"
    mongoquery = decode_mongo_query(deepcopy(query))
    if  isinstance(mongoquery, dict) and mongoquery.has_key('spec'):
        for key in mongoquery['spec'].keys():
            if  key.find('das') != -1:
                # remove DAS keys, e.g. das.primary_key
                del mongoquery['spec'][key]
    return mongoquery
Example #15
 def setUp(self):
     """
     set up DAS core module
     """
     debug = 0
     config = deepcopy(das_readconfig())
     logger = PrintManager('TestDASCache', verbose=debug)
     config['logger'] = logger
     config['verbose'] = debug
Example #16
def get_analytics_interface():
    """
    Factory function to get a standalone interface to DASAnalytics without
    loading the rest of DAS, that logs to our global logger.
    """
    global DAS_CONFIG
    config = deepcopy(DAS_CONFIG)
    config['logger'] = logging.getLogger("DASAnalytics.AnalyticsDB")
    return DASAnalytics(config)
Example #17
 def setUp(self):
     """
     set up DAS core module
     """
     self.debug = 0
     dasconfig = deepcopy(das_readconfig())
     self.dburi = dasconfig['mongodb']['dburi']
     self.dbhost = 'localhost'
     self.dbport = 27017
Example #18
 def setUp(self):
     """
     set up DAS core module
     """
     debug = 0
     self.das = DASCore(debug=debug)
     config = deepcopy(das_readconfig())
     dburi = config['mongodb']['dburi']
     connection = Connection(dburi)
     connection.drop_database('das') 
Example #19
 def testConfig(self):                          
     """test read/write of configuration file"""
     if  os.environ.has_key('DAS_CONFIG'):
         del os.environ['DAS_CONFIG']
     fds = NamedTemporaryFile()
     os.environ['DAS_CONFIG'] = fds.name
     dasconfig = das_configfile()
     write_configparser(dasconfig, True)
     readdict = deepcopy(das_readconfig())
     self.assertEqual(types.DictType, type(readdict))
Example #20
 def testConfig(self):                          
     """test read/write of configuration file"""
     if  'DAS_CONFIG' in os.environ:
         del os.environ['DAS_CONFIG']
     fds = NamedTemporaryFile()
     os.environ['DAS_CONFIG'] = fds.name
     dasconfig = das_configfile()
     write_configparser(dasconfig, True)
     readdict = deepcopy(das_readconfig())
     self.assertEqual(dict, type(readdict))
Example #21
 def to_bare_query(self):
     """
     Return a new query containing only field and spec keys of this query.
     May be identical if this is already a bare query.
     """
     if  self.is_bare_query():
         return self
     mongo_query = {'fields': copy.deepcopy(self.mongo_query['fields']),
             'spec': deepcopy(self.mongo_query['spec'])}
     return mongo_query
Example #22
 def setUp(self):
     """
     set up DAS core module
     """
     debug = 0
     self.das = DASCore(debug=debug, multitask=False)
     config = deepcopy(das_readconfig())
     dburi = config['mongodb']['dburi']
     connection = MongoClient(dburi)
     connection.drop_database('das') 
Example #23
 def qhash(self):
     """
     Read only qhash, generated on demand.
     """
     if not self._qhash:
         sdict = deepcopy(self.storage_query)
         for key in ['filters', 'aggregators', 'mapreduce']:
             if key in sdict:
                 del sdict[key]
         self._qhash = genkey(sdict)
     return self._qhash
Example #24
 def qhash(self):
     """
     Read only qhash, generated on demand.
     """
     if  not self._qhash:
         sdict = deepcopy(self.storage_query)
         for key in ['filters', 'aggregators', 'mapreduce']:
             if  key in sdict:
                 del sdict[key]
         self._qhash = genkey(sdict)
     return self._qhash
Example #25
 def to_bare_query(self):
     """
     Return a new query containing only field and spec keys of this query.
     May be identical if this is already a bare query.
     """
     if self.is_bare_query():
         return self
     mongo_query = {
         'fields': copy.deepcopy(self.mongo_query['fields']),
         'spec': deepcopy(self.mongo_query['spec'])
     }
     return mongo_query
Example #26
 def setUp(self):
     """
     set up DAS core module
     """
     debug    = 0
     self.db  = 'test_analytics.db'
     config   = deepcopy(das_readconfig())
     dburi    = config['mongodb']['dburi']
     logger   = PrintManager('TestDASAnalytics', verbose=debug)
     config['logger']  = logger
     config['verbose'] = debug
     config['analyticsdb'] = dict(dburi=dburi, history=5184000,
             dbname='test_analytics', collname='db')
     self.mgr = DASAnalytics(config)
Example #27
    def test_deepcopy(self):
        """Test deepcopy function"""
        query = {
            'fields': ['release'],
            'spec': {
                u'release.name': 'CMSSW_2_0_8',
                'das.primary_key': {
                    '$in': [u'release.name']
                },
                'das.condition_keys': [u'release.name']
            }
        }
        obj = deepcopy(query)
        self.assertEqual(query, obj)
        del query['spec']['das.primary_key']
        self.assertNotEqual(query, obj)

        obj = 1
        self.assertEqual(deepcopy(obj), obj)
        obj = (1, 2)
        self.assertEqual(deepcopy(obj), obj)
        obj = [1, 2]
        self.assertEqual(deepcopy(obj), obj)
Example #28
    def setUp(self):
        """
        set up DAS core module
        """
        debug    = 0
        config   = deepcopy(das_readconfig())
        logger   = PrintManager('TestDASMongocache', verbose=debug)
        config['logger']  = logger
        config['verbose'] = debug
        dburi    = config['mongodb']['dburi']

        connection = Connection(dburi)
        connection.drop_database('das') 
        dasmapping = DASMapping(config)
        config['dasmapping'] = dasmapping
        self.dasmongocache = DASMongocache(config)
Example #29
    def setUp(self):
        """
        set up DAS core module
        """
        debug = 0
        config = deepcopy(das_readconfig())
        logger = PrintManager('TestDASMongocache', verbose=debug)
        config['logger'] = logger
        config['verbose'] = debug
        dburi = config['mongodb']['dburi']

        connection = MongoClient(dburi)
        connection.drop_database('das')
        dasmapping = DASMapping(config)
        config['dasmapping'] = dasmapping
        self.dasmongocache = DASMongocache(config)
Example #30
 def pagination(self, head):
     """
     Construct the pagination part of the page. The head dict provides
     the total number of results and the kwargs which contain
     idx/limit/query/input parameters and other parameters used in
     the URL by the end-user.
     """
     kwds = head.get('args')
     total = head.get('nresults')
     apilist = head.get('apilist')
     kwargs = deepcopy(kwds)
     if 'dasquery' in kwargs:
         del kwargs['dasquery']  # we don't need it
     idx = getarg(kwargs, 'idx', 0)
     limit = getarg(kwargs, 'limit', 50)
     uinput = getarg(kwargs, 'input', '')
     skip_args = ['status', 'error', 'reason']
     page = ''
     if datasetPattern(uinput):
         msg = 'By default DAS show dataset with <b>VALID</b> status. '
         msg += 'To query all datasets regardless of their status please use'
         msg += '<span class="example">dataset %s status=*</span> query' % uinput
         msg += ' or use proper status value, e.g. PRODUCTION'
         page += '<div>%s</div><br/>' % msg
     if total and total > 0:
         params = {}  # will keep everything except idx/limit
         for key, val in kwargs.items():
             if key in skip_args:
                 continue
             if key != 'idx' and key != 'limit' and key != 'query':
                 params[key] = val
         url   = "%s/request?%s" \
                 % (self.base, urllib.urlencode(params, doseq=True))
         page += self.templatepage('das_pagination', \
             nrows=total, idx=idx, limit=limit, url=url, cgi=cgi, str=str)
     else:
         # distinguish the case when no results vs no API calls
         info = head.get('das_server', None)
         info = pprint.pformat(info) if info else None
         page = self.templatepage('das_noresults',
                                  query=uinput,
                                  time=time,
                                  status=head.get('status', None),
                                  reason=head.get('reason', None),
                                  info=info,
                                  apilist=head.get('apilist', None))
     return page
Example #31
 def loose_query(self):
     """
     Construct loose version of the query. That means add 
     pattern '*' to string type values for all conditions.
     """
     if not self._loose_query:
         query = deepcopy(self.mongo_query)
         spec = query.get('spec', {})
         fields = query.get('fields', None)
         newspec = {}
         for key, val in spec.items():
             if key != '_id' and \
                     (isinstance(val, str) or isinstance(val, unicode)):
                 if val[-1] != '*':
                     val += '*'  # add pattern
             newspec[key] = val
         self._loose_query = dict(spec=newspec, fields=fields)
     return self._loose_query
Example #32
 def loose_query(self):
     """
     Construct loose version of the query. That means add 
     pattern '*' to string type values for all conditions.
     """
     if  not self._loose_query:
         query   = deepcopy(self.mongo_query)
         spec    = query.get('spec', {})
         fields  = query.get('fields', None)
         newspec = {}
         for key, val in spec.items():
             if  key != '_id' and \
                     (isinstance(val, str) or isinstance(val, unicode)):
                 if  val[-1] != '*':
                     val += '*' # add pattern
             newspec[key] = val
         self._loose_query = dict(spec=newspec, fields=fields)
     return self._loose_query
Example #33
 def storage_query(self):
     """
     Read only storage query, generated on demand.
     """
     if  not self._storage_query:
         self._storage_query = deepcopy(self.mongo_query)
         speclist = []
         for key, val in self._storage_query.pop('spec').items():
             if  str(type(val)) == "<type '_sre.SRE_Pattern'>":
                 val = json.dumps(val.pattern)
                 speclist.append({"key":key, "value":val, "pattern":1})
             elif isinstance(val, ObjectId):
                 speclist.append({"key":key, "value":str(val)})
             else:
                 val = json.dumps(val)
                 speclist.append({"key":key, "value":val})
         self._storage_query['spec'] = speclist
     return self._storage_query
Example #34
 def storage_query(self):
     """
     Read only storage query, generated on demand.
     """
     if not self._storage_query:
         self._storage_query = deepcopy(self.mongo_query)
         speclist = []
         for key, val in self._storage_query.pop('spec').items():
             if str(type(val)) == "<type '_sre.SRE_Pattern'>":
                 val = json.dumps(val.pattern)
                 speclist.append({"key": key, "value": val, "pattern": 1})
             elif isinstance(val, ObjectId):
                 speclist.append({"key": key, "value": str(val)})
             else:
                 val = json.dumps(val)
                 speclist.append({"key": key, "value": val})
         self._storage_query['spec'] = speclist
     return self._storage_query
Example #35
 def pagination(self, head):
     """
     Construct the pagination part of the page. The head dict provides
     the total number of results and the kwargs which contain
     idx/limit/query/input parameters and other parameters used in
     the URL by the end-user.
     """
     kwds    = head.get('args')
     total   = head.get('nresults')
     apilist = head.get('apilist')
     kwargs  = deepcopy(kwds)
     if  'dasquery' in kwargs:
         del kwargs['dasquery'] # we don't need it
     idx     = getarg(kwargs, 'idx', 0)
     limit   = getarg(kwargs, 'limit', 50)
     uinput  = getarg(kwargs, 'input', '')
     skip_args = ['status', 'error', 'reason']
     page    = ''
     if  datasetPattern(uinput):
         msg = 'By default DAS show dataset with <b>VALID</b> status. '
         msg += 'To query all datasets regardless of their status please use'
         msg += '<span class="example">dataset %s status=*</span> query' % uinput
         msg += ' or use proper status value, e.g. PRODUCTION'
         page += '<div>%s</div><br/>' % msg
     if  total and total > 0:
         params = {} # will keep everything except idx/limit
         for key, val in kwargs.items():
             if  key in skip_args:
                 continue
             if  key != 'idx' and key != 'limit' and key != 'query':
                 params[key] = val
         url   = "%s/request?%s" \
                 % (self.base, urllib.urlencode(params, doseq=True))
         page += self.templatepage('das_pagination', \
             nrows=total, idx=idx, limit=limit, url=url, cgi=cgi, str=str)
     else:
         # distinguish the case when no results vs no API calls
         info = head.get('das_server', None)
         info = pprint.pformat(info) if info else None
         page = self.templatepage('das_noresults', query=uinput, time=time,
                 status=head.get('status', None),
                 reason=head.get('reason', None),
                 info=info, apilist=head.get('apilist', None))
     return page
Example #36
    def setUp(self):
        """
        set up DAS core module
        """
        debug    = 0
        self.db  = 'test_mapping.db'
        config   = deepcopy(das_readconfig())
        dburi    = config['mongodb']['dburi']
        logger   = PrintManager('TestDASMapping', verbose=debug)
        config['logger']  = logger
        config['verbose'] = debug
        dbname   = 'test_mapping'
        collname = 'db'
        config['mappingdb'] = dict(dburi=dburi, dbname=dbname, collname=collname)
        # add some maps to mapping db
        conn = MongoClient(dburi)
        conn.drop_database(dbname)
        self.coll = conn[dbname][collname]
        self.pmap = {"presentation": {"block":[{"ui": "Block name", "das": "block.name"},
            {"ui": "Block size", "das": "block.size"}]}, "type": "presentation"}
        self.coll.insert(self.pmap)

        url     = 'https://cmsweb.cern.ch/dbs/prod/global/DBSReader/acquisitioneras/'
        dformat = 'JSON'
        system  = 'dbs3'
        expire  = 100
        rec = {'system':system, 'urn': 'acquisitioneras', 'format':dformat,
            'instances': ['prod/global'],
            'url':url, 'expire': expire, 'lookup': 'era',
            'params' : {},
             'das_map': [
                 {"das_key": "era", "rec_key":"era.name", "api_arg":"era"}
                 ],
             'type': 'service'
        }
        self.coll.insert(rec)

        ver_token = verification_token(self.coll.find(**PYMONGO_OPTS))
        rec = {'verification_token':ver_token, 'type':'verification_token'}
        self.coll.insert(rec)

        self.mgr = DASMapping(config)
Example #37
 def pagination(self, head):
     """
     Construct the pagination part of the page. The head dict provides
     the total number of results and the kwargs which contain
     idx/limit/query/input parameters and other parameters used in
     the URL by the end-user.
     """
     kwds    = head.get('args')
     total   = head.get('nresults')
     apilist = head.get('apilist')
     kwargs  = deepcopy(kwds)
     if  'dasquery' in kwargs:
         del kwargs['dasquery'] # we don't need it
     idx     = getarg(kwargs, 'idx', 0)
     limit   = getarg(kwargs, 'limit', 10)
     uinput  = getarg(kwargs, 'input', '')
     skip_args = ['status', 'error', 'reason']
     page    = ''
     if  total > 0:
         params = {} # will keep everything except idx/limit
         for key, val in kwargs.iteritems():
             if  key in skip_args:
                 continue
             if  key != 'idx' and key != 'limit' and key != 'query':
                 params[key] = val
         url   = "%s/request?%s" \
                 % (self.base, urllib.urlencode(params, doseq=True))
         page += self.templatepage('das_pagination', \
             nrows=total, idx=idx, limit=limit, url=url)
     else:
         # distinguish the case when no results vs no API calls
         info = head.get('das_server', None)
         info = pprint.pformat(info) if info else None
         page = self.templatepage('das_noresults', query=uinput,
                 status=head.get('status', None),
                 reason=head.get('reason', None),
                 info=info, apilist=head.get('apilist', None))
     return page
Example #38
    def setUp(self):
        """
        set up DAS core module
        """
        debug    = 0
        self.db  = 'test_mapping.db'
        config   = deepcopy(das_readconfig())
        dburi    = config['mongodb']['dburi']
        logger   = PrintManager('TestDASMapping', verbose=debug)
        config['logger']  = logger
        config['verbose'] = debug
        dbname   = 'test_mapping'
        collname = 'db'
        config['mappingdb'] = dict(dburi=dburi, dbname=dbname, collname=collname)
        # add some maps to mapping db
        conn = Connection(dburi)
        conn.drop_database(dbname)
        coll = conn[dbname][collname]
        self.pmap = {"presentation": {"block":[{"ui": "Block name", "das": "block.name"}, 
        {"ui": "Block size", "das": "block.size"}]}}
        coll.insert(self.pmap)

        self.mgr = DASMapping(config)
Example #39
    def setUp(self):
        """
        set up DAS core module
        """
        debug = 0
        self.db = 'test_mapping.db'
        config = deepcopy(das_readconfig())
        dburi = config['mongodb']['dburi']
        logger = PrintManager('TestDASMapping', verbose=debug)
        config['logger'] = logger
        config['verbose'] = debug
        dbname = 'test_mapping'
        collname = 'db'
        config['mappingdb'] = dict(dburi=dburi,
                                   dbname=dbname,
                                   collname=collname)
        # add some maps to mapping db
        conn = MongoClient(dburi)
        conn.drop_database(dbname)
        self.coll = conn[dbname][collname]
        self.pmap = {
            "presentation": {
                "block": [{
                    "ui": "Block name",
                    "das": "block.name"
                }, {
                    "ui": "Block size",
                    "das": "block.size"
                }]
            },
            "type": "presentation"
        }
        self.coll.insert(self.pmap)

        url = 'https://cmsweb.cern.ch/dbs/prod/global/DBSReader/acquisitioneras/'
        dformat = 'JSON'
        system = 'dbs3'
        expire = 100
        rec = {
            'system': system,
            'urn': 'acquisitioneras',
            'format': dformat,
            'instances': ['prod/global'],
            'url': url,
            'expire': expire,
            'lookup': 'era',
            'params': {},
            'das_map': [{
                "das_key": "era",
                "rec_key": "era.name",
                "api_arg": "era"
            }],
            'type': 'service'
        }
        self.coll.insert(rec)

        ver_token = verification_token(self.coll.find(**PYMONGO_OPTS))
        rec = {'verification_token': ver_token, 'type': 'verification_token'}
        self.coll.insert(rec)

        self.mgr = DASMapping(config)
Example #40
 def __init__(self, name, queue, **kwargs):
     self.name = name
     self.queue = queue
     self.extra = deepcopy(kwargs)
Example #41
 def get_scheduled(self):
     "Get a de-classed list of current tasks for web interface."
     with self.lock:
         return deepcopy(self.scheduled)