Example #1
 def __init__(self, config):
     DASAbstractService.__init__(self, 'runsum', config)
     self.results = []
     self.params  = {'DB':'cms_omds_lb', 'FORMAT':'XML'}
     self._keys   = None
     self.map = self.dasmapping.servicemap(self.name)
     map_validator(self.map)
Example #2
 def parser(self, query, dformat, source, api):
     """
     ReqMgr data-service parser.
     """
     if  api == 'inputdataset':
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             try:
                 data = row['dataset']
                 data = \
                 data['WMCore.RequestManager.DataStructs.Request.Request']
                 if  data.has_key('InputDatasetTypes'):
                     arr = []
                     for key, val in data['InputDatasetTypes'].iteritems():
                         arr.append({'dataset':key, 'type':val})
                     data['InputDatasetTypes'] = arr
                 yield data
             except:
                 yield row
     elif api == 'configIDs':
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             try:
                 for key, val in row['dataset'].iteritems():
                     yield dict(request_name=key, config_files=val)
             except:
                 pass
Example #3
 def __init__(self, config):
     DASAbstractService.__init__(self, 'combined', config)
     self.map = self.dasmapping.servicemap(self.name)
     map_validator(self.map)
     self.dbs = 'dbs3'
     self.sites = {'tstamp': 0}  # local cache
     self.thr = 24 * 60 * 60  # 1 day for local cache
Example #4
 def __init__(self, config):
     DASAbstractService.__init__(self, 'combined', config)
     self.map = self.dasmapping.servicemap(self.name)
     map_validator(self.map)
     self.dbs = 'dbs3'
     self.sites = {'tstamp': 0} # local cache
     self.thr = 24*60*60 # 1 day for local cache
Example #5
 def __init__(self, config):
     DASAbstractService.__init__(self, 'dbs3', config)
     self.reserved = ['api', 'apiversion']
     self.map = self.dasmapping.servicemap(self.name)
     map_validator(self.map)
     self.prim_instance = config['dbs']['dbs_global_instance']
     self.instances = config['dbs']['dbs_instances']
Example #6
File: dbs_service.py Project: ktf/DAS
 def __init__(self, config):
     DASAbstractService.__init__(self, 'dbs', config)
     self.reserved = ['api', 'apiversion']
     self.map = self.dasmapping.servicemap(self.name)
     map_validator(self.map)
     self.prim_instance = self.dasmapping.dbs_global_instance(self.name)
     self.instances = self.dasmapping.dbs_instances(self.name)
     self.extended_expire = config['dbs'].get('extended_expire', 0)
     self.extended_threshold = config['dbs'].get('extended_threshold', 0)
Example #7
    def __init__(self, config):
        DASAbstractService.__init__(self, 'xwho', config)
        self.map = self.dasmapping.servicemap(self.name)
        map_validator(self.map)

        self.re_summary_ids = re.compile(r'<a href="/xwho/people/([0-9]{6})">')

        self.re_find_name = re.compile(r'<h1>(.*?)</h1>')
        self.re_find_email = re.compile(r'<a href=mailto:(.*?)>')
        self.re_find_phone = re.compile(r'<b>Tel:</b>([0-9 ]+)')
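The xwho service above screen-scrapes HTML people pages with plain regular expressions. A minimal, self-contained sketch of what those three patterns capture; the page fragment below is invented for illustration and is not part of the service:

 import re

 re_find_name = re.compile(r'<h1>(.*?)</h1>')
 re_find_email = re.compile(r'<a href=mailto:(.*?)>')
 re_find_phone = re.compile(r'<b>Tel:</b>([0-9 ]+)')

 # invented page fragment, used only to show what each pattern extracts
 page = '<h1>Jane Doe</h1><a href=mailto:jane.doe@example.org>mail</a><b>Tel:</b>12345 67890'
 print(re_find_name.search(page).group(1))   # Jane Doe
 print(re_find_email.search(page).group(1))  # jane.doe@example.org
 print(re_find_phone.search(page).group(1))  # 12345 67890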
Example #8
File: dbs3_service.py Project: ktf/DAS
 def __init__(self, config):
     DASAbstractService.__init__(self, "dbs3", config)
     self.reserved = ["api", "apiversion"]
     self.map = self.dasmapping.servicemap(self.name)
     map_validator(self.map)
     self.prim_instance = self.dasmapping.dbs_global_instance(self.name)
     self.instances = self.dasmapping.dbs_instances(self.name)
     self.extended_expire = config["dbs"].get("extended_expire", 0)
     self.extended_threshold = config["dbs"].get("extended_threshold", 0)
     self.dbs_choice = config["das"].get("main_dbs", "dbs3")
Example #9
 def __init__(self, config):
     DASAbstractService.__init__(self, 'dbs3', config)
     self.reserved = ['api', 'apiversion']
     self.map = self.dasmapping.servicemap(self.name)
     map_validator(self.map)
     self.prim_instance = self.dasmapping.dbs_global_instance(self.name)
     self.instances = self.dasmapping.dbs_instances(self.name)
     self.extended_expire = config['dbs'].get('extended_expire', 0)
     self.extended_threshold = config['dbs'].get('extended_threshold', 0)
     self.dbs_choice = config['das'].get('main_dbs', 'dbs3')
Example #10
File: xwho_service.py Project: dmwm/DAS
    def __init__(self, config):
        DASAbstractService.__init__(self, "xwho", config)
        self.map = self.dasmapping.servicemap(self.name)
        map_validator(self.map)

        self.re_summary_ids = re.compile(r'<a href="/xwho/people/([0-9]{6})">')

        self.re_find_name = re.compile(r"<h1>(.*?)</h1>")
        self.re_find_email = re.compile(r"<a href=mailto:(.*?)>")
        self.re_find_phone = re.compile(r"<b>Tel:</b>([0-9 ]+)")
Example #11
 def parser(self, query, dformat, source, api):
     """
     ReqMgr data-service parser.
     """
     if  api == 'inputdataset' or api == 'outputdataset':
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             key = 'WMCore.RequestManager.DataStructs.Request.Request'
             try:
                 data = row['dataset']
                 if  isinstance(data, dict) and 'error' in data:
                     yield row
                 else:
                     data = data[key]
                     if  'InputDatasetTypes' in data:
                         arr = []
                         for key, val in \
                                 data['InputDatasetTypes'].items():
                             arr.append({'dataset':key, 'type':val})
                         data['InputDatasetTypes'] = arr
                     yield data
             except:
                 yield row
     elif api == 'datasetByPrepID':
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             data = row['dataset']
             for val in data.values():
                 if  isinstance(val, basestring):
                     yield {'dataset':{'name': val}}
                 elif isinstance(val, list):
                     for vvv in val:
                         yield {'dataset':{'name': vvv}}
     elif api == 'recentDatasetByPrepID':
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             yield {'dataset':{'name':row['dataset']}}
     elif api == 'configIDs':
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             try:
                 data = row['dataset']
                 if  isinstance(data, dict) and 'error' in data:
                     yield row
                 else:
                     for key, val in data.items():
                         yield dict(request_name=key, config_files=val)
             except:
                 pass
     else:
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             yield row
Example #12
 def parser(self, query, dformat, source, api):
     """
     ReqMgr data-service parser.
     """
     if api == 'inputdataset' or api == 'outputdataset':
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             key = 'WMCore.RequestManager.DataStructs.Request.Request'
             try:
                 data = row['dataset']
                 if isinstance(data, dict) and 'error' in data:
                     yield row
                 else:
                     data = data[key]
                     if 'InputDatasetTypes' in data:
                         arr = []
                         for key, val in \
                                 data['InputDatasetTypes'].items():
                             arr.append({'dataset': key, 'type': val})
                         data['InputDatasetTypes'] = arr
                     yield data
             except:
                 yield row
     elif api == 'datasetByPrepID':
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             data = row['dataset']
             for val in data.values():
                 if isinstance(val, basestring):
                     yield {'dataset': {'name': val}}
                 elif isinstance(val, list):
                     for vvv in val:
                         yield {'dataset': {'name': vvv}}
     elif api == 'recentDatasetByPrepID':
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             yield {'dataset': {'name': row['dataset']}}
     elif api == 'configIDs':
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             try:
                 data = row['dataset']
                 if isinstance(data, dict) and 'error' in data:
                     yield row
                 else:
                     for key, val in data.items():
                         yield dict(request_name=key, config_files=val)
             except:
                 pass
     else:
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             yield row
Example #13
 def parser(self, query, dformat, source, api):
     """
     ReqMgr2 data-service parser.
     """
     if  api == 'inputdataset' or api == 'outputdataset':
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             key = 'WMCore.RequestManager.DataStructs.Request.Request'
             try:
                 data = row['dataset']
                 if  data == {'result':[]}:
                     continue
                 if  isinstance(data, dict) and 'error' in data:
                     yield row
                 else:
                     data = data[key]
                     if  'InputDatasetTypes' in data:
                         arr = []
                         for key, val in \
                                 data['InputDatasetTypes'].items():
                             arr.append({'dataset':key, 'type':val})
                         data['InputDatasetTypes'] = arr
                     yield data
             except:
                 yield row
     elif api == 'datasetByPrepID':
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             data = row['dataset']['result']
             for rdict in data:
                 for _, val in rdict.items():
                     for name in val['OutputDatasets']:
                         yield {'dataset':{'name': name}}
     elif api == 'configIDs':
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             try:
                 data = row['dataset']
                 if  isinstance(data, dict) and 'error' in data:
                     yield row
                 else:
                     for key, val in data.items():
                         yield dict(request_name=key, config_files=val)
             except:
                 pass
     else:
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             yield row
Example #14
 def parser(self, query, dformat, source, api):
     """
     ReqMgr2 data-service parser.
     """
     if api == 'inputdataset' or api == 'outputdataset':
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             key = 'WMCore.RequestManager.DataStructs.Request.Request'
             try:
                 data = row['dataset']
                 if data == {'result': []}:
                     continue
                 if isinstance(data, dict) and 'error' in data:
                     yield row
                 else:
                     data = data[key]
                     if 'InputDatasetTypes' in data:
                         arr = []
                         for key, val in \
                                 data['InputDatasetTypes'].items():
                             arr.append({'dataset': key, 'type': val})
                         data['InputDatasetTypes'] = arr
                     yield data
             except:
                 yield row
     elif api == 'datasetByPrepID':
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             data = row['dataset']['result']
             for rdict in data:
                 for _, val in rdict.items():
                     for name in val['OutputDatasets']:
                         yield {'dataset': {'name': name}}
     elif api == 'configIDs':
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             try:
                 data = row['dataset']
                 if isinstance(data, dict) and 'error' in data:
                     yield row
                 else:
                     for key, val in data.items():
                         yield dict(request_name=key, config_files=val)
             except:
                 pass
     else:
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             yield row
Example #15
    def __init__(self, config):
        DASAbstractService.__init__(self, 'cmsswconfigs', config)
        self.headers = {'Accept': 'text/json;application/json'}
        self.map = self.dasmapping.servicemap(self.name)
        map_validator(self.map)

        # specify access to DB
        dburi = config.get('dburi')
        self.conn = db_connection(dburi)
        database  = self.conn['configdb']
        self.managers = {}
        for release in database.collection_names():
            if  release.find('index') == -1:
                self.managers[release] = MongoQuery(release)
        self.releases = self.managers.keys()
Example #16
    def __init__(self, config):
        DASAbstractService.__init__(self, 'cmsswconfigs', config)
        self.headers = {'Accept': 'text/json;application/json'}
        self.map = self.dasmapping.servicemap(self.name)
        map_validator(self.map)

        # specify access to DB
        dburi = config.get('dburi')
        self.conn = db_connection(dburi)
        database = self.conn['configdb']
        self.managers = {}
        for release in database.collection_names():
            if release.find('index') == -1:
                self.managers[release] = MongoQuery(release)
        self.releases = list(self.managers.keys())
Example #17
 def parser(self, query, dformat, source, api):
     """
     CondDB data-service parser.
     """
     gen = DASAbstractService.parser(self, query, dformat, source, api)
     for row in gen:
         yield row
Example #18
 def parser(self, query, dformat, source, api):
     """
     DBS3 data-service parser.
     """
     if  api == 'site4dataset':
         sites = set()
         for rec in json_parser(source, self.logger):
             if  isinstance(rec, list):
                 for row in rec:
                     orig_site = row['origin_site_name']
                     if  orig_site not in sites:
                         sites.add(orig_site)
             else:
                 orig_site = rec.get('origin_site_name', None)
                 if  orig_site and orig_site not in sites:
                     sites.add(orig_site)
         for site in sites:
             yield {'site': {'name': site}}
     elif api == 'filesummaries':
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             yield row['dataset']
     elif api == 'blockparents':
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             try:
                 del row['parent']['this_block_name']
             except:
                 pass
             yield row
     elif api == 'fileparents':
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             parent = row['parent']
             for val in parent['parent_logical_file_name']:
                 yield dict(name=val)
     elif api == 'filechildren':
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             parent = row['child']
             for val in parent['child_logical_file_name']:
                 yield dict(name=val)
     else:
         gen = DASAbstractService.parser(self, query, dformat, source, api)
         for row in gen:
             yield row
Example #19
 def parser(self, query, dformat, source, api):
     """
     CondDB data-service parser.
     """
     gen = DASAbstractService.parser(self, query, dformat, source, api)
     for row in gen:
         if  api == 'get_lumi_info':
             for lumi in row['lumi']['Lumi']:
                 yield lumi
         else:
             yield row
Example #20
 def __init__(self, config):
     DASAbstractService.__init__(self, 'lumidb', config)
     self.map = self.dasmapping.servicemap(self.name, 'javaservlet')
     map_validator(self.map)
Example #21
 def __init__(self, config):
     DASAbstractService.__init__(self, "conddb", config)
     self.reserved = ["api", "apiversion"]
     self.map = self.dasmapping.servicemap(self.name)
     map_validator(self.map)
Example #22
 def __init__(self, config):
     DASAbstractService.__init__(self, 'phedex', config)
     self.map = self.dasmapping.servicemap(self.name)
     map_validator(self.map)
     self.notationmap = self.notations()
Example #23
 def __init__(self, config):
     DASAbstractService.__init__(self, 'runregistry', config)
     self.headers = {'Accept': 'text/json;application/json'}
     self.map = self.dasmapping.servicemap(self.name)
     map_validator(self.map)
Example #24
 def __init__(self, config):
     DASAbstractService.__init__(self, 'reqmgr2', config)
     self.map = self.dasmapping.servicemap(self.name)
     map_validator(self.map)
Example #25
 def __init__(self, config):
     DASAbstractService.__init__(self, 'dashboard', config)
     self.headers = {'Accept': 'text/xml'}
     self.map = self.dasmapping.servicemap(self.name)
     map_validator(self.map)
Example #26
 def __init__(self, config):
     DASAbstractService.__init__(self, 'conddb', config)
     self.reserved = ['api', 'apiversion']
     self.map = self.dasmapping.servicemap(self.name)
     map_validator(self.map)
Example #27
 def parser_helper(self, query, dformat, source, api):
     """
     DBS3 data-service parser helper, it is used by parser method.
     """
     if  api in ['site4dataset', 'site4block']:
         gen = json_parser(source, self.logger)
     else:
         gen = DASAbstractService.parser(self, query, dformat, source, api)
     if  api in ['site4dataset', 'site4block']:
         sites = set()
         for rec in gen:
             if  isinstance(rec, list):
                 for row in rec:
                     orig_site = row['origin_site_name']
                     if  orig_site not in sites:
                         sites.add(orig_site)
             else:
                 orig_site = rec.get('origin_site_name', None)
                 if  orig_site and orig_site not in sites:
                     sites.add(orig_site)
         for site in sites:
             yield {'site': {'name': site}}
     elif api == 'datasets' or api == 'dataset_info' or api == 'datasetlist':
         for row in gen:
             row['name'] = row['dataset']
             del row['dataset']
             yield {'dataset':row}
     elif api == 'filesummaries':
         name = query.mongo_query['spec']['dataset.name']
         for row in gen:
             row['dataset']['name'] = name
             yield row
     elif api == 'summary4dataset_run' or api == 'summary4block_run':
         spec = query.mongo_query.get('spec', {})
         dataset = spec.get('dataset.name', '')
         block = spec.get('block.name', '')
         run = spec.get('run.run_number', 0)
         if  isinstance(run, dict): # we got a run range
             if  '$in' in run:
                 run = run['$in']
             elif '$lte' in run:
                 run = range(run['$gte'], run['$lte'])
         for row in gen:
             if  run:
                 row.update({"run": run})
             if  dataset:
                 row.update({"dataset": dataset})
             if  block:
                 row.update({"block": block})
             yield row
     elif api == 'releaseversions':
         for row in gen:
             values = row['release']['release_version']
             for val in values:
                 yield dict(release=dict(name=val))
     elif api == 'datasetaccesstypes':
         for row in gen:
             values = row['status']['dataset_access_type']
             for val in values:
                 yield dict(status=dict(name=val))
     elif api == 'blockorigin':
         for row in gen:
             yield row
     elif api == 'blockparents':
         for row in gen:
             try:
                 del row['parent']['this_block_name']
             except:
                 pass
             yield row
     elif api == 'fileparents':
         for row in gen:
             parent = row['parent']
             for val in parent['parent_logical_file_name']:
                 yield dict(name=val)
     elif api == 'runs_via_dataset' or api == 'runs':
         for row in gen:
             values = row.get('run', {}).get('run_num', 'N/A')
             if  isinstance(values, list):
                 for val in values:
                     yield dict(run_number=val)
             else:
                 yield dict(run_number=values)
     elif api == 'filechildren':
         for row in gen:
             parent = row['child']
             for val in parent['child_logical_file_name']:
                 yield dict(name=val)
     elif api == 'files' or api == 'files_via_dataset' or \
         api == 'files_via_block':
         status = 'VALID'
         for row in gen:
             if  'spec' in query.mongo_query:
                 if  'status.name' in query.mongo_query['spec']:
                     status = query.mongo_query['spec']['status.name']
             try:
                 file_status = row['file']['is_file_valid']
             except KeyError:
                 file_status = 0 # file status is unknown
             if  status == '*': # any file
                 pass
             elif  status == 'INVALID': # filter out valid files
                 if  int(file_status) == 1:# valid status
                     row = None
             else: # filter out invalid files
                 if  int(file_status) == 0:# invalid status
                     row = None
             if  row:
                 yield row
     elif api == 'filelumis' or api == 'filelumis4block':
         for row in gen:
             if  'lumi' in row:
                 if  'lumi_section_num' in row['lumi']:
                     val = row['lumi']['lumi_section_num']
                     row['lumi']['lumi_section_num'] = convert2ranges(val)
                 yield row
             else:
                 yield row
     else:
         for row in gen:
             yield row
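In the 'files', 'files_via_dataset', and 'files_via_block' branch above, rows are filtered by file validity taken from the query spec. A minimal sketch of that filter as a standalone predicate (keep_file is a hypothetical helper, not part of DAS) under the same rules: '*' keeps every file, 'INVALID' keeps only invalid files, and the default keeps only valid ones:

 def keep_file(row, status='VALID'):
     """Hypothetical predicate mirroring the file-status filter above."""
     try:
         valid = int(row['file']['is_file_valid'])
     except KeyError:
         valid = 0                   # file status unknown
     if status == '*':               # any file requested
         return True
     if status == 'INVALID':         # keep only invalid files
         return valid == 0
     return valid == 1               # default: keep only valid files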
Example #28
 def __init__(self, config):
     DASAbstractService.__init__(self, 'combined', config)
     self.map = self.dasmapping.servicemap(self.name)
     map_validator(self.map)
     dbs = config['das'].get('main_dbs', 'dbs')
     self.dbs = dbs
Example #29
 def __init__(self, config):
     DASAbstractService.__init__(self, 'dq', config)
     self._keys = None
     self.map = self.dasmapping.servicemap(self.name)
     map_validator(self.map)
Example #30
File: dbs3_service.py Project: ktf/DAS
 def parser_helper(self, query, dformat, source, api):
     """
     DBS3 data-service parser helper, it is used by parser method.
     """
     if api == "site4dataset":
         gen = json_parser(source, self.logger)
     else:
         gen = DASAbstractService.parser(self, query, dformat, source, api)
     if api == "site4dataset":
         sites = set()
         for rec in gen:
             if isinstance(rec, list):
                 for row in rec:
                     orig_site = row["origin_site_name"]
                     if orig_site not in sites:
                         sites.add(orig_site)
             else:
                 orig_site = rec.get("origin_site_name", None)
                 if orig_site and orig_site not in sites:
                     sites.add(orig_site)
         for site in sites:
             yield {"site": {"name": site}}
     elif api == "datasets" or api == "dataset_info":
         for row in gen:
             row["name"] = row["dataset"]
             del row["dataset"]
             yield {"dataset": row}
     elif api == "filesummaries":
         name = query.mongo_query["spec"]["dataset.name"]
         for row in gen:
             row["dataset"]["name"] = name
             yield row
     elif api == "summary4dataset_run" or api == "summary4block_run":
         spec = query.mongo_query.get("spec", {})
         dataset = spec.get("dataset.name", "")
         block = spec.get("block.name", "")
         run = spec.get("run.run_number", 0)
         if isinstance(run, dict):  # we got a run range
             if "$in" in run:
                 run = run["$in"]
             elif "$lte" in run:
                 run = range(run["$gte"], run["$lte"])
         for row in gen:
             if run:
                 row.update({"run": run})
             if dataset:
                 row.update({"dataset": dataset})
             if block:
                 row.update({"block": block})
             yield row
     elif api == "blockorigin":
         for row in gen:
             yield row
     elif api == "blockparents":
         for row in gen:
             try:
                 del row["parent"]["this_block_name"]
             except:
                 pass
             yield row
     elif api == "fileparents":
         for row in gen:
             parent = row["parent"]
             for val in parent["parent_logical_file_name"]:
                 yield dict(name=val)
     elif api == "runs_via_dataset" or api == "runs":
         for row in gen:
             values = row["run"]["run_num"]
             if isinstance(values, list):
                 for val in values:
                     yield dict(run_number=val)
             else:
                 yield dict(run_number=values)
     elif api == "filechildren":
         for row in gen:
             parent = row["child"]
             for val in parent["child_logical_file_name"]:
                 yield dict(name=val)
     elif api == "files" or api == "files_via_dataset" or api == "files_via_block":
         status = "VALID"
         for row in gen:
             if "spec" in query.mongo_query:
                 if "status.name" in query.mongo_query["spec"]:
                     status = query.mongo_query["spec"]["status.name"]
             file_status = row["file"]["is_file_valid"]
             if status == "INVALID":  # filter out valid files
                 if int(file_status) == 1:  # valid status
                     row = None
             else:  # filter out invalid files
                 if int(file_status) == 0:  # invalid status
                     row = None
             if row:
                 yield row
     elif api == "filelumis" or api == "filelumis4block":
         for row in gen:
             if "lumi" in row:
                 if "lumi_section_num" in row["lumi"]:
                     val = row["lumi"]["lumi_section_num"]
                     row["lumi"]["lumi_section_num"] = convert2ranges(val)
                 yield row
             else:
                 yield row
     else:
         for row in gen:
             yield row
Example #31
File: dq_service.py Project: dmwm/DAS
 def __init__(self, config):
     DASAbstractService.__init__(self, "dq", config)
     self._keys = None
     self.map = self.dasmapping.servicemap(self.name)
     map_validator(self.map)
Example #32
File: reqmgr_service.py Project: ktf/DAS
 def __init__(self, config):
     DASAbstractService.__init__(self, 'reqmgr', config)
     self.map = self.dasmapping.servicemap(self.name)
     map_validator(self.map)
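Taken together, the constructors above all follow the same registration skeleton. A minimal sketch of that shared pattern; the class and the 'myservice' name are hypothetical, while DASAbstractService, self.dasmapping.servicemap, and map_validator are the DAS pieces used verbatim throughout the examples:

 class MyService(DASAbstractService):
     """Hypothetical DAS data-service stub, shown only to summarize the pattern."""
     def __init__(self, config):
         DASAbstractService.__init__(self, 'myservice', config)  # register the service name
         self.map = self.dasmapping.servicemap(self.name)         # load this service's API map
         map_validator(self.map)                                  # fail early on a malformed map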