def __init__(self, config, datasetExpr, datasetNick=None):
    # Resync datasets and parameters whenever one of the watched options changes
    self._changeTrigger = triggerResync(['datasets', 'parameters'])
    # Lumi filter lookup - a non-empty filter pulls in the LumiDataProcessor
    self._lumi_filter = config.getLookup('lumi filter', {},
        parser=parseLumiFilter, strfun=strLumi, onChange=self._changeTrigger)
    if not self._lumi_filter.empty():
        config.set('dataset processor', 'LumiDataProcessor', '+=')
    DataProvider.__init__(self, config, datasetExpr, datasetNick)
    # LumiDataProcessor (instantiated in DataProcessor.__init__) sets lumi metadata as well
    self._lumi_query = config.getBool('lumi metadata',
        not self._lumi_filter.empty(), onChange=self._changeTrigger)
    config.set('phedex sites matcher mode', 'shell', '?=')
    # PhEDex site filter - 'T1_*_Disk' nodes allow user jobs, other T1 nodes don't!
    self._phedexFilter = config.getFilter('phedex sites', '-* T1_*_Disk T2_* T3_*',
        defaultMatcher='blackwhite', defaultFilter='strict', onChange=self._changeTrigger)
    self._onlyComplete = config.getBool('only complete sites', True, onChange=self._changeTrigger)
    self._locationFormat = config.getEnum('location format',
        CMSLocationFormat, CMSLocationFormat.hostname, onChange=self._changeTrigger)
    self._pjrc = JSONRestClient(url='https://cmsweb.cern.ch/phedex/datasvc/json/prod/blockreplicas')
    self._sitedb = SiteDB()
    # Split the dataset expression into path, DBS instance and block selector
    (self._datasetPath, self._datasetInstance, self._datasetBlock) = optSplit(datasetExpr, '@#')
    fallbackInstance = config.get('dbs instance', '', onChange=self._changeTrigger)
    selectedInstance = self._datasetInstance or fallbackInstance
    if not selectedInstance:
        selectedInstance = 'prod/global'
    elif '/' not in selectedInstance:
        selectedInstance = 'prod/%s' % selectedInstance
    self._datasetInstance = selectedInstance
    self._datasetBlock = self._datasetBlock or 'all'
    self.onlyValid = config.getBool('only valid', True, onChange=self._changeTrigger)
def __init__(self, config, datasource_name, dataset_expr, dataset_nick=None, dataset_proc=None):
    # Config view that triggers a dataset/parameter resync when any option below changes
    dataset_config = config.change_view(default_on_change=TriggerResync(['datasets', 'parameters']))
    # Lumi filter lookup - a non-empty filter pulls in the LumiDataProcessor
    self._lumi_filter = dataset_config.get_lookup(
        ['lumi filter', '%s lumi filter' % datasource_name],
        default={}, parser=parse_lumi_filter, strfun=str_lumi)
    if not self._lumi_filter.empty():
        config.set('%s processor' % datasource_name, 'LumiDataProcessor', '+=')
    DataProvider.__init__(self, config, datasource_name, dataset_expr, dataset_nick, dataset_proc)
    # LumiDataProcessor (instantiated in DataProcessor.__init__) sets lumi metadata as well
    self._lumi_query = dataset_config.get_bool(
        ['lumi metadata', '%s lumi metadata' % datasource_name],
        default=not self._lumi_filter.empty())
    config.set('phedex sites matcher mode', 'ShellStyleMatcher', '?=')
    # PhEDex site filter - 'T1_*_Disk' nodes allow user jobs, other T1 nodes don't!
    self._phedex_filter = dataset_config.get_filter('phedex sites', '-* T1_*_Disk T2_* T3_*',
        default_matcher='BlackWhiteMatcher', default_filter='StrictListFilter')
    self._only_complete = dataset_config.get_bool('only complete sites', True)
    self._only_valid = dataset_config.get_bool('only valid', True)
    self._location_format = dataset_config.get_enum('location format',
        CMSLocationFormat, CMSLocationFormat.hostname)
    self._pjrc = JSONRestClient(url='https://cmsweb.cern.ch/phedex/datasvc/json/prod/blockreplicas')
    self._sitedb = SiteDB()
    # Split the dataset expression into path, DBS instance and block selector
    (self._dataset_path, self._dataset_instance,
        self._dataset_block_selector) = split_opt(dataset_expr, '@#')
    fallback_instance = dataset_config.get('dbs instance', '')
    selected_instance = self._dataset_instance or fallback_instance
    if not selected_instance:
        selected_instance = 'prod/global'
    elif '/' not in selected_instance:
        selected_instance = 'prod/%s' % selected_instance
    self._dataset_instance = selected_instance
    self._dataset_block_selector = self._dataset_block_selector or 'all'
def __init__(self, config, datasetExpr, datasetNick=None, datasetID=0):
    # Lumi filter - a non-empty filter pulls in the LumiDataProcessor
    self._lumi_filter = parseLumiFilter(config.get('lumi filter', ''))
    if self._lumi_filter:
        config.set('dataset processor', 'LumiDataProcessor', '+=')
    DataProvider.__init__(self, config, datasetExpr, datasetNick, datasetID)
    # PhEDex blacklist: 'T1_DE_KIT', 'T1_US_FNAL' and '*_Disk' allow user jobs - other T1s don't!
    self._lumi_query = config.getBool('lumi metadata', self._lumi_filter != [])
    self._phedexFilter = config.getFilter('phedex sites', '-T3_US_FNALLPC',
        defaultMatcher='blackwhite', defaultFilter='weak')
    self._phedexT1Filter = config.getFilter('phedex t1 accept', 'T1_DE_KIT T1_US_FNAL',
        defaultMatcher='blackwhite', defaultFilter='weak')
    self._phedexT1Mode = config.get('phedex t1 mode', 'disk').lower()
    self.onlyComplete = config.getBool('only complete sites', True)
    self._locationFormat = config.getEnum('location format',
        CMSLocationFormat, CMSLocationFormat.hostname)
    # Split the dataset expression into path, DBS instance url and block selector
    (self._datasetPath, self._url, self._datasetBlock) = utils.optSplit(datasetExpr, '@#')
    self._url = self._url or config.get('dbs instance', '')
    self._datasetBlock = self._datasetBlock or 'all'
    self.onlyValid = config.getBool('only valid', True)
def __init__(self, config, datasource_name, dataset_expr, dataset_nick=None, dataset_proc=None):
    # Config view that triggers a dataset/parameter resync when any option below changes
    dataset_config = config.change_view(default_on_change=TriggerResync(['datasets', 'parameters']))
    # Lumi filter lookup - a non-empty filter pulls in the LumiDataProcessor
    self._lumi_filter = dataset_config.get_lookup(
        ['lumi filter', '%s lumi filter' % datasource_name],
        default={}, parser=parse_lumi_filter, strfun=str_lumi)
    if not self._lumi_filter.empty():
        config.set('%s processor' % datasource_name, 'LumiDataProcessor', '+=')
    DataProvider.__init__(self, config, datasource_name, dataset_expr, dataset_nick, dataset_proc)
    # LumiDataProcessor (instantiated in DataProcessor.__init__) sets lumi metadata as well
    self._lumi_query = dataset_config.get_bool(
        ['lumi metadata', '%s lumi metadata' % datasource_name],
        default=not self._lumi_filter.empty())
    config.set('phedex sites matcher mode', 'ShellStyleMatcher', '?=')
    # PhEDex site filter - 'T1_*_Disk' nodes allow user jobs, other T1 nodes don't!
    self._phedex_filter = dataset_config.get_filter('phedex sites', '-* T1_*_Disk T2_* T3_*',
        default_matcher='BlackWhiteMatcher', default_filter='StrictListFilter')
    self._only_complete = dataset_config.get_bool('only complete sites', True)
    self._only_valid = dataset_config.get_bool('only valid', True)
    self._allow_phedex = dataset_config.get_bool('allow phedex', True)
    self._location_format = dataset_config.get_enum('location format',
        CMSLocationFormat, CMSLocationFormat.hostname)
    self._pjrc = JSONRestClient(url='https://cmsweb.cern.ch/phedex/datasvc/json/prod/blockreplicas')
    self._sitedb = SiteDB()
    # Split the dataset expression into path, DBS instance and block selector
    (self._dataset_path, self._dataset_instance,
        self._dataset_block_selector) = split_opt(dataset_expr, '@#')
    fallback_instance = dataset_config.get('dbs instance', '')
    selected_instance = self._dataset_instance or fallback_instance
    if not selected_instance:
        selected_instance = 'prod/global'
    elif '/' not in selected_instance:
        selected_instance = 'prod/%s' % selected_instance
    self._dataset_instance = selected_instance
    self._dataset_block_selector = self._dataset_block_selector or 'all'
def __init__(self, config, datasetExpr, datasetNick=None, datasetID=0):
    # Resync datasets and parameters whenever one of the watched options changes
    resyncTrigger = triggerResync(['datasets', 'parameters'])
    # Lumi filter lookup - a non-empty filter pulls in the LumiDataProcessor
    self._lumi_filter = config.getLookup('lumi filter', {},
        parser=parseLumiFilter, strfun=strLumi, onChange=resyncTrigger)
    if not self._lumi_filter.empty():
        config.set('dataset processor', 'LumiDataProcessor', '+=')
    DataProvider.__init__(self, config, datasetExpr, datasetNick, datasetID)
    # LumiDataProcessor (instantiated in DataProcessor.__init__) sets lumi metadata as well
    self._lumi_query = config.getBool('lumi metadata',
        not self._lumi_filter.empty(), onChange=resyncTrigger)
    # PhEDex blacklist: 'T1_DE_KIT', 'T1_US_FNAL' and '*_Disk' allow user jobs - other T1s don't!
    self._phedexFilter = config.getFilter('phedex sites', '-T3_US_FNALLPC',
        defaultMatcher='blackwhite', defaultFilter='weak', onChange=resyncTrigger)
    self._phedexT1Filter = config.getFilter('phedex t1 accept', 'T1_DE_KIT T1_US_FNAL',
        defaultMatcher='blackwhite', defaultFilter='weak', onChange=resyncTrigger)
    self._phedexT1Mode = config.getEnum('phedex t1 mode',
        PhedexT1Mode, PhedexT1Mode.disk, onChange=resyncTrigger)
    self.onlyComplete = config.getBool('only complete sites', True, onChange=resyncTrigger)
    self._locationFormat = config.getEnum('location format',
        CMSLocationFormat, CMSLocationFormat.hostname, onChange=resyncTrigger)
    self._pjrc = JSONRestClient(url='https://cmsweb.cern.ch/phedex/datasvc/json/prod/blockreplicas')
    # Split the dataset expression into path, DBS instance url and block selector
    (self._datasetPath, self._url, self._datasetBlock) = optSplit(datasetExpr, '@#')
    self._url = self._url or config.get('dbs instance', '')
    self._datasetBlock = self._datasetBlock or 'all'
    self.onlyValid = config.getBool('only valid', True, onChange=resyncTrigger)
def __init__(self, config, datasetExpr, datasetNick=None):
    # Resync datasets and parameters whenever one of the watched options changes
    self._changeTrigger = triggerResync(['datasets', 'parameters'])
    # Lumi filter lookup - a non-empty filter pulls in the LumiDataProcessor
    self._lumi_filter = config.getLookup('lumi filter', {},
        parser=parseLumiFilter, strfun=strLumi, onChange=self._changeTrigger)
    if not self._lumi_filter.empty():
        config.set('dataset processor', 'LumiDataProcessor', '+=')
    DataProvider.__init__(self, config, datasetExpr, datasetNick)
    # LumiDataProcessor (instantiated in DataProcessor.__init__) sets lumi metadata as well
    self._lumi_query = config.getBool('lumi metadata',
        not self._lumi_filter.empty(), onChange=self._changeTrigger)
    config.set('phedex sites matcher mode', 'shell', '?=')
    # PhEDex site filter - 'T1_*_Disk' nodes allow user jobs, other T1 nodes don't!
    self._phedexFilter = config.getFilter('phedex sites', '-* T1_*_Disk T2_* T3_*',
        defaultMatcher='blackwhite', defaultFilter='strict', onChange=self._changeTrigger)
    self._onlyComplete = config.getBool('only complete sites', True, onChange=self._changeTrigger)
    self._locationFormat = config.getEnum('location format',
        CMSLocationFormat, CMSLocationFormat.hostname, onChange=self._changeTrigger)
    self._pjrc = JSONRestClient(url='https://cmsweb.cern.ch/phedex/datasvc/json/prod/blockreplicas')
    self._sitedb = SiteDB()
    # Split the dataset expression into path, DBS instance and block selector
    (self._datasetPath, self._datasetInstance, self._datasetBlock) = optSplit(datasetExpr, '@#')
    fallbackInstance = config.get('dbs instance', '', onChange=self._changeTrigger)
    selectedInstance = self._datasetInstance or fallbackInstance
    if not selectedInstance:
        selectedInstance = 'prod/global'
    elif '/' not in selectedInstance:
        selectedInstance = 'prod/%s' % selectedInstance
    self._datasetInstance = selectedInstance
    self._datasetBlock = self._datasetBlock or 'all'
    self.onlyValid = config.getBool('only valid', True, onChange=self._changeTrigger)
def __init__(self, config, datasetExpr, datasetNick=None, datasetID=0):
    # Lumi filter - a non-empty filter pulls in the LumiDataProcessor
    self._lumi_filter = parseLumiFilter(config.get('lumi filter', ''))
    if self._lumi_filter:
        config.set('dataset processor', 'LumiDataProcessor', '+=')
    DataProvider.__init__(self, config, datasetExpr, datasetNick, datasetID)
    # PhEDex blacklist: 'T1_DE_KIT', 'T1_US_FNAL' and '*_Disk' allow user jobs - other T1s don't!
    self._lumi_query = config.getBool('lumi metadata', self._lumi_filter != [])
    self._phedexFilter = config.getFilter('phedex sites', '-T3_US_FNALLPC',
        defaultMatcher='blackwhite', defaultFilter='weak')
    self._phedexT1Filter = config.getFilter('phedex t1 accept', 'T1_DE_KIT T1_US_FNAL',
        defaultMatcher='blackwhite', defaultFilter='weak')
    self._phedexT1Mode = config.get('phedex t1 mode', 'disk').lower()
    self.onlyComplete = config.getBool('only complete sites', True)
    self._locationFormat = config.getEnum('location format',
        CMSLocationFormat, CMSLocationFormat.hostname)
    # Split the dataset expression into path, DBS instance url and block selector
    (self._datasetPath, self._url, self._datasetBlock) = utils.optSplit(datasetExpr, '@#')
    self._url = self._url or config.get('dbs instance', '')
    self._datasetBlock = self._datasetBlock or 'all'
    self.onlyValid = config.getBool('only valid', True)
def __init__(self, config, datasetExpr, datasetNick=None, datasetID=0):
    # Resync datasets and parameters whenever one of the watched options changes
    resyncTrigger = triggerResync(['datasets', 'parameters'])
    # Lumi filter lookup - a non-empty filter pulls in the LumiDataProcessor
    self._lumi_filter = config.getLookup('lumi filter', {},
        parser=parseLumiFilter, strfun=strLumi, onChange=resyncTrigger)
    if not self._lumi_filter.empty():
        config.set('dataset processor', 'LumiDataProcessor', '+=')
    DataProvider.__init__(self, config, datasetExpr, datasetNick, datasetID)
    # LumiDataProcessor (instantiated in DataProcessor.__init__) sets lumi metadata as well
    self._lumi_query = config.getBool('lumi metadata',
        not self._lumi_filter.empty(), onChange=resyncTrigger)
    # PhEDex blacklist: 'T1_DE_KIT', 'T1_US_FNAL' and '*_Disk' allow user jobs - other T1s don't!
    self._phedexFilter = config.getFilter('phedex sites', '-T3_US_FNALLPC',
        defaultMatcher='blackwhite', defaultFilter='weak', onChange=resyncTrigger)
    self._phedexT1Filter = config.getFilter('phedex t1 accept', 'T1_DE_KIT T1_US_FNAL',
        defaultMatcher='blackwhite', defaultFilter='weak', onChange=resyncTrigger)
    self._phedexT1Mode = config.getEnum('phedex t1 mode',
        PhedexT1Mode, PhedexT1Mode.disk, onChange=resyncTrigger)
    self.onlyComplete = config.getBool('only complete sites', True, onChange=resyncTrigger)
    self._locationFormat = config.getEnum('location format',
        CMSLocationFormat, CMSLocationFormat.hostname, onChange=resyncTrigger)
    self._pjrc = JSONRestClient(url='https://cmsweb.cern.ch/phedex/datasvc/json/prod/blockreplicas')
    # Split the dataset expression into path, DBS instance url and block selector
    (self._datasetPath, self._url, self._datasetBlock) = optSplit(datasetExpr, '@#')
    self._url = self._url or config.get('dbs instance', '')
    self._datasetBlock = self._datasetBlock or 'all'
    self.onlyValid = config.getBool('only valid', True, onChange=resyncTrigger)
def __init__(self, config, datasetExpr, datasetNick, datasetID=0):
    DataProvider.__init__(self, config, datasetExpr, datasetNick, datasetID)
    # PhEDex blacklist: 'T1_DE_KIT', 'T1_US_FNAL' and '*_Disk' allow user jobs - other T1s don't!
    self.phedexBL = config.getList('phedex sites', ['-T3_US_FNALLPC'])
    self.phedexWL = config.getList('phedex t1 accept', ['T1_DE_KIT', 'T1_US_FNAL'])
    self.phedexT1 = config.get('phedex t1 mode', 'disk').lower()
    self.onlyComplete = config.getBool('only complete sites', True)
    # Location format must be one of 'hostname', 'sitedb' or 'both'
    self.locationFormat = config.get('location format', 'hostname').lower()
    if self.locationFormat not in ('hostname', 'sitedb', 'both'):
        raise ConfigError('Invalid location format: %s' % self.locationFormat)
    # Split the dataset expression into path, DBS instance url and block selector
    (self.datasetPath, self.url, self.datasetBlock) = utils.optSplit(datasetExpr, '@#')
    # NOTE: QM is kept here (not replaced by 'or') so config.get is always
    # evaluated and the 'dbs instance' option stays registered in the config
    self.url = QM(self.url, self.url, config.get('dbs instance', ''))
    self.datasetBlock = QM(self.datasetBlock, self.datasetBlock, 'all')
    self.includeLumi = config.getBool('keep lumi metadata', False)
    self.onlyValid = config.getBool('only valid', True)
    self.checkUnique = config.getBool('check unique', True)
    # Works in tandem with the active task module (cmssy.py supports only [section] lumi filter!)
    self.selectedLumis = parseLumiFilter(config.get('lumi filter', ''))
    if self.selectedLumis:
        utils.vprint('Runs/lumi section filter enabled! (%d entries)' % len(self.selectedLumis),
            -1, once=True)
        utils.vprint('\tThe following runs and lumi sections are selected:', 1, once=True)
        utils.vprint('\t' + utils.wrapList(formatLumi(self.selectedLumis), 65, ',\n\t'),
            1, once=True)