def getConfig(configFile = None, configDict = None, section = None, additional = None):
	"""Build a config object, optionally scoping dict options and the view to *section*."""
	# Nest the raw option dict below the requested section name
	if configDict and section:
		configDict = {section: configDict}
	config = createConfig(configFile, configDict, additional = additional)
	if not section:
		return config
	return config.changeView(addSections = [section])
Example #2
0
def getConfig(configFile = None, configDict = None, section = None, additional = None):
	"""Create a configuration; wrap configDict under *section* and narrow the view to it when given."""
	wrappedDict = {section: configDict} if (configDict and section) else configDict
	config = createConfig(configFile, wrappedDict, additional = additional)
	if section:
		config = config.changeView(addSections = [section])
	return config
Example #3
0
 def getEntries(self, path, metadata, events, seList, objStore):
     """Annotate *metadata* with 'PARENT_PATH' and re-yield the entry tuple.

     Lazily builds a map from transformed LFNs to their parent dataset,
     read either from self._source or from the workdir 'datacache.dat'.
     """
     datacachePath = os.path.join(objStore.get('GC_WORKDIR', ''),
                                  'datacache.dat')
     # Fall back to the cached dataset file when no explicit source is set
     source = utils.QM((self._source == '')
                       and os.path.exists(datacachePath), datacachePath,
                       self._source)
     if source and (source not in self._lfnMap):
         # Build the LFN -> parent dataset map from all blocks of the source
         pSource = DataProvider.createInstance('ListProvider',
                                               createConfig(), source)
         for (n, fl) in imap(
                 lambda b:
             (b[DataProvider.Dataset], b[DataProvider.FileList]),
                 pSource.getBlocks()):
             self._lfnMap.setdefault(source, {}).update(
                 dict(
                     imap(
                         lambda fi:
                         (self.lfnTrans(fi[DataProvider.URL]), n), fl)))
     pList = set()
     # Resolve parents for every configured metadata key that is present
     for key in ifilter(lambda k: k in metadata, self._parentKeys):
         pList.update(
             imap(
                 lambda pPath: self._lfnMap.get(source, {}).get(
                     self.lfnTrans(pPath)), metadata[key]))
     # Keep only successful (non-None) lookups
     metadata['PARENT_PATH'] = lfilter(identity, pList)
     yield (path, metadata, events, seList, objStore)
 def loadFromFile(path):
     """Create a ListProvider for *path* using a minimal, file-free config."""
     optionDict = {'dataset': {'dataset processor': 'NullDataProcessor'}}
     cfg = createConfig(useDefaultFiles=False, configDict=optionDict)
     return DataProvider.createInstance('ListProvider', cfg, path)
	def __init__(self, config):
		"""Load the external task config and select its successfully finished jobs.

		The log verbosity is temporarily lowered while the external
		configuration is parsed and restored afterwards.
		"""
		InfoScanner.__init__(self, config)
		# Lower verbosity while reading the external config; the finally
		# block guarantees it is restored even if parsing raises
		newVerbosity = utils.verbosity(utils.verbosity() - 3)
		try:
			extConfigFN = config.getPath('source config')
			extConfig = createConfig(extConfigFN).changeView(setSections = ['global'])
			self._extWorkDir = extConfig.getWorkPath()
			self._extTask = extConfig.getPlugin(['task', 'module'], cls = 'TaskModule')
			selector = config.get('source job selector', '')
			# Only jobs that finished successfully are considered for scanning
			extJobDB = JobDB(extConfig, jobSelector = lambda jobNum, jobObj: jobObj.state == Job.SUCCESS)
			self._selected = sorted(extJobDB.getJobs(JobSelector.create(selector, task = self._extTask)))
		finally:
			utils.verbosity(newVerbosity + 3)
	def getEntries(self, path, metadata, events, seList, objStore):
		"""Add 'PARENT_PATH' to *metadata* and re-yield the entry tuple.

		Parent lookups use a lazily built map from transformed LFNs to
		their parent dataset, read from self._source or the workdir
		'datacache.dat' file.
		"""
		datacachePath = os.path.join(objStore.get('GC_WORKDIR', ''), 'datacache.dat')
		# Fall back to the cached dataset file when no explicit source is configured
		source = utils.QM((self._source == '') and os.path.exists(datacachePath), datacachePath, self._source)
		if source and (source not in self._lfnMap):
			# Build the LFN -> parent dataset map from all blocks of the source
			pSource = DataProvider.createInstance('ListProvider', createConfig(), source)
			for (n, fl) in imap(lambda b: (b[DataProvider.Dataset], b[DataProvider.FileList]), pSource.getBlocks()):
				self._lfnMap.setdefault(source, {}).update(dict(imap(lambda fi: (self.lfnTrans(fi[DataProvider.URL]), n), fl)))
		pList = set()
		# Resolve parents for every configured metadata key that is present
		for key in ifilter(lambda k: k in metadata, self._parentKeys):
			pList.update(imap(lambda pPath: self._lfnMap.get(source, {}).get(self.lfnTrans(pPath)), metadata[key]))
		# Keep only successful (non-None) lookups
		metadata['PARENT_PATH'] = lfilter(identity, pList)
		yield (path, metadata, events, seList, objStore)
	def __init__(self, config):
		"""Load the external workflow/task and select its successfully finished jobs."""
		InfoScanner.__init__(self, config)
		ext_config_fn = config.getPath('source config')
		ext_config = createConfig(ext_config_fn).changeView(setSections = ['global'])
		self._extWorkDir = ext_config.getWorkPath()
		# Silence user-level log output while instantiating the external
		# workflow; re-enable it even if plugin creation raises
		user_logger = logging.getLogger('user')
		user_logger.disabled = True
		try:
			self._extWorkflow = ext_config.getPlugin('workflow', 'Workflow:global', cls = 'Workflow',
				pargs = ('task',))
		finally:
			user_logger.disabled = False
		self._extTask = self._extWorkflow.task
		selector = config.get('source job selector', '')
		# Only jobs that finished successfully are considered for scanning
		ext_job_db = JobDB(ext_config, jobSelector = lambda jobNum, jobObj: jobObj.state == Job.SUCCESS)
		self._selected = sorted(ext_job_db.getJobs(JobSelector.create(selector, task = self._extTask)))
 def __init__(self, config):
     """Load the external task config and select its successfully finished jobs.

     The log verbosity is temporarily lowered while the external
     configuration is parsed and restored afterwards.
     """
     InfoScanner.__init__(self, config)
     # Lower verbosity while reading the external config; the finally
     # block guarantees it is restored even if parsing raises
     newVerbosity = utils.verbosity(utils.verbosity() - 3)
     try:
         extConfigFN = config.getPath('source config')
         extConfig = createConfig(extConfigFN).changeView(
             setSections=['global'])
         self._extWorkDir = extConfig.getWorkPath()
         self._extTask = extConfig.getPlugin(['task', 'module'],
                                             cls='TaskModule')
         selector = config.get('source job selector', '')
         # Only jobs that finished successfully are considered for scanning
         extJobDB = JobDB(
             extConfig,
             jobSelector=lambda jobNum, jobObj: jobObj.state == Job.SUCCESS)
         self._selected = sorted(
             extJobDB.getJobs(
                 JobSelector.create(selector, task=self._extTask)))
     finally:
         utils.verbosity(newVerbosity + 3)
	def __init__(self, config, datasetExpr, datasetNick = None, datasetID = 0):
		"""Configure the scanner pipeline for datasets taken from another GC task.

		*datasetExpr* is either a work directory or a config file path,
		optionally suffixed with '%<job selector>'.
		"""
		if os.path.isdir(datasetExpr):
			# Work directory given: scan its output directories directly
			GCProvider.stageDir[None] = ['OutputDirsFromWork']
			config.set('source directory', datasetExpr)
			datasetExpr = os.path.join(datasetExpr, 'work.conf')
		else:
			# Config file given - split off the optional '%<job selector>' suffix
			GCProvider.stageDir[None] = ['OutputDirsFromConfig', 'MetadataFromTask']
			datasetExpr, selector = utils.optSplit(datasetExpr, '%')
			config.set('source config', datasetExpr)
			config.set('source job selector', selector)
		extConfig = createConfig(datasetExpr)
		extModule = extConfig.changeView(setSections = ['global']).get(['task', 'module'])
		if 'ParaMod' in extModule: # handle old config files
			extModule = extConfig.changeView(setSections = ['ParaMod']).get('module')
		# Assemble scanner list: module-dependent stage entries plus fixed scanners
		sGet = lambda scannerDict: scannerDict.get(None) + scannerDict.get(extModule, [])
		sList = sGet(GCProvider.stageDir) + ['JobInfoFromOutputDir', 'FilesFromJobInfo'] + sGet(GCProvider.stageFile) + ['DetermineEvents', 'AddFilePrefix']
		ScanProviderBase.__init__(self, config, sList, datasetNick, datasetID)
	def loadPartitionsForScript(path, cfg = None):
		"""Load a partition file from *path* and return the restored splitter."""
		reader = DataSplitterIO.createInstance('DataSplitterIOAuto')
		src = reader.loadSplitting(path)
		# Transfer config protocol (in case no split function is called)
		protocol = {}
		for (sectionName, optionMap) in src.metadata.items():
			prefix = sectionName.replace('dataset', '').strip()
			for (optKey, optValue) in optionMap.items():
				if prefix:
					optKey = '[%s] %s' % (prefix, optKey)
				protocol[optKey.strip()] = optValue
				if cfg is not None:
					cfg.set(optKey, str(optValue))
		# Create and setup splitter
		if cfg is None:
			cfg = createConfig(configDict = src.metadata)
		splitter = DataSplitter.createInstance(src.classname, cfg)
		splitter.setState(src, protocol)
		return splitter
Example #11
0
 def loadPartitionsForScript(path, cfg=None):
     """Load a partition file from *path* and return the restored splitter.

     When *cfg* is given, all stored metadata options are written into it;
     otherwise a fresh config is created from the stored metadata.
     """
     src = DataSplitterIO.createInstance(
         'DataSplitterIOAuto').loadSplitting(path)
     # Transfer config protocol (in case no split function is called)
     protocol = {}
     for (section, options) in src.metadata.items():
         # Strip the 'dataset' prefix from section names for protocol keys
         section = section.replace('dataset', '').strip()
         for (option, value) in options.items():
             if section:
                 option = '[%s] %s' % (section, option)
             protocol[option.strip()] = value
             if cfg is not None:
                 cfg.set(option, str(value))
     # Create and setup splitter
     if cfg is None:
         cfg = createConfig(configDict=src.metadata)
     splitter = DataSplitter.createInstance(src.classname, cfg)
     splitter.setState(src, protocol)
     return splitter
Example #12
0
 def __init__(self, config):
     """Load the external workflow/task and select its successfully finished jobs."""
     InfoScanner.__init__(self, config)
     ext_config_fn = config.getPath('source config')
     ext_config = createConfig(ext_config_fn).changeView(
         setSections=['global'])
     self._extWorkDir = ext_config.getWorkPath()
     # Silence user-level log output while instantiating the external
     # workflow; re-enable it even if plugin creation raises
     user_logger = logging.getLogger('user')
     user_logger.disabled = True
     try:
         self._extWorkflow = ext_config.getPlugin('workflow',
                                                  'Workflow:global',
                                                  cls='Workflow',
                                                  pargs=('task', ))
     finally:
         user_logger.disabled = False
     self._extTask = self._extWorkflow.task
     selector = config.get('source job selector', '')
     # Only jobs that finished successfully are considered for scanning
     ext_job_db = JobDB(
         ext_config,
         jobSelector=lambda jobNum, jobObj: jobObj.state == Job.SUCCESS)
     self._selected = sorted(
         ext_job_db.getJobs(JobSelector.create(selector,
                                               task=self._extTask)))
	def loadFromFile(path):
		"""Return a ListProvider reading *path* with dataset processing disabled."""
		optionDict = {'dataset': {'dataset processor': 'NullDataProcessor'}}
		cfg = createConfig(useDefaultFiles = False, configDict = optionDict)
		return DataProvider.createInstance('ListProvider', cfg, path)
Example #14
0
def gc_create_config(cmd_line_args):
    """Parse the command line and build the corresponding config object."""
    (_, args) = gc_cmd_line_parser(cmd_line_args)
    # The command line options are layered on top of the config file
    filler = OptsConfigFiller(cmd_line_args)
    return createConfig(configFile=args[0], additional=[filler])
Example #15
0
def gc_create_config(cmd_line_args):
	"""Create the main configuration from the parsed command line arguments."""
	(_, positional) = gc_cmd_line_parser(cmd_line_args)
	# First positional argument is the config file; options overlay it
	config_file = positional[0]
	return createConfig(configFile = config_file, additional = [OptsConfigFiller(cmd_line_args)])
	def loadFromFile(path):
		"""Build a ListProvider for *path* with nickname checks disabled."""
		datasetOpts = {
			'nickname check consistency': 'False',
			'nickname check collision': 'False',
		}
		cfg = createConfig(useDefaultFiles = False, configDict = {'dataset': datasetOpts})
		return DataProvider.createInstance('ListProvider', cfg, path)