Example #1
0
def setup_dataset(config, dataset):
	"""Create and register the 'data' parameter source for *dataset*.

	A dataset value of 'true' (case-insensitive) installs a dummy splitter;
	anything else is treated as a path to stored partition information.
	"""
	if dataset.lower() != 'true':
		splitter = DataSplitter.loadPartitionsForScript(dataset)
	else:
		log.info('Registering dummy data provider data')
		splitter = DummySplitter()

	# Drop the section restriction so the plugin lookup sees all options.
	config = config.changeView(setSections = None)
	proc = config.getCompositePlugin('partition processor',
		'TFCPartitionProcessor LocationPartitionProcessor MetaPartitionProcessor BasicPartitionProcessor',
		'MultiPartitionProcessor', cls = 'PartitionProcessor', onChange = None)
	ParameterSource.createInstance('DataParameterSource',
		config.getWorkPath(), 'data', None, splitter, proc, repository)
Example #2
0
def main(opts, args):
	"""Dispatch to section-expression mode or per-job luminosity calculation."""
	if opts.gc or opts.json or opts.full:
		return lumi_expr(opts, args)

	if not (opts.job_json or opts.job_gc or opts.job_events):
		return
	(config, jobDB) = initGC(args)
	work_dir = config.getWorkPath()
	try:
		# Partition information is optional - fall back to None if missing.
		part_src = DataSplitter.loadPartitionsForScript(os.path.join(work_dir, 'datamap.tar'))
	except Exception:
		clear_current_exception()
		part_src = None
	selected = sorted(jobDB.getJobs(ClassSelector(JobClass.SUCCESS)))
	return lumi_calc(opts, work_dir, selected, part_src)
Example #3
0
def main(opts, args):
    """Run either the lumi section expression or the per-job lumi calculation."""
    if opts.gc or opts.json or opts.full:
        return lumi_expr(opts, args)

    if opts.job_json or opts.job_gc or opts.job_events:
        (config, jobDB) = initGC(args)
        work_path = config.getWorkPath()
        partition_source = None
        datamap_fn = os.path.join(work_path, 'datamap.tar')
        try:
            # Best effort: missing partition data just leaves the source unset.
            partition_source = DataSplitter.loadPartitionsForScript(datamap_fn)
        except Exception:
            clear_current_exception()
        success_jobs = jobDB.getJobs(ClassSelector(JobClass.SUCCESS))
        return lumi_calc(opts, work_path, sorted(success_jobs), partition_source)
Example #4
0
def setup_dataset(config, dataset):
    """Register a DataParameterSource instance under the key 'data'.

    *dataset* may be the literal string 'true' (any case) to install a
    dummy splitter, or a path to stored partition information.
    """
    if dataset.lower() != 'true':
        splitter = DataSplitter.loadPartitionsForScript(dataset)
    else:
        utils.vprint('Registering dummy data provider data')
        splitter = DummySplitter()

    # Remove the section restriction before looking up the plugin.
    config = config.changeView(setSections=None)
    default_chain = 'TFCPartitionProcessor LocationPartitionProcessor MetaPartitionProcessor BasicPartitionProcessor'
    processor = config.getCompositePlugin('partition processor', default_chain,
        'MultiPartitionProcessor', cls='PartitionProcessor', onChange=None)
    src_cls = ParameterSource.getClass('DataParameterSource')
    src_cls.datasetsAvailable['data'] = src_cls(
        config.getWorkPath(), 'data', None, splitter, processor)
Example #5
0
def setup_dataset(config, dataset):
    """Set up and register the 'data' parameter source.

    When *dataset* equals 'true' (ignoring case) a dummy splitter is used;
    otherwise it names a file with stored partition information.
    """
    if dataset.lower() == 'true':
        log.info('Registering dummy data provider data')
        partition_source = DummySplitter()
    else:
        partition_source = DataSplitter.loadPartitionsForScript(dataset)

    config = config.changeView(setSections=None)  # lift section restriction
    processor_chain = str.join(' ', ['TFCPartitionProcessor', 'LocationPartitionProcessor',
                                     'MetaPartitionProcessor', 'BasicPartitionProcessor'])
    processor = config.getCompositePlugin('partition processor', processor_chain,
                                          'MultiPartitionProcessor',
                                          cls='PartitionProcessor', onChange=None)
    ParameterSource.createInstance('DataParameterSource', config.getWorkPath(),
                                   'data', None, partition_source, processor,
                                   repository)
Example #6
0
				# Compare a job-reported value against the stored partition value
				# and mark the job as failed on any mismatch.
				def printError(curJ, curS, msg):
					if curJ != curS:
						logging.warning('%s in job %d (j:%s != s:%s)', msg, jobNum, curJ, curS)
						fail.add(jobNum)
				printError(events, splitInfo[DataSplitter.NEntries], 'Inconsistent number of events')
				printError(skip, splitInfo[DataSplitter.Skipped], 'Inconsistent number of skipped events')
				printError(files, splitInfo[DataSplitter.FileList], 'Inconsistent list of files')
			except Exception:
				# NOTE(review): reaching here presumably means the job report could
				# not be read at all - confirm against the enclosing try block.
				logging.warning('Job %d was never initialized!', jobNum)
		if fail:
			# Summarize all jobs with inconsistent partition information.
			logging.warning('Failed: ' + str.join('\n', imap(str, fail)))

# Partition inspection modes: list partition keys, list invalid partitions,
# or check partition consistency. All require exactly one argument - the
# path to the stored partition information.
if (opts.partition_list is not None) or opts.partition_list_invalid or opts.partition_check:
	if len(args) != 1:
		utils.exitWithUsage(parser.usage('part'))
	splitter = DataSplitter.loadPartitionsForScript(args[0])

	if opts.partition_list_invalid:
		utils.printTabular([(0, 'Job')], partition_invalid(splitter))

	if opts.partition_list is not None:
		# An empty selection or 'all' selects every known partition key.
		if opts.partition_list in ('', 'all'):
			keyStrings = DataSplitter.enumNames
		else:
			keyStrings = opts.partition_list.split(',')
		keyList = lmap(DataSplitter.str2enum, keyStrings)
		if None in keyList:
			# At least one requested key name was not recognized.
			logging.warning('Available keys: %r', DataSplitter.enumNames)
		utils.printTabular([('jobNum', 'Job')] + lzip(keyList, keyStrings), partition_list(splitter, keyList))

	if opts.partition_check: