def setup_dataset(config, dataset):
    """Register a 'data' parameter source for the given dataset argument.

    *dataset* is either the literal string 'true' (any case), selecting a
    dummy splitter, or a path handed to DataSplitter.loadStateForScript.
    The resulting source is registered under the key 'data'.
    """
    # Case-insensitive 'true' means: no real dataset, use dummy partitions.
    if dataset.lower() == 'true':
        utils.vprint('Registering dummy data provider data')
        splitter = DummySplitter()
    else:
        splitter = DataSplitter.loadStateForScript(dataset)
    # Look up the parameter-source class by name and register an instance
    # of it in the class-level registry of available datasets.
    data_psrc_cls = ParameterSource.getClass('DataParameterSource')
    data_psrc_cls.datasetsAvailable['data'] = data_psrc_cls(
        config.getWorkPath(), 'data', None, splitter, DataSplitProcessorTest())
def main(opts, args):
    """Dispatch to lumi-expression or per-job lumi-calculation mode."""
    # Expression mode: delegate straight to the expression handler.
    if any((opts.gc, opts.json, opts.full)):
        return lumi_expr(opts, args)
    # Per-job mode: needs the grid-control work directory and job database.
    if any((opts.job_json, opts.job_gc, opts.job_events)):
        (config, job_db) = initGC(args)
        work_dir = config.getWorkPath()
        # The partition map is optional; fall back to None if unreadable.
        splitter = None
        try:
            splitter = DataSplitter.loadStateForScript(
                os.path.join(work_dir, 'datamap.tar'))
        except Exception:
            pass
        successful = sorted(job_db.getJobs(ClassSelector(JobClass.SUCCESS)))
        return lumi_calc(opts, work_dir, successful, splitter)
def main(opts, args):
    """Dispatch to lumi-expression or per-job lumi-calculation mode.

    NOTE(review): this definition appears to duplicate an identical
    ``main`` earlier in the file; the later definition shadows the
    earlier one at import time — confirm and remove one of them.
    """
    # Expression mode takes precedence over per-job mode.
    if opts.gc or opts.json or opts.full:
        return lumi_expr(opts, args)
    if opts.job_json or opts.job_gc or opts.job_events:
        config, jobDB = initGC(args)
        workDir = config.getWorkPath()
        # Best-effort load of the partition map; absent/broken -> None.
        splitter = None
        try:
            datamap_path = os.path.join(workDir, 'datamap.tar')
            splitter = DataSplitter.loadStateForScript(datamap_path)
        except Exception:
            pass
        return lumi_calc(opts, workDir,
            sorted(jobDB.getJobs(ClassSelector(JobClass.SUCCESS))), splitter)
def printError(curJ, curS, msg): if curJ != curS: logging.warning('%s in job %d (j:%s != s:%s)', msg, jobNum, curJ, curS) fail.add(jobNum) printError(events, splitInfo[DataSplitter.NEntries], 'Inconsistent number of events') printError(skip, splitInfo[DataSplitter.Skipped], 'Inconsistent number of skipped events') printError(files, splitInfo[DataSplitter.FileList], 'Inconsistent list of files') except Exception: logging.warning('Job %d was never initialized!', jobNum) if fail: logging.warning('Failed: ' + str.join('\n', imap(str, fail))) if (opts.partition_list is not None) or opts.partition_list_invalid or opts.partition_check: if len(args) != 1: utils.exitWithUsage(parser.usage('part')) splitter = DataSplitter.loadStateForScript(args[0]) if opts.partition_list_invalid: utils.printTabular([(0, 'Job')], partition_invalid(splitter)) if opts.partition_list is not None: if opts.partition_list: keyStrings = opts.partition_list.split(',') else: keyStrings = DataSplitter.enumNames keyList = lmap(DataSplitter.str2enum, keyStrings) if None in keyList: logging.warning('Available keys: %r', DataSplitter.enumNames) utils.printTabular([('jobNum', 'Job')] + lzip(keyList, keyStrings), partition_list(splitter, keyList)) if opts.partition_check:
'Inconsistent number of events') printError(skip, splitInfo[DataSplitter.Skipped], 'Inconsistent number of skipped events') printError(files, splitInfo[DataSplitter.FileList], 'Inconsistent list of files') except Exception: logging.warning('Job %d was never initialized!', jobNum) if fail: logging.warning('Failed: ' + str.join('\n', imap(str, fail))) if (opts.partition_list is not None) or opts.partition_list_invalid or opts.partition_check: if len(args) != 1: utils.exitWithUsage(parser.usage('part')) splitter = DataSplitter.loadStateForScript(args[0]) if opts.partition_list_invalid: utils.printTabular([(0, 'Job')], partition_invalid(splitter)) if opts.partition_list is not None: if opts.partition_list: keyStrings = opts.partition_list.split(',') else: keyStrings = DataSplitter.enumNames keyList = lmap(DataSplitter.str2enum, keyStrings) if None in keyList: logging.warning('Available keys: %r', DataSplitter.enumNames) utils.printTabular([('jobNum', 'Job')] + lzip(keyList, keyStrings), partition_list(splitter, keyList))