# Generic batch-job wrapper: look up the sample assigned to this job, run the
# user-supplied per-file callback on it, merge and stage out the outputs, and
# record a lock file on success.
# Only the standard-library imports are shown; cb, logger, _sname and the
# helpers (report_start, isolate, main, hadd, stageout, cleanup, un_isolate,
# report_done, print_time) are assumed to be defined elsewhere in this module.
from os import getenv
from sys import argv, exit


def wrapper(fn, pre_fn=None, post_fn=None):
    which = int(argv[1])      # ID of the sample this job should process
    submit_id = int(argv[2])  # unique ID of this submission

    # find the sample whose ID matches this job
    sample_list = cb.read_sample_config('local.cfg', as_dict=False)
    to_run = None
    for s in sample_list:
        if which == s.get_id():
            to_run = s
            break
    if not to_run:
        logger.error(_sname, 'Could not find a job for PROCID=%i' % (which))
        exit(3)

    outdir = getenv('SUBMIT_OUTDIR')
    lockdir = getenv('SUBMIT_LOCKDIR')
    outfilename = to_run.name + '_%i.root' % (submit_id)
    processed = {}

    report_start(outdir, outfilename, to_run.files)

    # run in an isolated working directory
    wd = isolate()
    if pre_fn is not None:
        pre_fn()
        print_time('pre_fn')

    # process each input file with the user-supplied callback
    main(to_run, processed, fn)

    # merge the per-file outputs into a single file
    hadd(processed.keys())
    print_time('hadd')
    if post_fn is not None:
        post_fn()
        print_time('post_fn')

    # copy the merged output to its final destination, then clean up
    ret = stageout(outdir, outfilename)
    cleanup('*.root')
    un_isolate(wd)
    print_time('stageout and cleanup')

    if not ret:
        # success: record which files were processed via a lock file
        report_done(lockdir, outfilename, processed)
        cleanup('*.lock')
        print_time('create lock')
    else:
        exit(-1 * ret)
    exit(0)
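# Usage sketch (illustrative, not from the original sources): a job script can
# delegate the bookkeeping above to wrapper() and provide only its per-file
# callback. The module name job_utilities and the callback signature are
# assumptions here; the callback must accept whatever arguments main() passes
# to it (mirroring the fn() in the skim script below).
import job_utilities as utils  # hypothetical module containing wrapper()


def my_fn(input_name, isData):
    # process a single input file and return the analyzer's status code
    return 0


if __name__ == "__main__":
    utils.wrapper(my_fn)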
# Skim script: fn() configures and runs the L1 analyzer on a single input
# file, while the __main__ block performs the same job bookkeeping as
# wrapper() above. The def line is reconstructed from the body (the original
# header precedes this excerpt), as is the argv parsing in __main__.
def fn(input_name, isData):
    logger.info(sname + '.fn', 'Starting to process ' + input_name)

    # now we instantiate and configure the analyzer
    a = analysis("l1", verbose=False)
    a.inpath = input_name
    a.outpath = utils.input_to_output(input_name)
    a.datapath = data_dir
    a.isData = isData
    utils.set_year(a, 2016)

    skimmer = root.pa.L1Analyzer(a)
    return utils.run_Analyzer(skimmer, isData, a.outpath)


if __name__ == "__main__":
    # command-line arguments, assumed to mirror wrapper() above
    which = int(argv[1])
    submit_id = int(argv[2])

    # find the sample whose ID matches this job
    sample_list = cb.read_sample_config('local.cfg', as_dict=False)
    to_run = None
    for s in sample_list:
        if which == s.get_id():
            to_run = s
            break
    if not to_run:
        logger.error(sname, 'Could not find a job for PROCID=%i' % (which))
        exit(3)

    outdir = getenv('SUBMIT_OUTDIR')
    lockdir = getenv('SUBMIT_LOCKDIR')
    outfilename = to_run.name + '_%i.root' % (submit_id)
    processed = {}

    utils.report_start(outdir, outfilename, to_run.files)