# --- tail of the sample-lookup loop; the `for`/`if` header lives in a preceding
# --- chunk not visible here, so this `break` is orphaned in this view.
break

# Abort the batch job if the PROCID passed in did not match any known sample.
if not to_run:
    logger.error(sname, 'Could not find a job for PROCID=%i' % (which))
    exit(3)

# Stage-out destination and lock directory come from the submission environment.
outdir = getenv('SUBMIT_OUTDIR')
lockdir = getenv('SUBMIT_LOCKDIR')
# Output file is named after the sample plus the numeric submission id,
# e.g. "<sample>_<submit_id>.root".
outfilename = to_run.name + '_%i.root' % (submit_id)
# Collects the per-input-file results produced by utils.main; its keys are
# later hadd-ed together (presumably paths of processed ROOT files —
# TODO confirm against utils.main).
processed = {}

# Announce the job start (bookkeeping in outdir — semantics live in utils).
utils.report_start(outdir, outfilename, to_run.files)
# Run inside an isolated scratch working directory; `wd` is the handle
# needed to undo the isolation below.
wd = utils.isolate()
utils.main(to_run, processed, fn)
# Merge all per-file outputs into the single job output.
utils.hadd(processed.keys())
utils.print_time('hadd')

# Copy the merged file to the output directory; a falsy return value is
# treated as success (see the `if not ret` below).
ret = utils.stageout(outdir, outfilename)
utils.cleanup('*.root')
utils.un_isolate(wd)
utils.print_time('stageout and cleanup')

if not ret:
    # Success: record completion and drop any stale lock files.
    utils.report_done(lockdir, outfilename, processed)
    utils.cleanup('*.lock')
    utils.print_time('create lock')
else:
    # Propagate the stage-out failure code (negated — NOTE(review):
    # exit codes outside 0-255 are truncated by the shell; verify intent).
    exit(-1 * ret)

exit(0)
# Locate the sample whose id matches the PROCID (`which`) given to this job.
for s in sample_list:
    if which == s.get_id():
        to_run = s
        break

# Abort the batch job if the PROCID did not match any known sample.
if not to_run:
    PError(sname, 'Could not find a job for PROCID=%i' % (which))
    exit(3)

# Stage-out destination and lock directory come from the submission environment.
outdir = getenv('SUBMIT_OUTDIR')
lockdir = getenv('SUBMIT_LOCKDIR')
# Output file is named after the sample plus the numeric submission id.
outfilename = to_run.name + '_%i.root' % (submit_id)
# Collects the per-input-file results produced by utils.main; its keys are
# later hadd-ed together (presumably paths of processed ROOT files —
# TODO confirm against utils.main).
processed = {}

utils.main(to_run, processed, fn)
# Merge all per-file outputs into the single job output.
utils.hadd(processed.keys())
# NOTE(review): `and False` makes this branch unreachable — deliberately
# disabled array-merging path, kept for reference.
if deep_utils.STORE and False:
    utils.hadd([x.replace('output_', '') for x in glob('*pf*.root')], 'arrays.root')
    utils.cleanup('*pf*.root')
utils.print_time('hadd')

# Attach the BDT output to the merged file (defined elsewhere in this file).
add_bdt()
utils.print_time('bdt')

# utils.record_inputs('output.root',processed)
# utils.print_time('record inputs')

# Copy the merged file to the output directory.
ret = utils.stageout(outdir, outfilename)
# Disabled (`and False`) companion stage-out for the arrays file.
if deep_utils.STORE and False:
    utils.stageout(outdir, outfilename.replace('.root', '_arrays.root'),  # statement truncated — continues past this chunk