Example #1
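The tail of a batch skim template (apparently Python 2): the job is looked up by its PROCID, run in an isolated working directory, merged with hadd, staged out, and marked done with a lock file.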
            break
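    # no catalogued job matched this PROCID: log the error and abort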
    if not to_run:
        logger.error(sname, 'Could not find a job for PROCID=%i' % (which))
        exit(3)

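    # output and lock directories are passed in through the submission environment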
    outdir = getenv('SUBMIT_OUTDIR')
    lockdir = getenv('SUBMIT_LOCKDIR')
    outfilename = to_run.name + '_%i.root' % (submit_id)
    processed = {}

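    # record which input files this job is about to process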
    utils.report_start(outdir, outfilename, to_run.files)

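    # run the skim in an isolated scratch directory, tracking each processed file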
    wd = utils.isolate()
    utils.main(to_run, processed, fn)

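    # merge the per-file outputs into a single ROOT file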
    utils.hadd(processed.keys())
    utils.print_time('hadd')

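    # copy the merged file to its final destination and clean up the sandbox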
    ret = utils.stageout(outdir, outfilename)
    utils.cleanup('*.root')
    utils.un_isolate(wd)
    utils.print_time('stageout and cleanup')
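    # a zero return code from stageout means success: write the lock file marking this job done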
    if not ret:
        utils.report_done(lockdir, outfilename, processed)
        utils.cleanup('*.lock')
        utils.print_time('create lock')
    else:
        exit(-1 * ret)

    exit(0)
Example #2
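A variant of the same template that additionally runs a BDT step and can collect per-event numpy arrays; note the branches deliberately switched off with 'and False' and the Python 2 iteritems usage.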
    utils.main(to_run, processed, fn)

    utils.hadd(processed.keys())
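    # unreachable: the 'and False' deliberately disables merging of the array outputs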
    if deep_utils.STORE and False:
        utils.hadd([x.replace('output_', '') for x in glob('*pf*.root')],
                   'arrays.root')
        utils.cleanup('*pf*.root')
    utils.print_time('hadd')

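    # run the BDT evaluation step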
    add_bdt()
    utils.print_time('bdt')

    # utils.record_inputs('output.root',processed)
    # utils.print_time('record inputs')

    ret = utils.stageout(outdir, outfilename)
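    # likewise disabled by 'and False': would stage out the merged arrays file separately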
    if deep_utils.STORE and False:
        utils.stageout(outdir, outfilename.replace('.root', '_arrays.root'),
                       'arrays.root')
    utils.cleanup('*.root')
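    # optionally gather per-event arrays from any .npz files produced during processing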
    if deep_utils.SAVE:
        data = {}
        for f in glob('*npz'):
            f_data = deep_utils.np.load(f)
            for k, v in f_data.iteritems():
                if k not in data:
                    data[k] = []
                if v.shape[0] > 0:
                    data[k].append(v)
        if len(data['pt']) > 0:
            merged_data = {