def run_model(infilepattern, outfilepath):
    # Requires `from glob import glob`; the remaining helpers (tree_to_arrays,
    # normalize_arrays, utils) and the STORE flag come from the surrounding module.
    # Count the input shards by globbing for the '%i' index placeholder.
    N = len(glob(infilepattern.replace('%i', '*')))
    predictions = []
    for i in xrange(N):
        infilepath = infilepattern % i
        # Convert the input tree to arrays and normalize the 'particles' features in place.
        data = tree_to_arrays(infilepath)
        normalize_arrays(data, 'particles', infilepath)
        utils.print_time('preprocessing ' + infilepath)
        # Remove the intermediate shard unless it is being kept for later use.
        if not STORE:
            utils.cleanup(infilepath)
Example #2
def run_model(infilepattern, outfilepath):
    # Requires `from glob import glob` and `import numpy as np`; the remaining
    # helpers (tree_to_arrays, normalize_arrays, infer, arrays_to_tree, utils)
    # and the INFER/STORE flags come from the surrounding module.
    # Count the input shards by globbing for the '%i' index placeholder.
    N = len(glob(infilepattern.replace('%i', '*')))
    predictions = []
    for i in xrange(N):
        infilepath = infilepattern % i
        # Convert the input tree to arrays and normalize the 'pf' and 'sv'
        # feature arrays in place.
        data = tree_to_arrays(infilepath)
        normalize_arrays(data, 'pf', infilepath)
        normalize_arrays(data, 'sv', infilepath)
        utils.print_time('preprocessing')
        if INFER:
            # Run inference and keep only non-empty prediction arrays.
            pred = infer(data)
            if pred.shape[0]:
                predictions.append(pred)
            utils.print_time('inference')
        # Remove the intermediate shard unless it is being kept for later use.
        if not STORE:
            utils.cleanup(infilepath)
    if INFER:
        # Concatenate the per-shard predictions (or fall back to an empty array)
        # and write them out as a tree.
        if predictions:
            pred = np.concatenate(predictions)
        else:
            pred = np.array([])
        arrays_to_tree(outfilepath, pred)
        utils.print_time('saving prediction')
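
A minimal usage sketch for the function above; the shard and output file names and the '%i' index pattern are illustrative assumptions, not taken from the example itself:

# Hypothetical call: preprocess every shard matching shard_*.root and write
# the concatenated predictions to predictions.root.
run_model('shard_%i.root', 'predictions.root')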
Example #3
            break
    # Abort if no sample matches the requested PROCID.
    if not to_run:
        logger.error(sname, 'Could not find a job for PROCID=%i' % (which))
        exit(3)

    outdir = getenv('SUBMIT_OUTDIR')
    lockdir = getenv('SUBMIT_LOCKDIR')
    outfilename = to_run.name + '_%i.root' % (submit_id)
    processed = {}

    utils.report_start(outdir, outfilename, to_run.files)

    # Run the payload in an isolated working directory.
    wd = utils.isolate()
    utils.main(to_run, processed, fn)

    # Merge the per-file outputs into a single ROOT file.
    utils.hadd(processed.keys())
    utils.print_time('hadd')

    # Copy the merged output to the destination, then clean up and leave the sandbox.
    ret = utils.stageout(outdir, outfilename)
    utils.cleanup('*.root')
    utils.un_isolate(wd)
    utils.print_time('stageout and cleanup')
    if not ret:
        # Stage-out succeeded: report the job as done and clean up local lock files.
        utils.report_done(lockdir, outfilename, processed)
        utils.cleanup('*.lock')
        utils.print_time('create lock')
    else:
        # Propagate the stage-out failure as a non-zero exit code.
        exit(-1 * ret)

    exit(0)
Example #4
    # Abort if no sample matches the requested PROCID.
    if not to_run:
        PError(sname, 'Could not find a job for PROCID=%i' % (which))
        exit(3)

    outdir = getenv('SUBMIT_OUTDIR')
    lockdir = getenv('SUBMIT_LOCKDIR')
    outfilename = to_run.name + '_%i.root' % (submit_id)
    processed = {}

    utils.main(to_run, processed, fn)

    # Merge the per-file outputs into a single ROOT file. The array merge below
    # is disabled by the 'and False' guard.
    utils.hadd(processed.keys())
    if deep_utils.STORE and False:
        utils.hadd([x.replace('output_', '') for x in glob('*pf*.root')],
                   'arrays.root')
        utils.cleanup('*pf*.root')
    utils.print_time('hadd')

    # Add the BDT output.
    add_bdt()
    utils.print_time('bdt')

    # utils.record_inputs('output.root',processed)
    # utils.print_time('record inputs')

    # Copy the merged output to the destination; the array stage-out is likewise
    # disabled by the 'and False' guard.
    ret = utils.stageout(outdir, outfilename)
    if deep_utils.STORE and False:
        utils.stageout(outdir, outfilename.replace('.root', '_arrays.root'),
                       'arrays.root')
    utils.cleanup('*.root')
    if deep_utils.SAVE:
        data = {}
Example #5
            to_run = s
            break
    # Abort if no sample matches the requested PROCID.
    if not to_run:
        logger.error(sname, 'Could not find a job for PROCID=%i' % (which))
        exit(3)

    outdir = getenv('SUBMIT_OUTDIR')
    lockdir = getenv('SUBMIT_LOCKDIR')
    outfilename = to_run.name + '_%i.root' % (submit_id)
    processed = {}

    utils.report_start(outdir, outfilename, to_run.files)

    # Run the payload in an isolated working directory, then merge the outputs.
    wd = utils.isolate()
    utils.main(to_run, processed, fn)

    utils.hadd(processed.keys())
    utils.print_time('hadd')

    # Stage out the merged file, clean up, and leave the sandbox.
    ret = utils.stageout(outdir, outfilename)
    utils.cleanup('*.root')
    utils.un_isolate(wd)
    utils.print_time('stageout and cleanup')
    if not ret:
        # Stage-out succeeded: report the job as done.
        utils.report_done(lockdir, outfilename, processed)
        utils.print_time('create lock')
    else:
        # Propagate the stage-out failure as a non-zero exit code.
        exit(-1 * ret)

    exit(0)