Beispiel #1
0
from keras.models import Model, load_model
from subtlenet import config
import subtlenet.generators.gen as gen
from paths import basedir
from subtlenet.backend.layers import *

# Command-line configuration:
#   argv[1] = truncation of the per-particle feature axis
#   argv[2] = cap on the number of particles kept per jet
# NOTE(review): `argv` is presumably sys.argv brought in by a wildcard
# import elided from this chunk — confirm against the full file header.
gen.truncate = int(argv[1])
config.limit = int(argv[2])
name = 'dense'
print 'inferring', name
# Load the trained classifier.  The model file embeds the custom
# DenseBroadcast layer, so it must be registered via custom_objects or
# Keras cannot deserialize the architecture.
shallow = load_model('dense_models/classifier_v4_trunc%i_limit%i_best.h5' %
                     (gen.truncate, config.limit),
                     custom_objects={'DenseBroadcast': DenseBroadcast})

# Data collection spanning every partition/category of the input arrays.
coll = gen.make_coll(basedir + '/PARTITION/*_CATEGORY.npy')

# Normalization constants mapping soft-drop mass (msd) and transverse
# momentum (pt) to roughly [0, 1] using the configured kinematic ranges.
msd_norm_factor = 1. / config.max_mass
pt_norm_factor = 1. / (config.max_pt - config.min_pt)
# Column indices of msd and pt inside the 2-D 'singletons' array.
msd_index = config.gen_singletons['msd']
pt_index = config.gen_singletons['pt']


def predict_t(data):
    # NOTE(review): this definition is truncated in the visible chunk —
    # the body ends mid-branch below; do not assume the rest of it here.
    # Normalize soft-drop mass and pt into ~[0, 1] with the module-level
    # factors derived from the config ranges.
    msd = data['singletons'][:, msd_index] * msd_norm_factor
    pt = (data['singletons'][:, pt_index] - config.min_pt) * pt_norm_factor
    if msd.shape[0] > 0:
        # Slice the particle array — presumably (jets, particles, features):
        # cap the particle count when a limit is configured, and always
        # truncate the trailing feature axis.  TODO confirm axis meanings.
        if config.limit:
            particles = data['particles'][:, :config.limit, :gen.truncate]
        else:
            particles = data['particles'][:, :, :gen.truncate]
Beispiel #2
0
from paths import basedir, figsdir

### some global definitions ### 

NEPOCH = 20
TRAIN_BASELINE = False
# Command-line args: argv[1] = feature truncation, argv[2] = particle cap.
# NOTE(review): `argv`, `generator`, `config`, `system`, `make_coll`,
# `get_dims` and `generate` are presumably supplied by wildcard imports
# elided from this chunk — confirm against the full file header.
generator.truncate = int(argv[1])
config.limit = int(argv[2])
config.bin_decorr = False
# Version tag used to name saved models and the copied training script.
APOSTLE = 'v4_trunc%i_limit%i'%(generator.truncate, config.limit)
modeldir = 'mse_adversary/'
system('mkdir -p %s'%modeldir)
# Snapshot this script next to the models for reproducibility.
system('cp %s %s/train_%s.py'%(argv[0], modeldir, APOSTLE))

### instantiate data loaders ### 
# Separate collections for signal (top-quark jets) and background (QCD).
top = make_coll(basedir + '/PARTITION/Top_*_CATEGORY.npy')
qcd = make_coll(basedir + '/PARTITION/QCD_*_CATEGORY.npy')

data = [top, qcd]
dims = get_dims(top)

### first build the classifier! ###

# set up data 
# Generator options: feed mass and pt as auxiliary inputs; no mass
# decorrelation for the plain classifier.
opts = {
        'learn_mass' : True,
        'learn_pt' : True,
        'decorr_mass':False
       }
# Moderate batches for training, one large batch for validation.
classifier_train_gen = generate(data, partition='train', batch=1000, **opts)
classifier_validation_gen = generate(data, partition='validate', batch=10000, **opts)
Beispiel #3
0
# Figures are written under the configured figure directory.
OUTPUT = figsdir + '/'
system('mkdir -p %s' % OUTPUT)

# Arrays to load for each collection: raw singletons plus the stored
# outputs of the various trained taggers.
components = [
    'singletons',
    'shallow',
    'baseline2_7_100',
    'kltest_7_100',
    'categorical_crossentropy2_7_100',
    'categorical_crossentropytest2_7_100',
    'categorical_crossentropytesttest2_7_100',
]

# 't' = top-quark jets (signal), 'q' = QCD jets (background).
colls = {}
colls['t'] = make_coll(basedir + '/PARTITION/Top_*_CATEGORY.npy',
                       categories=components)
colls['q'] = make_coll(basedir + '/PARTITION/QCD_*_CATEGORY.npy',
                       categories=components)


# run DNN
def predict(data, model):
    """Return the precomputed output of *model* stored in *data*.

    Network responses were evaluated ahead of time and cached keyed by
    model name, so "running" the DNN is just a lookup.
    """
    scores = data[model]
    return scores


def access(data, v):
    """Fetch the singleton variable named *v* as a column of *data*.

    config.gen_singletons maps a variable name to its column index in the
    2-D 'singletons' array (one row per jet).
    """
    column = config.gen_singletons[v]
    return data['singletons'][:, column]

Beispiel #4
0
#              'trunc4_limit50_best', 
              'trunc7_limit100_best', 
              ]
# Gen-level inputs: raw singletons plus shallow/baseline tagger outputs.
components_gen = [
    'singletons',
    'shallow_best',
    # 'baseline_trunc4_limit50_best',
    'baseline_Adam_7_100',
]

# Hard-coded storage locations for reco-level and gen-level arrays.
basedir = '/local/snarayan/genarrays/v_deepgen_4_finegrid_small'
basedir_gen = '/fastscratch/snarayan/genarrays/v_deepgen_4_small/'


# 't' = top-quark jets (signal), 'q' = QCD jets (background).
colls = {}
colls['t'] = make_coll(basedir + '/PARTITION/Top_*_CATEGORY.npy',
                       categories=components)
colls['q'] = make_coll(basedir + '/PARTITION/QCD_*_CATEGORY.npy',
                       categories=components)

# Matching collections built from the gen-level arrays.
colls_gen = {}
colls_gen['t'] = make_coll(basedir_gen + '/PARTITION/Top_*_CATEGORY.npy',
                           categories=components_gen)
colls_gen['q'] = make_coll(basedir_gen + '/PARTITION/QCD_*_CATEGORY.npy',
                           categories=components_gen)


# run DNN
def predict(data,model):
    """Return the cached response of *model* for this dataset.

    Outputs were evaluated ahead of time and stored by model name.
    """
    response = data[model]
    return response

def access(data, v):
    """Return the column of the 'singletons' array holding variable *v*.

    The column index comes from the config.gen_singletons name->index map.
    """
    idx = config.gen_singletons[v]
    return data['singletons'][:, idx]
Beispiel #5
0
              'shallow_nopt', 
              'dense',
#              'trunc4_limit10_best', 
#              'trunc7_limit10_best', 
              'trunc4_limit50_best', 
              'smeared_trunc7_limit50_best', 
#              'baseline_trunc4_limit50_best', 
#              'trunc7_limit50_best', 
#              'dense_trunc7_limit50_best', 
#              'dense_trunc4_limit100_best', 
#              'trunc7_limit100_best', 
#              'trunc4_limit100_best',
              ]

# 't' = top-quark jets (signal), 'q' = QCD jets (background).
colls = {}
colls['t'] = make_coll(paths.basedir + '/PARTITION/Top_*_CATEGORY.npy',
                       categories=components)
colls['q'] = make_coll(paths.basedir + '/PARTITION/QCD_*_CATEGORY.npy',
                       categories=components)


# run DNN
def predict(data,model):
    """Look up the precomputed score of *model* inside *data*.

    Scores are stored keyed by model name; no network is evaluated here.
    """
    cached = data[model]
    return cached

def access(data, v):
    """Return the column of *data*['singletons'] named *v*.

    Column indices are resolved via the config.gen_singletons map.
    """
    col = config.gen_singletons[v]
    return data['singletons'][:, col]

def div(data, num, den):
    """Return access(num) / access(den) with a safeguarded denominator.

    The denominator is clipped into [0.0001, 999] so near-zero values do
    not produce division-by-zero or exploding ratios.
    """
    # Evaluate the numerator first, matching the original call order.
    numerator = access(data, num)
    denominator = np.clip(access(data, den), 0.0001, 999)
    return numerator / denominator

f_vars = {
Beispiel #6
0
]

# Swap the optimizer tag in each component name: 'baseline' -> 'baseline_Adam'.
# NOTE(review): Python 2 idiom — map() returns a list here; under Python 3
# it would be a lazy iterator (and dict.iteritems below would not exist).
components_base = map(lambda x: x.replace('baseline', 'baseline_Adam'),
                      components)

# Input locations keyed by grid spacing; commented entries are alternate
# datasets that were evidently used in earlier passes.
basedir = {
    #            'base' : '/data/t3serv014/snarayan/deep/v_deepgen_4_eta5_small/',
    '0p02': '/local/snarayan/genarrays/v_deepgen_4_noetaphi_small/',
    #            '0p001' : '/local/snarayan/genarrays/v_deepgen_4_finegrid_small/',
}
basedir_base = '/fastscratch/snarayan/genarrays/v_deepgen_4_small/'

# One pair of (top, QCD) collections per configured grid spacing.
colls = {}
for d, b in basedir.iteritems():
    colls[d] = {
        't': make_coll(b + '/PARTITION/Top_*_CATEGORY.npy',
                       categories=components),
        'q': make_coll(b + '/PARTITION/QCD_*_CATEGORY.npy',
                       categories=components),
    }

# Baseline collections built from the common (non-gridded) arrays using
# the Adam-tagged component names.
colls_base = {
    't':
    make_coll(basedir_base + '/PARTITION/Top_*_CATEGORY.npy',
              categories=components_base),
    'q':
    make_coll(basedir_base + '/PARTITION/QCD_*_CATEGORY.npy',
              categories=components_base),
}


# run DNN