# Beispiel (Example) #1
# 0
# Load precomputed tree-level NJet values for the test set, one file for the
# "cut" region and one for the "near" (singular) region. The filenames encode
# the perturbative order, the multiplicity (n_gluon + 2), the region's delta
# threshold, and the number of test points.
# allow_pickle=True: the saved arrays may contain Python objects, not just
# plain numeric data.
NJ_cut_test_treevals = np.load(data_dir + 'NJ_{}_cut_{}_{}_{}.npy'.format(
    order, n_gluon + 2, delta_cut, test_points),
                               allow_pickle=True)
NJ_near_test_treevals = np.load(data_dir + 'NJ_{}_near_{}_{}_{}.npy'.format(
    order, n_gluon + 2, delta_near, test_points),
                                allow_pickle=True)

# Determine the parton pairings for this multiplicity from a single sample
# momentum configuration; only the pairs are needed here, the first return
# value of D_ij is discarded.
_, pairs = D_ij(mom=test_near_momenta[0], n_gluon=n_gluon)

# cores == 0 acts as "auto": use one worker per pairing.
if cores == 0:
    cores = len(pairs)

# Split/weight the near-region labels across the pairings using `cores`
# workers. NJ_near_test_treevals_split presumably holds one label array per
# pairing — TODO confirm against multiprocess_weighting's definition.
pairs, NJ_near_test_treevals_split = multiprocess_weighting(
    test_near_momenta, n_gluon, NJ_near_test_treevals, cores)

# Build a Model with input dimension (n_gluon + 2 - 1) * 4 (presumably 4
# momentum components per independent external leg — TODO confirm) over the
# near-region test momenta and the first split of the labels. All eight
# return values of process_training_data() are discarded, so the call is made
# purely for its side effects (presumably caching normalisation statistics on
# the instance — verify against Model's definition).
NN = Model((n_gluon + 2 - 1) * 4, test_near_momenta,
           NJ_near_test_treevals_split[0])
_, _, _, _, _, _, _, _ = NN.process_training_data()

# Accumulators filled later (outside this excerpt): trained models and their
# input/output normalisation statistics, kept separately for the "near" and
# "cut" phase-space regions.
model_nears = []
model_cuts = []

# Per-model normalisation statistics for the near-region networks.
x_mean_nears = []
x_std_nears = []
y_mean_nears = []
y_std_nears = []

# Per-model normalisation statistics for the cut-region networks.
x_mean_cuts = []
x_std_cuts = []
y_mean_cuts = []
y_std_cuts = []
# Beispiel (Example) #2
# 0
        order, n_gluon + 2, delta, subset_points, i)
    # Ensure the per-rerun model directory (model_dir_new, built just above
    # from the run configuration) exists before training.
    print('Looking for directory {}'.format(model_dir_new))
    if os.path.exists(model_dir_new) == False:
        os.mkdir(model_dir_new)
        print('Directory created')
    else:
        print('Directory already exists')

    # Draw a random subset of the training data for this rerun.
    # NOTE(review): np.random.randint samples WITH replacement, so the subset
    # may contain duplicate phase-space points — confirm this is intended.
    print('Generating data subset')
    indices = np.random.randint(0, points, subset_points)
    subset_momenta = momenta[indices]
    subset_momenta_list = subset_momenta.tolist()
    subset_njet = NJ_treevals[indices]
    # Inputs and labels must stay aligned after the fancy-indexing above.
    assert len(subset_momenta) == len(subset_njet)

    # Train a fresh network on the subset. fit() returns the trained model
    # together with the input/output normalisation statistics needed to
    # reproduce its preprocessing at inference time.
    # epochs=1000000 is effectively "until early stopping" — presumably fit()
    # stops earlier on its own criterion; verify against Model.fit.
    NN = Model((n_gluon + 2 - 1) * 4, subset_momenta_list, subset_njet)
    model, x_mean, x_std, y_mean, y_std = NN.fit(layers=[20, 40, 20],
                                                 lr=lr,
                                                 epochs=1000000)

    # Persist the model plus its normalisation metadata (required to
    # standardise inputs/outputs identically when reloading). Skipped when no
    # model_dir was supplied.
    if model_dir != '':
        model.save(model_dir_new + '/model')
        metadata = {
            'x_mean': x_mean,
            'x_std': x_std,
            'y_mean': y_mean,
            'y_std': y_std
        }
        pickle_out = open(model_dir_new + "/dataset_metadata.pickle", "wb")
        pickle.dump(metadata, pickle_out)
        pickle_out.close()
    # Re-partition the full phase space into "cut" and "near" regions using
    # the re-evaluation thresholds delta_recut/delta_renear (which may differ
    # from the thresholds used for training data generation).
    print('Using delta_renear and delta_recut values')
    cut_momenta, near_momenta, NJ_near_virt, NJ_cut_virt = cut_near_split(
        momenta, virt, delta_cut=delta_recut, delta_near=delta_renear)
    print('Testing on {} phase space points in total'.format(
        len(cut_momenta) + len(near_momenta)))

# Total number of test points across both regions after the re-split.
test_points = len(cut_momenta) + len(near_momenta)
# Parton pairings for this multiplicity, derived from one sample momentum
# configuration; the first return value of D_ij is unused.
_, pairs = D_ij(mom=near_momenta[0], n_gluon=n_gluon)

# cores == 0 acts as "auto": use one worker per pairing.
if cores == 0:
    cores = len(pairs)

# Split/weight the near-region virtual-correction labels across the pairings
# using `cores` workers.
pairs, NJ_near_virt_split = multiprocess_weighting(near_momenta, n_gluon,
                                                   NJ_near_virt, cores)

# Instantiate a Model over the near momenta and the first label split; the
# eight return values of process_training_data are discarded, so this call is
# made for its side effects only (presumably caching normalisation statistics
# on the instance — verify against Model's definition).
NN = Model((n_gluon + 2 - 1) * 4, near_momenta, NJ_near_virt_split[0])
_, _, _, _, _, _, _, _ = NN.process_training_data(moms=near_momenta,
                                                  labs=NJ_near_virt_split[0])

# Accumulators filled later (outside this excerpt): trained models and their
# normalisation statistics, kept separately for the near and cut regions.
model_nears = []
model_cuts = []

x_mean_nears = []
x_std_nears = []
y_mean_nears = []
y_std_nears = []

x_mean_cuts = []
x_std_cuts = []
y_mean_cuts = []
y_std_cuts = []
        data_dir +
        'NJ_{}_{}_{}_{}'.format(order, n_gluon + 2, delta, test_points),
        NJ_treevals)

else:
    print('############### All njet files exist ###############')

# Load the tree-level NJet labels; `labs` is presumably the label filename
# relative to data_dir — TODO confirm against the caller.
NJ_treevals = np.load(data_dir + labs, allow_pickle=True)

# Ensure the top-level model output directory exists.
if os.path.exists(model_dir) == False:
    os.mkdir(model_dir)
    print('Creating directory')
else:
    print('Directory already exists')

# Instantiate a Model over all momenta/labels; the eight return values of
# process_training_data are discarded, so the call is made for its side
# effects only (presumably caching normalisation statistics on the instance —
# verify against Model's definition).
NN = Model((n_gluon + 2 - 1) * 4, momenta, NJ_treevals)
_, _, _, _, _, _, _, _ = NN.process_training_data()

# Accumulators: one trained model plus its input/output normalisation
# statistics per training rerun.
models = []
x_means = []
y_means = []
x_stds = []
y_stds = []

# Train `training_reruns` independent model copies (an ensemble); each rerun
# gets its own subdirectory named after the run configuration.
# (Loop body continues beyond this excerpt.)
for i in range(training_reruns):
    print('Working on model {}'.format(i))
    model_dir_new = model_dir + '/{}_{}_{}_{}_{}/'.format(
        order, n_gluon + 2, delta, points, i)
    print('Looking for directory {}'.format(model_dir_new))
    if os.path.exists(model_dir_new) == False:
        os.mkdir(model_dir_new)