Ejemplo n.º 1
0
    # NOTE(review): excerpt -- the opening "if time_id == 0:" branch header
    # sits above this view. Each branch benchmarks one trained model via
    # time_it() on pickled input data.
    with open(data_path + "/tracker_input.pickle", "rb") as f:
        tracker_input = pickle.load(f)
    # Vector-conditioned generator restored from saved TF graph + JSON config.
    Generator_vec = TrainedGenerator(path_to_meta=meta_path_vec,
                                     path_to_config=config_path_vec,
                                     gpu_options=gpu_options)
    time_it(data=tracker_input,
            generator=Generator_vec,
            batch_size=batch_size,
            mode=prefix + "Vector")
elif time_id == 1:
    # Image-to-image timing: pre-generated CGAN images are padded to 64x64,
    # clipped into [0, 6120] and normalised by 6120 (presumably the maximum
    # calorimeter energy -- TODO confirm against the training scaler).
    model_path_im = "../../Results/B2Dmunu/{}/".format(model_im)
    meta_path_im = model_path_im + "TFGraphs/"
    config_path_im = model_path_im + "config.json"
    with open(data_path + "/Trained/PiplusLowerP_CWGANGP8_out_1.pickle",
              "rb") as f:
        cgan_images = np.clip(padding_zeros(pickle.load(f), top=4, bottom=4),
                              a_min=0,
                              a_max=6120).reshape([-1, 64, 64, 1]) / 6120
    Generator_im = TrainedIm2Im(path_to_meta=meta_path_im,
                                path_to_config=config_path_im,
                                gpu_options=gpu_options)
    time_it(data=cgan_images,
            generator=Generator_im,
            batch_size=batch_size,
            mode=prefix + "Image")
elif time_id == 2:
    # Direct tracker-image -> calo-image model (BiCycleGAN). This branch is
    # truncated below this excerpt.
    model_path_direct = "../../Results/ServerTemp/B2DmunuTracker/1Good/BiCycleGANTracker13/"
    meta_path_direct = model_path_direct + "TFGraphs/"
    config_path_direct = model_path_direct + "config.json"
    with open(data_path + "/tracker_images.pickle", "rb") as f:
        tracker_images = padding_zeros(
Ejemplo n.º 2
0
        # NOTE(review): excerpt -- the "if" matching the "else:" below is above
        # this view; both branches only select data/result directories.
        path_loading = "../Data/B2Dmunu/LargeSample"
        path_results = "../Results/B2Dmunu"
    else:
        # Debug run: smaller sample, results kept under Results/Test.
        path_loading = "../Data/B2Dmunu/Debug"
        path_results = "../Results/Test/B2Dmunu"

    ############################################################################################################
    # Data loading
    ############################################################################################################
    with open("{}/tracker_images.pickle".format(path_loading), "rb") as f:
        train_x = pickle.load(f)

    with open("{}/calo_images.pickle".format(path_loading), "rb") as f:
        train_y = pickle.load(f)
    image_shape = [64, 64, 1]
    # Pad both image sets to 64x64x1 and cap the sample count at 50k.
    # `padding` is defined above this excerpt.
    train_x = padding_zeros(train_x, **padding).reshape([-1,
                                                         *image_shape])[:50000]
    train_y = padding_zeros(train_y, **image_shape])[:50000] if False else padding_zeros(train_y, **padding).reshape([-1,
                                                         *image_shape])[:50000]

    # Normalise inputs and targets by the maximum calorimeter energy.
    energy_scaler = np.max(train_y)
    train_x /= energy_scaler
    train_y /= energy_scaler

    # Hold out 10% of the data (capped at 100 events) from the end as test set.
    nr_test = int(min(0.1 * len(train_x), 100))

    test_x = train_x[-nr_test:]
    train_x = train_x[:-nr_test]
    test_y = train_y[-nr_test:]
    train_y = train_y[:-nr_test]

    nr_train = len(train_x)
Ejemplo n.º 3
0
    # NOTE(review): excerpt from an MNIST-style GAN setup -- x_train/x_test,
    # y_train/y_test and `enc` (a one-hot encoder, presumably sklearn's
    # OneHotEncoder -- TODO confirm) are defined above this view.
    # x_test = x_test.reshape((len(x_test), np.prod(x_test.shape[1:])))
    # y_train = enc.fit_transform(y_train.reshape(-1, 1))
    # y_test = enc.transform(y_test.reshape(-1, 1))
    # gen_architecture = [
    #                     [tf.layers.dense, {"units": 256, "activation": tf.nn.relu}],
    #                     [tf.layers.dense, {"units": 512, "activation": tf.nn.relu}],
    #                     ]
    # critic_architecture = [
    #                     [tf.layers.dense, {"units": 512, "activation": tf.nn.relu}],
    #                     [tf.layers.dense, {"units": 256, "activation": tf.nn.relu}],
    #                     ]
    # inpt_dim = 784
    # image_shape=[28, 28, 1]

    ########### Image input
    # Pad the 28x28 digits to 32x32 and add a channel axis.
    x_train = padding_zeros(x_train, top=2, bottom=2, left=2, right=2)
    x_train = np.reshape(x_train, newshape=(-1, 32, 32, 1))
    x_test = padding_zeros(x_test, top=2, bottom=2, left=2, right=2)
    x_test = np.reshape(x_test, newshape=(-1, 32, 32, 1))
    # One example image per digit class (first occurrence of each label 0-9),
    # used for logging.
    x_train_log = [x_train[y_train.tolist().index(i)] for i in range(10)]
    x_train_log = np.reshape(x_train_log, newshape=(-1, 32, 32, 1))

    # One-hot encode the labels.
    y_train = enc.fit_transform(y_train.reshape(-1, 1))
    y_test = enc.transform(y_test.reshape(-1, 1))

    # Generator architecture: dense projection then reshape into feature maps.
    # Truncated below this excerpt.
    gen_architecture = [
        [tf.layers.dense, {
            "units": 4 * 4 * 512,
            "activation": tf.nn.relu
        }],
        [reshape_layer, {
Ejemplo n.º 4
0
# NOTE(review): excerpt -- data, Generator, energy_scaler, tracker_images and
# keep_cols are defined earlier in the original file.
# axs[-1].set_xlabel(r"Tracker $E_T$")
# axs[-1].set_ylabel(r"HCAL $E_T$")
# plt.savefig(savefolder+"/cGAN_energy.png", dpi=300)


#####################################################################################################
# Generate example images
#####################################################################################################
nr_samples = 100
# Rescale from normalised units back to GeV (energy_scaler then /1000).
calo_images = data["Calo"][:nr_samples]*energy_scaler/1000
inputs = data["Tracker"][keep_cols].values[:nr_samples]
# Wrap each tracker row in its own list -- presumably one particle list per
# event, as expected by generate_batches; TODO confirm.
inputs = [[track] for track in inputs]
generated_images = Generator.generate_batches(inputs, batch_size=100)*energy_scaler/1000
plt_idxs = []

calo_images = padding_zeros(calo_images, top=2, bottom=2)
# Tracker images flipped along axis 1 -- presumably to match the calorimeter
# image orientation; TODO confirm.
tracker_images = np.flip(padding_zeros(tracker_images, top=2, bottom=2)[:nr_samples] / 1000, axis=1)

def set_title_for(ax, im):
    """Annotate *ax* with the total and peak energy of image *im* (in GeV)."""
    # Reshape to a single (56, 64) image so the energy helpers receive the
    # layout they expect.
    single = im.reshape([1, 56, 64])
    total_e = get_energies(single)
    peak_e = get_max_energy(single)
    ax.set_title("Energy: %.2f GeV\nMaximum Energy: %.2f GeV" % (total_e, peak_e))

# Hand-picked event indices for the example figure.
idxs = [1, 2, 7, 12, 17, 18]
for row, idx in enumerate(idxs):
    print(inputs[idx])
    # Shared colour scale across the true and generated image of one event.
    max_e = np.max([np.max(calo_images[idx]), np.max(generated_images[idx])])
    fig, axs = plt.subplots(nrows=1, ncols=3, figsize=(10, 7))
    fig.subplots_adjust(left=0.05, bottom=0.02, right=0.999, top=0.999, wspace=0.3, hspace=0.4)
    axs[0].imshow(calo_images[idx], vmin=0, vmax=max_e)
    # set_title_for(ax=axs[0], im=calo_images[idx])
Ejemplo n.º 5
0
#####################################################################################################
# Data loading
#####################################################################################################

simulation = "B2Dmunu"
path_loading = "../../Data/{}/LargeSample".format(simulation)
nr_sim = 1
image_shape = [56, 64]
padding = {"top": 2, "bottom": 2, "left":0, "right":0}


if simulation == "Piplus":
    # Piplus: processed data ships with its own scaler; undo the Calo scaling
    # and convert to GeV (/1000).
    data, scaler = init.load_processed_data(path_loading, mode="train")
    calo_images = data["Calo"]*scaler["Calo"] / 1000
    calo_images = padding_zeros(calo_images, **padding)
    # NOTE(review): .format(nr_sim) is a no-op here -- the path template has
    # no placeholder; likely a leftover from an indexed-file variant.
    with open(path_loading+"/tracker_images.pickle".format(nr_sim), "rb") as f:
        tracker_images = pickle.load(f)[data["Idx"]]
    tracker_events = pd.read_csv(path_loading+"/tracker_events.csv").loc[data["Idx"]]

    tracker_real_ET = tracker_events["real_ET"].values
    # Standardise all tracker columns except the angular/region ones, then
    # re-attach the excluded columns unchanged.
    exclude = ["phi", "theta", "region"]
    columns = tracker_events.drop(exclude, axis=1).columns
    transformed_events = scaler["Tracker"].transform(tracker_events.drop(exclude, axis=1))
    transformed_events = pd.DataFrame(data=transformed_events, columns=columns, index=tracker_events.index)
    tracker_events = pd.concat([transformed_events, tracker_events[exclude]], axis=1)

elif simulation == "B2Dmunu":
    # B2Dmunu: reuse the scaler fitted on the Piplus sample. Branch is
    # truncated below this excerpt.
    with open("../../Data/Piplus/LargeSample/ProcessedScaler.pickle", "rb") as f:
        scaler = pickle.load(f)
    with open(path_loading+"/calo_images.pickle", "rb") as f:
Ejemplo n.º 6
0
        # NOTE(review): excerpt -- the opening of this with-statement and the
        # definitions of mc_data, nr_test_hist and image_shape lie above this
        # view.
        "../../Data/B2Dmunu/TestingPurpose/Trained/PiplusLowerP_CWGANGP8_out_1.pickle",
        "rb") as f:
    gan_data = pickle.load(f)
with open("../../Data/B2Dmunu/TestingPurpose/tracker_images.pickle",
          "rb") as f:
    tracker_images = pickle.load(f)
with open("../../Data/B2Dmunu/TestingPurpose/tracker_events.pickle",
          "rb") as f:
    tracker_events = pickle.load(f)
    # Per-event true ET = sum of the per-particle real_ET lists; keep only the
    # last nr_test_hist events.
    tracker_real_ET = tracker_events["real_ET"].apply(
        sum).to_numpy()[-nr_test_hist:]
with open("../../Data/Piplus/LargeSample/ProcessedScaler.pickle", "rb") as f:
    scaler = pickle.load(f)
    calo_scaler = scaler["Calo"]

# Pad each image set to a common 64x64 grid and normalise by the calorimeter
# scaler (note the differing pads: MC/tracker 6+6, GAN output 4+4).
mc_data_images_m = padding_zeros(mc_data[-nr_test_hist:], top=6,
                                 bottom=6).reshape(-1, 64, 64) / calo_scaler
gan_data_m = np.clip(padding_zeros(gan_data[-nr_test_hist:], top=4, bottom=4),
                     a_min=0,
                     a_max=calo_scaler) / calo_scaler
tracker_images_m = padding_zeros(
    tracker_images[-nr_test_hist:], top=6, bottom=6).reshape(
        [-1, *image_shape]) / calo_scaler

#####################################################################################################
# Model loading
#####################################################################################################

# Evaluate every candidate model directory; truncated below this excerpt.
for model_idx, model_path in enumerate(model_paths):
    print("Working on {} / {}: {}...".format(model_idx + 1, len(model_paths),
                                             model_path))
    if os.path.exists(model_path + "/Evaluation.pdf"):
Ejemplo n.º 7
0
    # NOTE(review): excerpt -- saving_path, meta_path, config_path and
    # data_path are defined above this view.
    os.mkdir(saving_path)

#####################################################################################################
# Data loading
#####################################################################################################
Generator = TrainedGenerator(path_to_meta=meta_path,
                             path_to_config=config_path)
# Reuse the padding and tracker columns the model was trained with.
with open(config_path, "r") as f:
    config = json.load(f)
    padding = config["padding"]
    keep_cols = config["keep_cols"]

data, scaler = init.load_processed_data(data_path=data_path,
                                        mode="test",
                                        return_scaler=True)
calo_images = padding_zeros(data["Calo"], **padding)
tracker_events = data["Tracker"]
# One single-element list per event -- presumably the per-event particle list
# format the generator expects; TODO confirm.
tracker_events_list = []
for _, row in tracker_events.iterrows():
    tracker_events_list.append([row[keep_cols].tolist()])
tracker_events_list = np.array(tracker_events_list)
image_shape = calo_images.shape[1:]

#####################################################################################################
# Check for correctness
#####################################################################################################
# data_path = "../../Data/B2Dmunu/Debug"
# with open(data_path+"/calo_images.pickle", "rb") as f:
#     calo_images = pickle.load(f)
#     calo_images = padding_zeros(calo_images, **padding)
#     image_shape = calo_images.shape[1:]
Ejemplo n.º 8
0
    # NOTE(review): excerpt -- the "if" matching this "else:" (host-dependent
    # data path selection) is above this view.
    else:
        data_path = "/home/tneuer/Backup/Algorithmen/0TestData/image_to_image/fashion_mnist"
    # Load up to 20k normalised (/255) 28x28 images; last 100 become the test
    # split for both inputs and rotated targets.
    with open("{}/train_images.pickle".format(data_path), "rb") as f:
        x_train_orig = (pickle.load(f)[0].reshape(-1, 28, 28, 1) / 255)[:20000]
        x_test_orig = x_train_orig[-100:]
        x_train_orig = x_train_orig[:-100]
    with open("{}/train_images_rotated.pickle".format(data_path), "rb") as f:
        y_train_orig = (pickle.load(f).reshape(-1, 28, 28, 1) / 255)[:20000]
        y_test_orig = y_train_orig[-100:]
        y_train_orig = y_train_orig[:-100]

    ########### Architecture

    # Pad 28x28 images to 32x32 for the network input/output shapes.
    x_train_orig = padding_zeros(x_train_orig,
                                 top=2,
                                 bottom=2,
                                 left=2,
                                 right=2)
    x_test_orig = padding_zeros(x_test_orig, top=2, bottom=2, left=2, right=2)
    y_train_orig = padding_zeros(y_train_orig,
                                 top=2,
                                 bottom=2,
                                 left=2,
                                 right=2)
    y_test_orig = padding_zeros(y_test_orig, top=2, bottom=2, left=2, right=2)
    inpt_dim = x_train_orig[0].shape
    opt_dim = y_train_orig[0].shape

    # Hyper-parameter grid; truncated below this excerpt.
    param_dict = {
        "adv_steps": [1],
        "architecture": ["keraslike"],
Ejemplo n.º 9
0
if __name__ == '__main__':
    ########### Image input
    import pickle
    # Host-dependent data location (cluster vs. local backup).
    if "lhcb_data2" in os.getcwd():
        data_path = "../../Data/fashion_mnist"
    else:
        data_path = "/home/tneuer/Backup/Algorithmen/0TestData/image_to_image/fashion_mnist"
    # Normalised (/255) fashion-MNIST images and their rotated counterparts.
    with open("{}/train_images.pickle".format(data_path), "rb") as f:
        x_train = pickle.load(f)[0].reshape(-1, 28, 28, 1) / 255
    with open("{}/train_images_rotated.pickle".format(data_path), "rb") as f:
        y_train = pickle.load(f).reshape(-1, 28, 28, 1) / 255


    ########### Architecture

    # Pad 28x28 to 32x32; generator: conv -> pool -> transpose-conv -> 1-channel
    # sigmoid output. Discriminator list is truncated below this excerpt.
    x_train = padding_zeros(x_train, top=2, bottom=2, left=2, right=2)
    y_train = padding_zeros(y_train, top=2, bottom=2, left=2, right=2)
    gen_xy_architecture = [
                        [conv2d_logged, {"filters": 32, "kernel_size": 5, "strides": 1, "padding": "same", "activation": tf.nn.leaky_relu}],
                        [tf.layers.max_pooling2d, {"pool_size": 2, "strides": 2}],
                        # [unet_original, {"depth": 2, "activation": tf.nn.leaky_relu}],
                        # [inception_block, {"filters": 32}],
                        [conv2d_transpose_logged, {"filters": 32, "kernel_size": 2, "strides": 2, "activation": tf.nn.leaky_relu}],
                        [conv2d_logged, {"filters": 1, "kernel_size": 2, "strides": 1, "padding":"same", "activation": tf.nn.sigmoid}]
                        ]
    disc_xy_architecture = [
                        # [tf.layers.conv2d, {"filters": 32, "kernel_size": 2, "strides": 2, "activation": tf.nn.leaky_relu}],
                        # [residual_block, {"filters": 32, "kernel_size": 2, "activation": tf.nn.leaky_relu, "skip_layers": 3}],
                        # [tf.layers.conv2d, {"filters": 64, "kernel_size": 2, "strides": 2, "activation": tf.nn.leaky_relu}],
                        # [residual_block, {"filters": 128, "kernel_size": 2, "activation": tf.nn.leaky_relu, "skip_layers": 3}],
                        # [inception_block, {"filters": 32}],
Ejemplo n.º 10
0
    # NOTE(review): excerpt -- path_saving, path_loading and padding are
    # defined above this view. Create one evaluation folder per monitored
    # observable.
    os.mkdir(path_saving + "/Evaluation/Cells")
    os.mkdir(path_saving + "/Evaluation/CenterOfMassX")
    os.mkdir(path_saving + "/Evaluation/CenterOfMassY")
    os.mkdir(path_saving + "/Evaluation/Energy")
    os.mkdir(path_saving + "/Evaluation/MaxEnergy")
    os.mkdir(path_saving + "/Evaluation/StdEnergy")
    os.mkdir(path_saving + "/ModelSave")

    ############################################################################################################
    # Data loading
    ############################################################################################################
    # Inputs: images produced by a previously trained CWGANGP model.
    with open(
            "{}/Trained/PiplusLowerP_CWGANGP8_out_1.pickle".format(
                path_loading), "rb") as f:
        train_x = pickle.load(f)
        train_x = padding_zeros(train_x, **padding)

    # Targets: true calorimeter images, padded to 64x64x1.
    with open("{}/calo_images.pickle".format(path_loading), "rb") as f:
        train_y = pickle.load(f)
        train_y = padding_zeros(train_y, top=6, bottom=6, left=0,
                                right=0).reshape([-1, 64, 64, 1])

    # Clip inputs into the target energy range, then normalise both by the
    # maximum calorimeter energy.
    energy_scaler = np.max(train_y)
    train_x = np.clip(train_x, a_min=0, a_max=energy_scaler)
    train_x /= energy_scaler
    train_y /= energy_scaler

    # Hold out 10% (capped at 500) from the end; truncated below this excerpt.
    nr_test = int(min(0.1 * len(train_x), 500))
    test_x = train_x[-nr_test:]
    train_x = train_x[:-nr_test]
    test_y = train_y[-nr_test:]
Ejemplo n.º 11
0
# NOTE(review): excerpt -- config_path_vec and data_path are defined above
# this view.
nr_samples = 20000
compare_to = "STP"

#####################################################################################################
# Data loading
#####################################################################################################

# Reuse padding and tracker columns from the trained model's config.
with open(config_path_vec, "r") as f:
    config = json.load(f)
    padding = config["padding"]
    keep_cols = config["keep_cols"]

# Maximum calorimeter energy -- presumably fixed from the training sample;
# TODO confirm.
energy_scaler = 6120
with open(data_path + "/calo_images.pickle", "rb") as f:
    calo_images = padding_zeros(pickle.load(f)[:nr_samples], top=6,
                                bottom=6) / 1000
with open(data_path + "/tracker_input.pickle", "rb") as f:
    tracker_input = pickle.load(f)[:nr_samples]
with open(data_path + "/tracker_events.pickle", "rb") as f:
    tracker_events = pickle.load(f)[:nr_samples]
    # Per-event true ET = sum of per-particle real_ET, converted to GeV.
    tracker_real_ET = tracker_events["real_ET"].apply(sum).to_numpy() / 1000
with open(data_path + "/Trained/PiplusLowerP_CWGANGP8_out_1.pickle",
          "rb") as f:
    cgan_images = pickle.load(f)[:nr_samples]
with open(data_path + "/tracker_images.pickle", "rb") as f:
    tracker_images = padding_zeros(
        pickle.load(f)[:nr_samples], top=6, bottom=6) / energy_scaler

# Sanity check that all sources describe the same events; truncated below.
assert tracker_input.shape[0] == calo_images.shape[0] == tracker_events.shape[
    0], ("Shape mismatch: {}, {}, {}.".format(tracker_input.shape,
                                              calo_images.shape,
Ejemplo n.º 12
0
    # NOTE(review): excerpt -- algorithm, path_results, path_loading, padding,
    # image_shape, nr_train and nr_test are defined above this view.
    ############################################################################################################
    # Data loading
    ############################################################################################################
    if not os.path.exists(path_results):
        os.mkdir(path_results)

    path_saving = init.initialize_folder(algorithm=algorithm, base_folder=path_results)

    data, scaler = init.load_processed_data(path_loading, return_scaler=True)
    train_calo = data["train"]["Calo"][:nr_train]
    train_tracker = data["train"]["Tracker"][:nr_train]
    test_calo = data["test"]["Calo"]
    test_tracker = data["test"]["Tracker"]

    # Pad calo images to the common shape; keep 15 test images for logging.
    train_calo = padding_zeros(train_calo, **padding).reshape([-1, *image_shape])
    test_calo = padding_zeros(test_calo, **padding).reshape([-1, *image_shape])
    test_calo = test_calo[:nr_test]
    logging_calo = test_calo[:15]

    ##### Rescale and check that identical
    def invert_standardize_data(data, scaler, exclude=None):
        """Undo a fitted scaler's standardisation on ``data``, passing the
        columns in ``exclude`` through unchanged.

        Parameters
        ----------
        data : pandas.DataFrame
            Frame whose non-excluded columns were standardised by ``scaler``.
        scaler : object
            Fitted transformer exposing ``inverse_transform`` (sklearn-like).
        exclude : list of column labels, optional
            Columns to leave untouched. Defaults to no exclusions.
            Fix: the original default (``None``) crashed, because
            ``data.drop(None, axis=1)`` and ``data[None]`` both raise.

        Returns
        -------
        pandas.DataFrame
            Same index as ``data``; inverse-transformed columns first,
            followed by the excluded columns.
        """
        import pandas as pd
        # Treat "no exclusions" uniformly so the default is actually usable.
        exclude = [] if exclude is None else exclude
        standardized_data = data.drop(exclude, axis=1, inplace=False)
        colnames = standardized_data.columns.values
        standardized_data = pd.DataFrame(data=scaler.inverse_transform(standardized_data), columns=colnames, index=data.index)
        data = pd.concat([standardized_data, data[exclude]], axis=1, sort=False)
        return data

    # Recover real_ET in physical units, then divide by the calorimeter scaler
    # -- presumably so tracker and calo energies share one normalised scale;
    # TODO confirm.
    train_tracker["real_ET"] = invert_standardize_data(data=train_tracker, scaler=scaler["Tracker"], exclude=["theta", "phi", "region"])["real_ET"]
    train_tracker["real_ET"] /= scaler["Calo"]