Example #1
# Imports assumed for this excerpt (not shown in the original snippet);
# get_n_iterations and data_generator are project-specific helpers defined elsewhere.
import os
from keras.utils import multi_gpu_model

TASK_NAME = 'UNet_training_generator_{}epochs'.format(N_EPOCHS)
TASK_FOLDER_PATH = os.path.join(CHECKPOINT_FOLDER_PATH, TASK_NAME)

if not os.path.exists(TASK_FOLDER_PATH):
    os.makedirs(TASK_FOLDER_PATH)

#TRAINING_WEIGHTS_FILEPATH = os.path.join(TASK_FOLDER_PATH,
#                                         '{}_weights_training{}.hdf5'.format(model.name, TASK_NAME))

TRAINING_WEIGHTS_FILEPATH = os.path.join(CHECKPOINT_FOLDER_PATH,
                                         'retrained_UNet_500+250epochs.hdf5')

fname_test = [os.path.join(TRAIN_VAL_TEST_DIR, "Xy_test.npz")]

model.load_weights(TRAINING_WEIGHTS_FILEPATH)
prediction_steps, n_evts_test = get_n_iterations(fname_test,
                                                 batch_size=BATCH_SIZE)
print("prediction steps per epoch:{}, n events:{}".format(
    prediction_steps, n_evts_test))

print('INFERENCE STEP')
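# Replicate the loaded model across 2 GPUs so inference runs data-parallel
# (presumably the parallel model is used in the prediction call that is
# truncated in this excerpt).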
parallel_model = multi_gpu_model(model, gpus=2)

test_data_gen = data_generator(fname_test,
                               batch_size=BATCH_SIZE,
                               ftarget=lambda y: y)


def inference_step(network_model, test_data_generator, predict_steps):

    y_pred = list()
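The body of inference_step is cut off in the source excerpt after the y_pred = list() line. A minimal sketch of how such a prediction loop could be completed, assuming the generator yields (X, y) batches and that Keras' predict_on_batch is used, might look like this:

import numpy as np

def inference_step(network_model, test_data_generator, predict_steps):
    # Sketch, not the original implementation: collect predictions and targets
    # over a fixed number of batches drawn from the generator.
    y_pred = list()
    y_true = list()
    for _ in range(predict_steps):
        X_batch, y_batch = next(test_data_generator)
        y_pred.append(network_model.predict_on_batch(X_batch))
        y_true.append(y_batch)
    return np.concatenate(y_pred), np.concatenate(y_true)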
Example #2
# Imports assumed for this excerpt (not shown in the original snippet);
# TASK_FOLDER_PATH, TASK_NAME and model are defined earlier in the training script.
import os
from sklearn.preprocessing import OneHotEncoder

TRAINING_WEIGHTS_FILEPATH = os.path.join(
    TASK_FOLDER_PATH,
    '{}_weights_training{}.hdf5'.format(model.name, TASK_NAME))

HISTORY_FILEPATH = os.path.join(
    TASK_FOLDER_PATH, '{}_history{}.pkl'.format(model.name, TASK_NAME))

MODEL_JSON_FILEPATH = os.path.join(TASK_FOLDER_PATH,
                                   '{}.json'.format(model.name))

fname_train = [
    os.path.join(TRAIN_VAL_TEST_DIR, "Xy_train_stratified_dist.npz")
]
fname_val = [os.path.join(TRAIN_VAL_TEST_DIR, "Xy_val_stratified_dist.npz")]

steps_per_epoch, n_events = get_n_iterations(fname_train,
                                             batch_size=BATCH_SIZE)
print("training steps per epoc:{}, number of events:{}".format(
    steps_per_epoch, n_events))

validation_steps, n_evts_val = get_n_iterations(fname_val,
                                                batch_size=BATCH_SIZE)
print("validation steps per epoch:{}, number of events:{}".format(
    validation_steps, n_evts_val))


def ohe(values):
    """One-hot encode a 1-D array of integer class labels."""
    values_reshaped = values.reshape(-1, 1)
    # note: in scikit-learn >= 1.2 the keyword is sparse_output=False
    onehot_encoder = OneHotEncoder(sparse=False)
    onehot_encoded = onehot_encoder.fit_transform(values_reshaped)
    return onehot_encoded
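
A quick usage sketch (the label values below are hypothetical), showing that each distinct class becomes one column of the encoded array:

import numpy as np

labels = np.array([0, 2, 1, 2])   # hypothetical integer class labels
encoded = ohe(labels)
print(encoded.shape)              # (4, 3): one row per label, one column per class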