# Load the retrained weights and set up inference on the held-out test set.
TRAINING_WEIGHTS_FILEPATH = os.path.join(CHECKPOINT_FOLDER_PATH,
                                         'retrained_UNet_500+250epochs.hdf5')
fname_test = [os.path.join(TRAIN_VAL_TEST_DIR, "Xy_test.npz")]
model.load_weights(TRAINING_WEIGHTS_FILEPATH)

prediction_steps, n_evts_test = get_n_iterations(fname_test, batch_size=BATCH_SIZE)
print("prediction steps per epoch: {}, n events: {}".format(prediction_steps,
                                                            n_evts_test))

print('INFERENCE STEP')
# Replicate the model across two GPUs for faster batched prediction.
parallel_model = multi_gpu_model(model, gpus=2)
test_data_gen = data_generator(fname_test, batch_size=BATCH_SIZE,
                               ftarget=lambda y: y)


def inference_step(network_model, test_data_generator, predict_steps):
    """Run batched prediction; returns one row of flattened predictions per batch."""
    y_pred = list()
    for _ in tqdm(range(predict_steps)):
        X_batch, _ = next(test_data_generator)
        Y_batch_pred = network_model.predict_on_batch(X_batch)
        y_pred.append(Y_batch_pred.ravel())
    y_pred = np.vstack(np.asarray(y_pred))
    return y_pred
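# A minimal usage sketch for the inference step defined above. Calling it on
# the multi-GPU model follows from the cell, but saving the result is an
# illustrative assumption and the filename 'y_pred_test.npz' is hypothetical.
y_pred = inference_step(parallel_model, test_data_gen, prediction_steps)
print("prediction matrix shape:", y_pred.shape)  # (prediction_steps, BATCH_SIZE * n_outputs)
np.savez(os.path.join(CHECKPOINT_FOLDER_PATH, 'y_pred_test.npz'), y_pred=y_pred)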
# Derive iteration counts for the training and validation sets.
steps_per_epoch, n_events = get_n_iterations(fname_train, batch_size=BATCH_SIZE)
print("training steps per epoch: {}, number of events: {}".format(steps_per_epoch,
                                                                  n_events))
validation_steps, n_evts_val = get_n_iterations(fname_val, batch_size=BATCH_SIZE)
print("validation steps per epoch: {}, number of events: {}".format(validation_steps,
                                                                    n_evts_val))

# Targets pass through unchanged (identity ftarget).
training_generator = data_generator(fname_train, batch_size=BATCH_SIZE,
                                    ftarget=lambda y: y)
validation_generator = data_generator(fname_val, batch_size=BATCH_SIZE,
                                      ftarget=lambda y: y)

training_history = train_neural_network(model, training_generator, steps_per_epoch,
                                        validation_generator, validation_steps,
                                        batch_size=BATCH_SIZE, epochs=N_EPOCHS)
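# For reference, a minimal stand-in showing the (X, y) batch contract the
# training loop assumes from data_generator. This sketch is an assumption for
# illustration only: data_generator_sketch and the default keys 'x'/'y' are
# hypothetical, and the project's real helper (file layout, shuffling) may differ.
def data_generator_sketch(fnames, batch_size, data_key='x', label_key='y',
                          fdata=lambda x: x, ftarget=lambda y: y):
    while True:  # Keras-style fitting expects a never-ending generator
        for fname in fnames:
            with np.load(fname) as archive:
                X, y = archive[data_key], archive[label_key]
            # Yield only full batches so step counts match get_n_iterations.
            for start in range(0, len(X) - batch_size + 1, batch_size):
                batch = slice(start, start + batch_size)
                yield fdata(X[batch]), ftarget(y[batch])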
validation_steps, n_evts_val = get_n_iterations(fname_val, batch_size=BATCH_SIZE)
print("validation steps per epoch: {}, number of events: {}".format(validation_steps,
                                                                    n_evts_val))


def ohe(values):
    """One-hot encode a 1-D array of class labels."""
    # Note: sparse=False matches older scikit-learn releases; newer ones use
    # sparse_output=False. Fitting the encoder per call assumes every batch
    # contains all classes, otherwise the column count can vary between batches.
    values_reshaped = values.reshape(-1, 1)
    onehot_encoder = OneHotEncoder(sparse=False)
    onehot_encoded = onehot_encoder.fit_transform(values_reshaped)
    return onehot_encoded


# Inputs pass through unchanged; the 'dist' targets are one-hot encoded.
training_generator = data_generator(fname_train, data_key='x', label_key='dist',
                                    batch_size=BATCH_SIZE,
                                    fdata=lambda y: y, ftarget=ohe)
validation_generator = data_generator(fname_val, data_key='x', label_key='dist',
                                      batch_size=BATCH_SIZE,
                                      fdata=lambda y: y, ftarget=ohe)

training_history = train_neural_network(model, training_generator, steps_per_epoch,
                                        validation_generator, validation_steps,
                                        batch_size=BATCH_SIZE, epochs=N_EPOCHS)
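# Quick sanity check of the ohe target transform on a toy label vector;
# the values below are illustrative only.
toy_labels = np.array([0, 2, 1, 2])
print(ohe(toy_labels))
# [[1. 0. 0.]
#  [0. 0. 1.]
#  [0. 1. 0.]
#  [0. 0. 1.]]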
steps_per_epoch, n_events = get_n_iterations(fname_train, batch_size=BATCH_SIZE)
print("training steps per epoch: {}, number of events: {}".format(steps_per_epoch,
                                                                  n_events))
prediction_steps, n_evts_test = get_n_iterations(fname_test, batch_size=BATCH_SIZE)
print("prediction steps per epoch: {}, number of events: {}".format(prediction_steps,
                                                                    n_evts_test))
# Validation steps are computed on the validation files, matching the
# validation generator below.
validation_steps, n_evts_val = get_n_iterations(fname_val, batch_size=BATCH_SIZE)
print("validation steps per epoch: {}, number of events: {}".format(validation_steps,
                                                                    n_evts_val))

training_generator = data_generator(fname_train, batch_size=BATCH_SIZE,
                                    ftarget=lambda y: y)
# Note the asymmetry: the validation generator applies the get_Time_Coord /
# process_cosz preprocessing, while the training generator passes data through.
validation_generator = data_generator(fname_val, batch_size=BATCH_SIZE,
                                      fdata=get_Time_Coord, ftarget=process_cosz)

model = get_unet()
model.summary()
training_history = train_neural_network(model, training_generator, steps_per_epoch,
                                        validation_generator, validation_steps,
                                        batch_size=BATCH_SIZE, epochs=N_EPOCHS)
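# For context, a minimal sketch of the contract get_n_iterations appears to
# satisfy: the number of full batches per pass over the listed .npz files,
# plus the total event count. get_n_iterations_sketch and the label key 'y'
# are hypothetical; the project's real helper may differ.
def get_n_iterations_sketch(fnames, batch_size, label_key='y'):
    n_events = sum(len(np.load(f)[label_key]) for f in fnames)
    return n_events // batch_size, n_events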