print("creating data generators") train_generator = data_generator_index_list(fnames_list, index_list=index_list_train, batch_size=BATCH_SIZE, ftarget=lambda y: y, ) validation_generator = data_generator_index_list(fnames_list, index_list=index_list_val, batch_size=BATCH_SIZE, ftarget=lambda y: y, ) print("calculating steps per epoch") steps_per_epoch, n_events = get_n_iterations_index(fnames_list, index_list_train, batch_size=BATCH_SIZE) validation_steps, n_events = get_n_iterations_index(fnames_list, index_list_val, batch_size=BATCH_SIZE) ##X_train = np.load(os.path.join(data_dir,"Xy_train.npz"))["x"] # y_train = np.load(os.path.join(data_dir,"Xy_train.npz"))["y"] print("retraining model") TRAINING_WEIGHTS_FILEPATH = os.path.join(TASK_FOLDER_PATH, 'retrained_UNet_1500_epochs_clean_300.hdf5') model_1500 = get_unet() model_1500.load_weights(TRAINING_WEIGHTS_FILEPATH) hist_2000 = train_neural_network(model_1500, train_generator, steps_per_epoch, validation_generator, validation_steps, epochs=500) # model.load_weights(os.path.join(data_dir,"trained_models/retrained_UNet_500+250+250epochs.hdf5")) # model.fit(x=X_train, y=y_train, epochs=250, batch_size=1, verbose=1, validation_split=.2) print("saving trained model") model_1500.save(os.path.join(TASK_FOLDER_PATH, "retrained_UNet_2000_epochs_clean_300.hdf5")) pickle.dump(hist_2000, open(os.path.join(TASK_FOLDER_PATH, "hist_retrained_UNet_2000_epochs_clean_300.pkl"), 'wb')) print("keep training for 500 more epochs") hist_2500 = train_neural_network(model_1500, train_generator, steps_per_epoch, validation_generator, validation_steps, epochs=500) model_1500.save(os.path.join(TASK_FOLDER_PATH, "retrained_UNet_2500_epochs_clean_300.hdf5")) pickle.dump(hist_2500, open(os.path.join(TASK_FOLDER_PATH, "hist_retrained_UNet_2500_epochs_clean_300.pkl")), 'wb') print("keep training for 500 more epochs") hist_3000 = train_neural_network(model_1500, train_generator, steps_per_epoch, validation_generator, validation_steps, epochs=500)
# The opening of this call was truncated in the source; it is reconstructed
# here from the parallel validation call below (fname_train is an assumed name).
training_generator = data_generator(fname_train, data_key='x',
                                    label_key='dist', batch_size=BATCH_SIZE,
                                    fdata=lambda y: y, ftarget=ohe)
validation_generator = data_generator(fname_val, data_key='x',
                                      label_key='dist', batch_size=BATCH_SIZE,
                                      fdata=lambda y: y, ftarget=ohe)

training_history = train_neural_network(model, training_generator, steps_per_epoch,
                                        validation_generator, validation_steps,
                                        batch_size=BATCH_SIZE, epochs=N_EPOCHS)

print('Saving Model (JSON), Training History & Weights...', end='')
model_json_str = model.to_json()
with open(MODEL_JSON_FILEPATH, 'w') as model_json_f:
    model_json_f.write(model_json_str)

history_filepath = HISTORY_FILEPATH
dump(training_history.history, open(history_filepath, 'wb'))
model.save_weights(TRAINING_WEIGHTS_FILEPATH)
print('...Done!')
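
# To load the model back later (a minimal sketch; the paths are the same
# MODEL_JSON_FILEPATH / TRAINING_WEIGHTS_FILEPATH used in the save step above,
# and load_saved_model is a hypothetical helper name):
from keras.models import model_from_json

def load_saved_model(json_path, weights_path):
    """Rebuild the architecture from its JSON description, then restore weights."""
    with open(json_path) as model_json_f:
        model = model_from_json(model_json_f.read())
    model.load_weights(weights_path)
    return model

# Example usage:
# model = load_saved_model(MODEL_JSON_FILEPATH, TRAINING_WEIGHTS_FILEPATH)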