# Deeper classifier head: two extra conv stages around a max-pool, then a
# dense layer feeding a single sigmoid unit for binary classification.
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dense(1, activation='sigmoid'))
model.compile(
    optimizer='adam',
    loss='binary_crossentropy',
    metrics=['accuracy'],
)

# Train on the normalized training fold; validate on the held-out fold.
experiment = Experiment(
    model=model,
    config=config,
    model_type="classification",
    experiment_name=search_name,
)
experiment.run(
    normalize_image_data(images[train_idx]),
    labels[train_idx],
    normalize_image_data(images[val_idx]),
    labels[val_idx],
)
experiment.save()

# Map this experiment's id to the hyperparameters it used, then persist the
# whole mapping so the search results can be analyzed later.
id_param[experiment.id] = {
    'batch_size': b_size,
}
search_dir = get_git_root() + "experiments/searches/"
with open(search_dir + search_name + ".json", "w") as handle:
    json.dump(id_param, handle, indent=2)
# Minimal baseline CNN: one small conv layer flattened straight into a
# single sigmoid output for binary classification.
model = Sequential()
model.add(Conv2D(8, kernel_size=3, activation='relu',
                 input_shape=images.shape[1:], padding='same'))
model.add(Flatten())
model.add(Dense(1, activation='sigmoid'))
model.compile(
    optimizer='adam',
    loss='binary_crossentropy',
    metrics=['accuracy'],
)

# Train on the full dataset and persist the trained model with the experiment.
experiment = Experiment(
    model=model,
    config=config,
    model_type="classification",
    experiment_name="full_training_cnn_small",
)
experiment.run(
    images,
    labels,
)
experiment.save(save_model=True, save_indices=False)
print("Finished experiment:", experiment.id)

# Append a provenance record (experiment id, producing script, config repr)
# to the shared models log. FIX: use a context manager instead of a manual
# open()/close() pair so the handle is released even if a write raises.
lpath = experiment.config['path_args']['models'] + "models.log"
with open(lpath, "a") as log:
    log.write(experiment.id + ":\n")
    log.write(os.path.basename(__file__) + "\n")
    log.write(repr(config) + "\n")
# Runs every classifier in `models` as its own experiment: flat ("logistic"/
# "dense") models get images reshaped to (N, 256) vectors, CNNs get the raw
# image tensors. Each trained model is saved to <models>/<id>.h5 and its id
# recorded in `experiments`, then predictions are made on experimental data.
# NOTE(review): this fragment is truncated — the final statement
# `pred = experiment.model.predict(` is cut off mid-call, so the code below is
# kept byte-identical rather than rewritten; confirm the tail against the
# full file before editing.
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy']) models['cnn_deep'] = model # Run experiments for k, m in models.items(): print(m.summary()) experiment = Experiment(model=m, config=config, model_type="classification", experiment_name="full_training_classifier_" + k) if "logistic" in k or "dense" in k: experiment.run( images.reshape(images.shape[0], 256), labels, ) else: experiment.run( images, labels, ) print("Outputting model:", k) experiment.save() mpath = experiment.config['path_args']['models'] + experiment.id + ".h5" experiment.model.save(mpath) experiments[k] = experiment.id # Predict on experimental data and output results if "logistic" in k or "dense" in k: pred = experiment.model.predict(
# DSNT-style setup: `model` is the trainable graph, while `prediction_model`
# shares its layers but additionally exposes intermediate heatmaps alongside
# the coordinates at predict time.
prediction_model = tf.keras.Model(inputs=inputs, outputs=outputs)
model.compile(
    optimizer='adam',
    loss='mse',
)
prediction_model.compile(
    optimizer='adam',
    loss='mse',
)
print(model.summary())

# Train on normalized single-event images; targets are the first two
# components (x, y) of the normalized positions.
experiment = Experiment(
    model=model,
    config=config,
    model_type="regression",
    experiment_name=search_name,
)
experiment.run(
    normalize_image_data(images[single_indices]),
    normalize_position_data(positions[single_indices])[:, :2],
)
experiment.save()
mpath = experiment.config['path_args']['models'] + experiment.id + ".h5"
prediction_model.save(mpath)

# Predict on the fold-0 validation indices and dump heatmaps/coords to disk.
# BUG FIX: the network was trained on normalize_image_data(...) input, but the
# original predicted on raw `images`, so inference saw a different input scale
# than training. Normalize here to match the training preprocessing.
heatmaps, coords = prediction_model.predict(
    normalize_image_data(
        images[single_indices][experiment.indices['fold_0']['val_idx']]))
np.save("dsnt_heatmaps_pred.npy", heatmaps)
np.save("dsnt_coords_pred.npy", coords)
print("Finished experiment.")
print("Name:", search_name)
print("id:", experiment.id)
# Regression CNN for single-event energy: four conv stages (one max-pool),
# then a dense layer into a single linear output unit.
model.add(Conv2D(32, kernel_size=(3, 3), activation='relu',
                 input_shape=(16, 16, 1), padding=padding))
model.add(Conv2D(64, (3, 3), activation='relu', padding=padding))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Conv2D(64, (3, 3), activation='relu', padding=padding))
model.add(Conv2D(64, (3, 3), activation='relu', padding=padding))
model.add(Flatten())
model.add(Dense(256, activation='relu'))
model.add(Dense(1, activation='linear'))
model.compile(
    loss='mse',
    optimizer='adam',
)
print(model.summary())

# Fit on normalized single-event images against the first energy column,
# then persist the experiment and the trained weights.
experiment = Experiment(
    model=model,
    config=config,
    model_type="regression",
    experiment_name=search_name,
)
experiment.run(
    normalize_image_data(images[single_indices]),
    energies[single_indices, 0],
)
experiment.save()
model_path = experiment.config['path_args']['models'] + experiment.id + ".h5"
model.save(model_path)
# Regression CNN for double-event positions: same four-conv backbone as the
# single-event script, but with a 4-unit linear output (two x/y pairs),
# trained on normalized double-event images and normalized positions, then
# saved to <models>/<id>.h5.
# NOTE(review): this fragment is truncated at the start — the leading
# `model.add(` before `Conv2D(32, ...)` was cut, leaving an unbalanced
# closing paren. The code is kept byte-identical rather than rewritten;
# confirm the opening against the full file before editing.
Conv2D(32, kernel_size=(3, 3), activation='relu', input_shape=(16, 16, 1), padding=padding)) model.add(Conv2D(64, (3, 3), activation='relu', padding=padding)) model.add(MaxPooling2D(pool_size=(2, 2))) model.add(Conv2D(64, (3, 3), activation='relu', padding=padding)) model.add(Conv2D(64, (3, 3), activation='relu', padding=padding)) model.add(Flatten()) model.add(Dense(256, activation='relu')) model.add(Dense(4, activation='linear')) model.compile( loss='mse', optimizer='adam', ) print(model.summary()) # Run experiment experiment = Experiment(model=model, config=config, model_type="regression", experiment_name=search_name) experiment.run( normalize_image_data(images[double_indices]), normalize_position_data(positions[double_indices]), ) experiment.save() mpath = experiment.config['path_args']['models'] + experiment.id + ".h5" model.save(mpath)