return np.array(new_data) else: fine_counter += 1 return data if __name__ == "__main__": current_dir = os.path.dirname(os.path.abspath(__file__)) print("Scanning system for data...") dynamics_path = os.path.abspath( os.path.join(current_dir, "../new_plotting/training")) # dynamics_path = os.path.abspath(os.path.join(current_dir, "../plotting/training")) dynamics_data = load_structured_directory_data( dynamics_path, progress_file_name="temp_dynamics_paths.npz", data_file_name="temp_dynamics_data.npz", load_data=True, force_rescan=False) # not_fine_counter = 0 # fix_counter = 0 # average data over hyper-parameters for dataset in dynamics_data: dataset_data = dynamics_data[dataset] for noise_type in dataset_data: noise_type_data = dataset_data[noise_type] for noise_level in noise_type_data: noise_level_data = noise_type_data[noise_level] # insert things according to hyper-param so that I can average over that
os.path.join(file_dir, "../new_plotting/training")) root_save_dir = os.path.join(file_dir, "../new_plotting/hypothesis_testing") temp_dir = os.path.join(file_dir, "temp") os.makedirs(root_save_dir, exist_ok=True) os.makedirs(temp_dir, exist_ok=True) progress_file_name = "temp_dynamics_paths.npz" data_file_name = "temp_dynamics_data.npz" progress_file_path = os.path.join(temp_dir, progress_file_name) data_file_path = os.path.join(temp_dir, data_file_name) dynamics_data = load_structured_directory_data( dynamics_path, progress_file_name=progress_file_path, data_file_name=data_file_path, load_data=True, force_rescan=True) ############################################################################ hyper_param_white_list = np.arange(31, 103) # hyper_param_white_list = [20, 16, 26, 8, 3, 22] ############################################################################ acc_thresholds = [ 0.3, ] # acc_thresholds = np.linspace(0.1, 1.0, 10) # average data over hyper-parameters for dataset in dynamics_data:
def write_paths_to_file(dictionary, keys=None, min_hyperparam_index=30):
    """Recursively walk a nested dict of checkpoint paths and append one
    whitespace-separated record per checkpoint to TEST_LIST_PATH.

    The expected nesting (as shown by the unpacking of ``keys`` below) is
        noise_type -> noise_level -> hyperparam_index -> init_index -> [paths]
    where the leaf lists hold checkpoint file paths whose names end in
    ``..._<epoch>.<ext>``.

    Args:
        dictionary: nested dict of paths, as produced by
            load_structured_directory_data (defined elsewhere in this file).
        keys: accumulated dict keys from outer recursion levels; leave as
            None (the default) for the top-level call.
        min_hyperparam_index: only checkpoints whose hyperparam_index is
            strictly greater than this value are written. Defaults to 30,
            matching the previous hard-coded filter (and the
            ``np.arange(31, 103)`` hyper-parameter white-list used by the
            sibling analysis script).

    Raises:
        ValueError: if a leaf value is neither a dict nor a list.
    """
    # Avoid the mutable-default-argument pitfall (previously ``keys=[]``):
    # build a fresh list per top-level call.
    if keys is None:
        keys = []
    for key in dictionary:
        value = dictionary[key]
        if isinstance(value, dict):
            write_paths_to_file(value, keys + [key], min_hyperparam_index)
        elif isinstance(value, list):
            if not value:
                # Nothing to write; also keeps the unpack below from running
                # on an empty leaf (matches the original per-path behavior).
                continue
            # ``keys`` carries the three outer levels; the current key is the
            # initialisation index of this leaf list.
            [noise_type, noise_level, hyperparam_index], init_index = keys, key
            if int(hyperparam_index) <= min_hyperparam_index:
                continue
            # Open the output file once per leaf list instead of re-opening
            # it in append mode for every individual path.
            with open(TEST_LIST_PATH, "a") as test_list_file:
                for path in value:
                    model_path = os.path.abspath(path)
                    # Checkpoint names look like "..._<epoch>.<ext>": drop
                    # the extension, then take the trailing "_<epoch>" part.
                    epoch = int(model_path.split(".")[-2].split("_")[-1])
                    test_list_file.write(
                        "{noise_type} {noise_level} {hyperparam_index} {init_index} {epoch} {model_path}\n".format(
                            noise_type=noise_type,
                            noise_level=noise_level,
                            hyperparam_index=hyperparam_index,
                            init_index=init_index,
                            epoch=epoch,
                            model_path=model_path,
                        )
                    )
        else:
            # Fixed: the message previously named a nonexistent function
            # ("write_final_epoch_path_to_file").
            raise ValueError(
                "The dictionary provided to the write_paths_to_file "
                "function was not in the correct format."
            )


if __name__ == "__main__":
    # NOTE: the "temp_sensivitiy" typo is kept deliberately -- sibling
    # scripts read/write the same directory name.
    temp_save_dir = os.path.join(file_dir, "temp_sensivitiy")
    os.makedirs(temp_save_dir, exist_ok=True)
    progress_file_path = os.path.join(temp_save_dir, "sensitivity_model_paths.npz")
    # file_dir, MODEL_STATES_DIRECTORY and load_structured_directory_data are
    # defined elsewhere in this file (outside the visible chunk).
    model_paths = load_structured_directory_data(
        MODEL_STATES_DIRECTORY,
        progress_file_name=progress_file_path,
        force_rescan=True,
    )
    write_paths_to_file(model_paths)
plt.tight_layout() plt.savefig(name) plt.close() if __name__ == "__main__": # dataset = "cifar10" # dataset = "mnist" for dataset in ["mnist", "cifar10"]: root_dir = os.path.join(file_dir, "../new_plotting/sensitivity", dataset) temp_save_dir = os.path.join(file_dir, "temp_sensivitiy") os.makedirs(temp_save_dir, exist_ok=True) progress_file_path = os.path.join( temp_save_dir, "{}_sensitivity_model_paths.npz".format(dataset)) paths_to_results = load_structured_directory_data( root_dir, progress_file_name=progress_file_path, force_rescan=True) figures_dir = os.path.join(root_dir, "figures") os.makedirs(figures_dir, exist_ok=True) data = {} # data_loader = get_data(dataset, os.path.join(file_dir, "../data"), mode="inter_class") data_loader = get_data(dataset, os.path.join(file_dir, "../data"), mode="inter_class", re_generate=True) # plot_full_path(data_loader) # plot_path(data_loader) for noise_type in paths_to_results:
plt.tight_layout() plt.savefig(name) plt.close() if __name__ == "__main__": plot_variance_prop_bounds_with_init_sampling("dropout", 0.7, 11) current_dir = os.path.dirname(os.path.abspath(__file__)) plot_dir = os.path.abspath(os.path.join(current_dir, "../plotting/generalization_figures")) os.makedirs(plot_dir, exist_ok=True) print("Scanning system for data...") print("training data...") train_path = os.path.join(current_dir, "../", "plotting", "train") train_data = load_structured_directory_data(train_path, progress_file_name="temp_train_paths.npz", data_file_name="temp_train_data.npz", load_data=True) print("validation data...") validation_path = os.path.join(current_dir, "../", "plotting", "test") validation_data = load_structured_directory_data(validation_path, progress_file_name="temp_validation_paths.npz", data_file_name="temp_validation_data.npz", load_data=True) print("Processing training data...") for noise_type in train_data: # print("Noise type: {}".format(noise_type)) for noise_level in train_data[noise_type]: # print("Noise level: {}".format(noise_level)) new_init_dict = {} for hyperparam_index in train_data[noise_type][noise_level]: