def main(argv):
    """Train the model (unless started in test mode), then evaluate.

    Reads and rewrites the module-level ``config``: picks the epoch whose
    weights should be evaluated, flips the config into test mode, and runs
    ``eval_alone``.
    """
    global config
    if not config.test:
        train(config)
    # -1 means "evaluate the final epoch"; otherwise offset from max_epoch.
    final_epoch = (config.max_epoch if config.weights == -1
                   else config.max_epoch + config.weights)
    config = add_to_config(config, 'weights', final_epoch)
    config = add_to_config(config, 'test', True)
    eval_alone(config)
def main(argv):
    """Announce the run, train (unless in test mode), then evaluate.

    Mutates the module-level ``config``: selects the weights epoch, switches
    into test mode, and invokes ``eval_alone``.
    """
    global config
    # Announce the run both on stdout and in a freshly truncated log file.
    with open(config.log_file, 'w') as log:
        for stream in (None, log):  # file=None prints to stdout
            print('STARTING', file=stream)
    if not config.test:
        train(config)
    offset = config.weights
    # -1 means "evaluate the final epoch"; otherwise offset from max_epoch.
    epoch = config.max_epoch if offset == -1 else config.max_epoch + offset
    config = add_to_config(config, 'weights', epoch)
    config = add_to_config(config, 'test', True)
    eval_alone(config)
# NOTE(review): this chunk begins inside a training function whose `def` line
# is outside this view; `it`, `losses`, and `accs` come from that scope, and
# the original indentation of these tail statements cannot be recovered here.
highest_model_saved = it  # remember the last iteration whose snapshot was written
ACC_LOGGER.plot(dest=config.log_dir)
LOSS_LOGGER.plot(dest=config.log_dir)
print("LOSS: ", np.mean(losses))
print("ACCURACY", np.mean(accs))


if __name__ == '__main__':
    # Script entry point: build the caffe solver, optionally train, then
    # restore the newest snapshot and evaluate it.
    config = get_config()
    caffe.set_device(0)  # single-GPU setup on device 0
    caffe.set_mode_gpu()
    data_size = get_dataset_size(config, 'train')
    prepare_solver_file(data_size=data_size)
    solver = caffe.get_solver(config.solver)
    if not config.test:
        # Loggers are module-level so the eval path can reuse them.
        LOSS_LOGGER = Logger("{}_loss".format(config.name))
        ACC_LOGGER = Logger("{}_acc".format(config.name))
        train(config, solver)
    config = add_to_config(config, 'test', True)
    # Restore the latest solver state and load its weights into both the
    # train net and the test net before evaluating.
    snapshot = get_highest_model(config)
    solver.restore(snapshot)
    caffemodel = os.path.splitext(snapshot)[0] + '.caffemodel'
    solver.net.copy_from(caffemodel)
    solver.test_nets[0].copy_from(caffemodel)
    print('Model restored')
    eval(config, solver)  # NOTE(review): `eval` shadows the builtin — presumably a project-level function; verify
# Dataset-conversion driver: read dataset metadata, convert ModelNet .off
# meshes to .obj in parallel, and record category names / field-of-view in
# the config. Any failure is logged to config.log_file and the script exits.
config = get_config()
with open(config.log_file, 'w') as f:
    print("STARTING CONVERSION", file=f)
try:
    # Dataset-specific helpers (get_metadata, get_files_list, find_files,
    # off2obj, ...) come from the star import selected here.
    if config.dataset_type == "shapenet":
        from Shapenet import *
    elif config.dataset_type == "modelnet":
        from Modelnet import *
    categories, split, cat_names = get_metadata(config.data)
    write_cat_names(config.data, config.output)
    if config.dataset_type == "shapenet":
        files = get_files_list(config.data, categories)
        config = add_to_config(config, 'fov', 35)
    elif config.dataset_type == "modelnet":
        files = find_files(config.data, 'off')
        pool = Pool(processes=config.num_threads)
        # FIX: this call was commented out, so the .off meshes were never
        # converted and the .obj search below found nothing.
        pool.map(off2obj, files)
        pool.close()
        pool.join()
        files = find_files(config.data, 'obj')
        config = add_to_config(config, 'fov', 70)
    config = add_to_config(config, 'cat_names', cat_names)
except Exception:  # FIX: was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt
    e = sys.exc_info()
    with open(config.log_file, 'a') as f:
        print("Exception occured while reading files.", file=f)
        print("Exception {}".format(e), file=f)
    sys.exit(1)
# NOTE(review): this fragment starts inside an evaluation function whose
# `def` line is outside this view; `pred_array`, `epoch`, `loss`, `acc`, and
# `labels` come from that enclosing scope.
predictions = list(pred_array)
# FIX: the original format string used positional indices {2}/{3} with only
# three arguments (valid indices 0..2), which raises IndexError at runtime;
# the "EVALUTAION" typo in the same message is fixed alongside.
log(
    config.log_file,
    'EVALUATION: epoch: {0:^3d}, loss: {1:.6f}, acc: {2:.5f}'.format(
        epoch, loss, acc))
if not config.test:
    # During training, feed the eval metrics into the running loggers.
    LOSS_LOGGER.log(loss, epoch, "eval_loss")
    ACC_LOGGER.log(acc, epoch, "eval_accuracy")
else:
    # Final test run: dump per-sample predictions and a confusion matrix.
    import Evaluation_tools as et
    eval_file = os.path.join(config.log_dir, '{}.txt'.format(config.name))
    et.write_eval_file(config.data, eval_file, predictions, labels, config.name)
    et.make_matrix(config.data, eval_file, config.log_dir)


if __name__ == '__main__':
    config = get_config()
    if not config.test:
        LOSS_LOGGER = Logger("{}_loss".format(config.name))
        ACC_LOGGER = Logger("{}_acc".format(config.name))
        train(config)
    # -1 means "evaluate the final epoch"; otherwise offset from max_epoch.
    if config.weights == -1:
        config = add_to_config(config, 'weights', config.max_epoch)
    else:
        config = add_to_config(config, 'weights', config.max_epoch + config.weights)
    config = add_to_config(config, 'test', True)
    test(config)
# NOTE(review): fragment — the opening `try:` and the initial
# `if config.dataset_type == "shapenet":` branch sit outside this view, so
# the indentation of the leading `elif` and the orphan `except:` below is a
# best-effort reconstruction.
elif config.dataset_type == "modelnet":
    from Modelnet import *
categories, split, cat_names = get_metadata(config.data)
write_cat_names(config.data, config.output)
if config.dataset_type == "shapenet":
    files = get_files_list(config.data, categories)
elif config.dataset_type == "modelnet":
    # Convert .off meshes to .obj in parallel before collecting them.
    files = find_files(config.data, 'off')
    pool = Pool(processes=config.num_threads)
    pool.map(off2obj, files)
    pool.close()
    pool.join()
    files = find_files(config.data, 'obj')
config = add_to_config(config, 'cat_names', cat_names)
except:  # NOTE(review): bare except — any failure is logged and the script exits
    e = sys.exc_info()
    with open(config.log_file, 'a') as f:
        print("Exception occured while reading files.", file=f)
        print("Exception {}".format(e), file=f)
    sys.exit(1)


def exists(file):
    # Check whether every rendered view image for this model already exists.
    # NOTE(review): relies on module-level `categories`, `cat_names`, `split`,
    # `coding`, and `config` — presumably set by the script body above; verify.
    # NOTE: `file` and `id` shadow builtin names.
    id = get_file_id(file)
    cat = categories[id]
    cat_name = cat_names[cat]
    dataset = coding[split[id]]
    whole_path = os.path.join(config.output, cat_name, dataset, id)
    for view in range(config.num_views):
        if not os.path.exists(get_name_of_image_file(whole_path, id,
        # NOTE(review): SOURCE is truncated mid-call here — the remaining
        # arguments and the loop body are outside this view.
# NOTE(review): dangling continuation — the statement these tokens close
# begins outside this view (apparently a call taking the output .npz path).
os.path.join(config.output, "{}.npz".format(dataset)))


def log(file, log_string):
    # Echo `log_string` to stdout and append the same line to `file`.
    # NOTE: the parameter name `file` shadows a builtin name.
    with open(file, 'a') as f:
        print(log_string)
        print(log_string, file=f)


if __name__ == '__main__':
    # Conversion entry point: precompute the rotation matrix, pick the
    # dataset-specific helpers, and read metadata/file lists.
    config = get_config()
    with open(config.log_file, 'w') as f:
        print("STARTING CONVERSION", file=f)
    try:
        # Rotation matrix for the configured number of view rotations,
        # stashed in the config for later stages.
        ROT_MATRIX = create_ROT_MATRIX(config.num_rotations)
        config = add_to_config(config, 'matrix', ROT_MATRIX)
        # Dataset-specific helpers come from the star import selected here.
        if config.dataset_type == "shapenet":
            from Shapenet import *
        elif config.dataset_type == "modelnet":
            from Modelnet import *
        categories, split, cat_names = get_metadata(config.data)
        files = get_files_list(config.data, categories)
        write_cat_names(config.data, config.output)
    except:  # NOTE(review): bare except — any failure is logged with its traceback, then exit
        err_string = traceback.format_exc()
        log(config.log_file, "Exception occured while reading files.")
        log(config.log_file, err_string)
        sys.exit(1)
def cli(add, rm, show, all, dotfiles, configs, packages, fonts, old_path,
        new_path, remote, reinstall_all, reinstall_configs, reinstall_dots,
        reinstall_fonts, reinstall_packages, delete_config, destroy_backup, v):
    """
    Easily back up installed packages, dotfiles, and more.
    You can edit which dotfiles are backed up in ~/.shallow-backup.
    Written by Aaron Lichtman (@alichtman).
    """
    # Process CLI args.
    # admin_action: any flag that should run one action and exit immediately,
    # before the interactive splash/menu flow. `add` is presumably a tuple of
    # CLI options where unset slots are None — TODO confirm against the CLI
    # option declarations.
    admin_action = any([v, delete_config, destroy_backup, show, rm
                        ]) or None not in add
    has_cli_arg = any([
        old_path, all, dotfiles, packages, fonts, configs, reinstall_dots,
        reinstall_fonts, reinstall_all, reinstall_configs, reinstall_packages
    ])
    skip_prompt = any([
        all, dotfiles, configs, packages, fonts, reinstall_packages,
        reinstall_configs, reinstall_dots, reinstall_fonts
    ])

    # Perform administrative action and exit.
    if admin_action:
        if v:
            print_version_info()
        elif delete_config:
            # TODO: Error checking.
            os.remove(get_config_path())
            print_red_bold("Removed config file...")
        elif destroy_backup:
            backup_home_path = get_config()["backup_path"]
            destroy_backup_dir(backup_home_path)
        elif None not in add:
            add_to_config(add[0], add[1])
        elif rm:
            rm_from_config(rm)
        elif show:
            show_config()
        sys.exit()

    # Start CLI
    splash_screen()
    safe_create_config()
    backup_config = get_config()

    # User entered a new path, so update the config
    if new_path:
        abs_path = os.path.abspath(new_path)
        print(Fore.BLUE + Style.NORMAL + "\nUpdating shallow-backup path to -> "
              + Style.BRIGHT + "{}".format(abs_path) + Style.RESET_ALL)
        backup_config["backup_path"] = abs_path
        write_config(backup_config)
    # User didn't enter any CLI args so prompt for path update before showing menu
    elif not has_cli_arg:
        prompt_for_path_update(backup_config)

    # Create backup directory and do git setup.
    # Re-read the config: the path may have been changed just above.
    backup_home_path = get_config()["backup_path"]
    mkdir_warn_overwrite(backup_home_path)
    repo, new_git_repo_created = safe_git_init(backup_home_path)
    # Create default gitignore if we just ran git init
    if new_git_repo_created:
        safe_create_gitignore(backup_home_path)
    # Prompt user for remote URL
    if not remote:
        prompt_for_git_url(repo)
    # Set remote URL from CLI arg
    if remote:
        git_set_remote(repo, remote)

    # Per-category destination directories inside the backup root.
    dotfiles_path = os.path.join(backup_home_path, "dotfiles")
    configs_path = os.path.join(backup_home_path, "configs")
    packages_path = os.path.join(backup_home_path, "packages")
    fonts_path = os.path.join(backup_home_path, "fonts")

    # Command line options: dispatch directly on the flags, no menu.
    # Reinstall actions take priority over backup actions.
    if skip_prompt:
        if reinstall_packages:
            reinstall_packages_sb(packages_path)
        elif reinstall_configs:
            reinstall_configs_sb(configs_path)
        elif reinstall_fonts:
            reinstall_fonts_sb(fonts_path)
        elif reinstall_dots:
            reinstall_dots_sb(dotfiles_path)
        elif reinstall_all:
            reinstall_all_sb(dotfiles_path, packages_path, fonts_path,
                             configs_path)
        elif all:
            backup_all(dotfiles_path, packages_path, fonts_path, configs_path,
                       skip=True)
            git_add_all_commit_push(repo, "all")
        elif dotfiles:
            backup_dotfiles(dotfiles_path, skip=True)
            git_add_all_commit_push(repo, "dotfiles")
        elif configs:
            backup_configs(configs_path, skip=True)
            git_add_all_commit_push(repo, "configs")
        elif packages:
            backup_packages(packages_path, skip=True)
            git_add_all_commit_push(repo, "packages")
        elif fonts:
            backup_fonts(fonts_path, skip=True)
            git_add_all_commit_push(repo, "fonts")
    # No CL options, show action menu and process selected option.
    else:
        selection = actions_menu_prompt().lower().strip()
        selection_words = selection.split()
        # The last word of the menu selection names the backup category.
        if selection.startswith("back up"):
            if selection_words[-1] == "all":
                backup_all(dotfiles_path, packages_path, fonts_path,
                           configs_path)
                git_add_all_commit_push(repo, selection_words[-1])
            elif selection_words[-1] == "dotfiles":
                backup_dotfiles(dotfiles_path)
                git_add_all_commit_push(repo, selection_words[-1])
            elif selection_words[-1] == "configs":
                backup_configs(configs_path)
                git_add_all_commit_push(repo, selection_words[-1])
            elif selection_words[-1] == "packages":
                backup_packages(packages_path)
                git_add_all_commit_push(repo, selection_words[-1])
            elif selection_words[-1] == "fonts":
                backup_fonts(fonts_path)
                git_add_all_commit_push(repo, selection_words[-1])
        elif selection.startswith("reinstall"):
            if selection_words[-1] == "packages":
                reinstall_packages_sb(packages_path)
            elif selection_words[-1] == "configs":
                reinstall_configs_sb(configs_path)
            elif selection_words[-1] == "fonts":
                reinstall_fonts_sb(fonts_path)
            elif selection_words[-1] == "dotfiles":
                reinstall_dots_sb(dotfiles_path)
            elif selection_words[-1] == "all":
                reinstall_all_sb(dotfiles_path, packages_path, fonts_path,
                                 configs_path)
        else:
            if selection == "show config":
                show_config()
            elif selection == "destroy backup":
                if prompt_yes_no(
                        "Erase backup directory: {}?".format(backup_home_path),
                        Fore.RED):
                    destroy_backup_dir(backup_home_path)
                else:
                    print_red_bold(
                        "Exiting to prevent accidental deletion of backup directory."
                    )
    sys.exit()
# Script entry: prepare the multi-view file lists, build the caffe solver,
# optionally train, then restore the newest snapshot and evaluate it.
config = get_config()
prepare_data(os.path.join(config.data, 'train.txt'),
             views=config.num_views,
             shuffle=True)
prepare_data(os.path.join(config.data, 'test.txt'),
             views=config.num_views,
             shuffle=False)
caffe.set_device(0)  # single-GPU setup on device 0
caffe.set_mode_gpu()
data_size = get_dataset_size(config, 'trainrotnet')
prepare_solver_file(data_size=data_size)
solver = caffe.get_solver(config.solver)
if not config.test:
    LOSS_LOGGER = Logger("{}_loss".format(config.name))
    ACC_LOGGER = Logger("{}_acc".format(config.name))
    train(config, solver)
# NOTE(review): `highest_model_saved` is not defined anywhere in this view —
# presumably a global set inside train(). If config.test is True, train()
# never runs and this line raises NameError. Verify.
config = add_to_config(config, 'weights', highest_model_saved)
config = add_to_config(config, 'test', True)
# Restore the latest solver state and load its weights into both nets.
snapshot = get_highest_model(config)
solver.restore(snapshot)
caffemodel = os.path.splitext(snapshot)[0] + '.caffemodel'
solver.net.copy_from(caffemodel)
solver.test_nets[0].copy_from(caffemodel)
print('Model restored')
eval(config, solver)  # NOTE(review): `eval` shadows the builtin — presumably a project-level function
# NOTE(review): fragment — `f` comes from a `with open(...)` statement that
# begins outside this view.
print('Starting', file=f)
# Prepare the multi-view file lists, build the caffe solver, optionally
# train, then restore the newest snapshot and evaluate it.
prepare_data(os.path.join(config.data, 'train.txt'),
             views=config.num_views,
             shuffle=True)
prepare_data(os.path.join(config.data, 'test.txt'),
             views=config.num_views,
             shuffle=False)
caffe.set_device(0)  # single-GPU setup on device 0
caffe.set_mode_gpu()
data_size = get_dataset_size(os.path.join(config.data, 'trainrotnet.txt'))
prepare_solver_file(data_size=data_size)
solver = caffe.get_solver(config.solver)
if not config.test:
    LOSS_LOGGER = Logger("{}_loss".format(config.name))
    ACC_LOGGER = Logger("{}_acc".format(config.name))
    train(config, solver)
# NOTE(review): 'weights' is set to a snapshot *path* here, while sibling
# variants in this codebase store an epoch number — confirm what
# add_to_config consumers expect.
config = add_to_config(config, 'weights', get_highest_model(config))
config = add_to_config(config, 'test', True)
# Restore the latest solver state and load its weights into both nets.
snapshot = get_highest_model(config)
solver.restore(snapshot)
caffemodel = os.path.splitext(snapshot)[0] + '.caffemodel'
solver.net.copy_from(caffemodel)
solver.test_nets[0].copy_from(caffemodel)
log(config.log_file, 'Model restored')
eval(config, solver)  # NOTE(review): `eval` shadows the builtin — presumably a project-level function