def main(args):
    jsons = {
        'train': 'dump/train_si284/deltafalse/data.json',
        'dev': 'dump/test_dev93/deltafalse/data.json',
        'test': 'dump/test_eval92/deltafalse/data.json'
    }
    spk2genders = {
        'train': 'train_si284/spk2gender',
        'dev': 'test_dev93/spk2gender',
        'test': 'test_eval92/spk2gender'
    }
    # copy the ESPnet dumps to local temp storage and set up logging
    utils.safe_copytree(args.data_root, args.temp_root)
    if not os.path.exists(args.model_dir):
        os.makedirs(args.model_dir)
    logging.basicConfig(filename=os.path.join(args.model_dir, args.log_file),
                        filemode='a', level=logging.INFO)
    # seed everything for reproducibility
    torch.manual_seed(args.seed)
    torch.backends.cudnn.deterministic = args.determ
    np.random.seed(args.seed)
    if not args.eval_only:
        train(args, jsons, spk2genders)
    evaluate(args, jsons, spk2genders)
    if args.cleanup:
        utils.safe_rmtree(args.temp_root)
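# ---------------------------------------------------------------------------
# The scripts in this file lean on a local `utils` module whose source is not
# included here. As a rough, hedged sketch of what the "safe" filesystem
# helpers used above might look like (the signatures and the ignore-pattern
# semantics are assumptions inferred from the call sites, not the actual
# implementation):
import os
import shutil

def safe_copytree(src, dst, ignores=()):
    '''Mirror src into dst unless dst already exists; files matching any glob
    pattern in `ignores` are skipped, so e.g. ignores=['*.*'] would copy only
    the directory skeleton (assumed behaviour).'''
    if not os.path.exists(dst):
        shutil.copytree(src, dst, ignore=shutil.ignore_patterns(*ignores))

def safe_rmtree(path):
    '''Delete a directory tree, silently ignoring a missing path.'''
    shutil.rmtree(path, ignore_errors=True)
# ---------------------------------------------------------------------------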
def main(args):
    '''Training and evaluation script for character-based CTC ASR on the WSJ
    dataset, pre-processed by the ESPnet toolkit.'''
    jsons = {
        'train': 'dump/train_si284/deltafalse/data.json',
        'dev': 'dump/test_dev93/deltafalse/data.json',
        'test': 'dump/test_eval92/deltafalse/data.json'
    }
    # if the temporary directory already contains a json, we'll assume it's correct
    if not os.path.exists(os.path.join(args.temp_root, jsons['train'])):
        # copy the data locally for faster reading than over NFS
        utils.safe_copytree(args.data_root, args.temp_root)
    # if model_dir is specified and doesn't contain the log file yet, create it
    log_file = os.path.join(args.model_dir, args.log_file)
    if args.model_dir is not None and not os.path.exists(log_file):
        utils.safe_makedirs(args.model_dir)
    logging.basicConfig(filename=log_file, filemode='a', level=logging.INFO)
    torch.manual_seed(args.seed)
    torch.backends.cudnn.deterministic = not args.nondeterm
    np.random.seed(args.seed)
    if not args.eval_only:
        utils.safe_json_dump(vars(args), os.path.join(args.model_dir, 'args.json'))
        epoch_stats = train(args, jsons)
        utils.safe_json_dump(epoch_stats,
                             os.path.join(args.model_dir, 'epoch_stats.json'))
    if args.eval_only:
        # restore the arguments saved at training time, but keep this
        # invocation's run-time overrides (paths, device, seed, cleanup)
        data_root, temp_root = args.data_root, args.temp_root
        test, cpu, seed, cleanup = args.test, args.cpu, args.seed, args.cleanup
        with open(os.path.join(args.model_dir, 'args.json'), 'r') as f:
            json_dict = json.load(f)
        args = argparse.Namespace(**json_dict)
        args.data_root, args.temp_root = data_root, temp_root
        args.test, args.cpu, args.seed, args.cleanup = test, cpu, seed, cleanup
    evaluate(args, jsons)
    if args.cleanup:
        utils.safe_rmtree(args.temp_root)
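# ---------------------------------------------------------------------------
# A plausible command-line front-end for the main() above. The flag names are
# inferred from the attributes main() reads (data_root, temp_root, model_dir,
# log_file, seed, nondeterm, eval_only, test, cpu, cleanup); the defaults and
# help strings are illustrative assumptions, not the script's real interface.
import argparse

def parse_args():
    parser = argparse.ArgumentParser(
        description='Character-based CTC ASR on WSJ (ESPnet dumps)')
    parser.add_argument('--data-root', required=True,
                        help='root of the ESPnet-prepared WSJ data')
    parser.add_argument('--temp-root', default='/tmp/wsj_ctc',
                        help='fast local copy of the data')
    parser.add_argument('--model-dir', default=None,
                        help='where args.json, logs and checkpoints are written')
    parser.add_argument('--log-file', default='train.log')
    parser.add_argument('--seed', type=int, default=0)
    parser.add_argument('--nondeterm', action='store_true',
                        help='allow non-deterministic cuDNN kernels')
    parser.add_argument('--eval-only', action='store_true',
                        help='skip training and restore args.json before evaluating')
    parser.add_argument('--test', action='store_true',
                        help='(assumed) evaluate on eval92 instead of dev93')
    parser.add_argument('--cpu', action='store_true')
    parser.add_argument('--cleanup', action='store_true',
                        help='remove temp_root when finished')
    return parser.parse_args()

# usage sketch: main(parse_args())
# ---------------------------------------------------------------------------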
for file_path in files_2b_copied:
    file_name = os.path.basename(file_path)
    shutil.copyfile(file_path, os.path.join(eval_result_dirpath, file_name))
    print("file '%s' is copied into '%s'" % (file_name, eval_result_dirpath))

import sys, pathlib
from fp import pipe, cmap, cfilter

if __name__ == '__main__':
    ''' python evaluator.py segnet.h5 imgs_dir output_dir '''
    segnet_model_path = sys.argv[1]
    imgs_dir = sys.argv[2]
    output_dir = sys.argv[3]

    # prepare the output directory tree, then run the segnet (U-Net) model on every image
    utils.safe_copytree(imgs_dir, output_dir, ['*.*'])
    segnet = model.unet(segnet_model_path, (None, None, 1))

    f = pipe(utils.file_paths,
             cmap(lambda path: (cv2.imread(path, 0), path)),
             cfilter(lambda img_path: img_path[0] is not None),
             cmap(lambda img_path: (utils.bgr_float32(img_path[0]), img_path[1])),
             cmap(lambda im_p: (im_p[0].reshape((1,) + im_p[0].shape), im_p[1])),
             cmap(lambda im_p: (inference(segnet, im_p[0]), im_p[1])))

    old_parent_dir = pathlib.Path(imgs_dir).parts[-1]
    for segmap_list, img_path in f(imgs_dir):
        new_path = utils.make_dstpath(img_path, old_parent_dir, output_dir)
        segmap = segmap_list[0]
        segmap = (segmap.reshape(segmap.shape[:2]) * 255).astype(np.uint8)
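# ---------------------------------------------------------------------------
# The image-processing scripts above and below build their pipelines from a
# small `fp` module (pipe, cmap, cfilter, and later flatten). Its source is
# not shown; the sketch below is only one way those combinators could be
# written that is consistent with how they are called here.
from functools import reduce
from itertools import chain

def pipe(*funcs):
    '''Compose funcs left to right: pipe(f, g, h)(x) == h(g(f(x))).'''
    return lambda x: reduce(lambda acc, f: f(acc), funcs, x)

def cmap(f):
    '''Curried map: cmap(f)(iterable) == map(f, iterable).'''
    return lambda iterable: map(f, iterable)

def cfilter(pred):
    '''Curried filter: cfilter(pred)(iterable) == filter(pred, iterable).'''
    return lambda iterable: filter(pred, iterable)

def flatten(nested):
    '''Flatten one level of nesting, e.g. an iterable of generators.'''
    return chain.from_iterable(nested)
# ---------------------------------------------------------------------------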
    create new argv[2] directory (tree structure preserved), separate the
    images in argv[1], and move the rgb imgs to the new directory.

    ex)
    python separator.py ./data/examples/ ./data/rgb
                        ^~~~~~~~~~~~~~~~ ^~~~~~~~~~
                        original img dir new directory for rgb imgs
    '''

is_grayscale = (lambda img:
                np.all(img[:, :, 0] == img[:, :, 1]) and
                np.all(img[:, :, 1] == img[:, :, 2]))

if __name__ == '__main__':
    mixed_imgs_path = sys.argv[1]
    rgb_imgs_path = sys.argv[2]

    # mirror the directory structure, then move only the color images over
    utils.safe_copytree(mixed_imgs_path, rgb_imgs_path,
                        ('*.jpg', '*.jpeg', '*.png'))

    f = pipe(utils.file_paths,
             cmap(lambda path: (cv2.imread(path), path)),
             cfilter(lambda img_path: img_path[0] is not None),
             cfilter(lambda img_path: not is_grayscale(img_path[0])))

    old_parent_dir = pathlib.Path(mixed_imgs_path).parts[-1]
    timer = utils.ElapsedTimer('moving in')
    for img, img_path in f(mixed_imgs_path):
        new_path = utils.make_dstpath(img_path, old_parent_dir, rgb_imgs_path)
        #print(img_path, old_parent_dir, new_path)
        shutil.move(img_path, new_path)
    timer.elapsed_time()
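# ---------------------------------------------------------------------------
# Two more `utils` helpers these scripts depend on are utils.file_paths and
# utils.make_dstpath. The sketches below are inferred from the call sites
# (walk a tree yielding file paths; re-root a path from the source tree into
# the mirrored destination tree); the real implementations may differ.
import os
import pathlib

def file_paths(root):
    '''Yield the path of every file under root, depth-first.'''
    for dirpath, _dirs, files in os.walk(root):
        for name in files:
            yield os.path.join(dirpath, name)

def make_dstpath(src_path, old_parent_dir, new_root):
    '''Keep the part of src_path below old_parent_dir and put it under new_root,
    e.g. make_dstpath('data/examples/a/b.png', 'examples', 'data/rgb')
         -> 'data/rgb/a/b.png' (assumed behaviour).'''
    parts = pathlib.Path(src_path).parts
    idx = len(parts) - 1 - parts[::-1].index(old_parent_dir)  # last occurrence
    return str(pathlib.Path(new_root, *parts[idx + 1:]))
# ---------------------------------------------------------------------------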
        #print(y, x)
        #cv2.imshow('img', img[y:y+h, x:x+w]); cv2.waitKey(0)
        yield '%s_%d_%d.png' % (img_name, y, x), img[y:y+h, x:x+w]
'''

if __name__ == '__main__':
    def path2path_img(path):
        return (path, cv2.imread(path))

    crop_size = int(sys.argv[1])
    imgs_dir = sys.argv[2]
    pieces_dir = sys.argv[3]

    utils.safe_copytree(imgs_dir, pieces_dir, ['*.jpg', '*.jpeg', '*.png'])

    timer = utils.ElapsedTimer('Total Cutting')
    #-------------------------------------------------------------
    pieces \
        = pipe(utils.file_paths,
               cmap(path2path_img),
               cfilter(lambda path_img: path_img[1] is not None),
               cmap(lambda pair: path_img2path_pieces(pair, crop_size,
                                                      imgs_dir, pieces_dir)),
               flatten)(imgs_dir)

    for path, img in pieces:
        #print(path)
        cv2.imwrite(path, img)
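# ---------------------------------------------------------------------------
# path_img2path_pieces() is only hinted at above (a commented-out yield is all
# that survives), so the generator below is a guess at its behaviour: tile the
# image with crop_size squares and pair each piece with a destination path
# under pieces_dir. The name handling and the fact that edge tiles are left
# unpadded are assumptions.
import os

def path_img2path_pieces(path_img, crop_size, imgs_dir, pieces_dir):
    '''Yield (dst_path, piece) pairs that cv2.imwrite can consume directly.'''
    path, img = path_img
    rel = os.path.relpath(path, imgs_dir)
    stem, _ext = os.path.splitext(os.path.join(pieces_dir, rel))
    h, w = img.shape[:2]
    for y in range(0, h, crop_size):
        for x in range(0, w, crop_size):
            yield '%s_%d_%d.png' % (stem, y, x), img[y:y + crop_size, x:x + crop_size]
# ---------------------------------------------------------------------------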