def test_refine_stage(args):
    """Run the flow-refinement stage over the evaluation set and save completed flows.

    Loads a pretrained refinement network (ResNet101 or ResNet50 branch, 66 input
    channels -> 4 output channels: 2 forward + 2 reverse flow), runs it on every
    item of FlowRefine.FlowSeq, composites predicted flow into the masked regions,
    and writes the results as .flo files under ``args.output_root``.
    """
    # Fixed seeds for reproducible evaluation.
    torch.manual_seed(777)
    torch.cuda.manual_seed(777)
    eval_dataset = FlowRefine.FlowSeq(args, isTest=True)
    eval_dataloader = DataLoader(eval_dataset,
                                 batch_size=args.batch_size,
                                 shuffle=False,
                                 drop_last=False,
                                 num_workers=args.n_threads)
    # 66 input channels, 4 output channels (forward + reverse flow, 2 each).
    if args.ResNet101:
        dfc_resnet101 = resnet_models.Flow_Branch(66, 4)
        dfc_resnet = nn.DataParallel(dfc_resnet101).cuda()
    else:
        dfc_resnet50 = resnet_models.Flow_Branch_Multi(input_chanels=66, NoLabels=4)
        dfc_resnet = nn.DataParallel(dfc_resnet50).cuda()
    dfc_resnet.eval()
    resume_iter = load_ckpt(args.PRETRAINED_MODEL, [('model', dfc_resnet)], strict=True)
    print('Load Pretrained Model from', args.PRETRAINED_MODEL)
    #task_bar = ProgressBar(eval_dataset.__len__())
    # NOTE(review): total=len(eval_dataset) counts samples while the loop iterates
    # batches; the bar is only exact when batch_size == 1 — confirm intended.
    for i, item in tqdm(enumerate(eval_dataloader), total=len(eval_dataset)):
        with torch.no_grad():
            input_x = item[0].cuda()
            flow_masked = item[1].cuda()
            gt_flow = item[2].cuda()  # loaded but unused in this test loop
            mask = item[3].cuda()
            output_dir = item[4][0]  # comma-separated pair of relative output paths
            res_flow = dfc_resnet(input_x)
            # First two channels: forward flow; last two: reverse flow.
            res_flow_f = res_flow[:, :2, :, :]
            res_flow_r = res_flow[:, 2:, :, :]
            # Composite: predicted flow inside the hole, original flow outside.
            # NOTE(review): the forward branch broadcasts a single mask channel
            # (10:11) over both flow channels while the reverse branch uses two
            # mask channels (32:34) — presumably both encodings are equivalent in
            # the dataset layout; verify against FlowRefine.FlowSeq.
            res_complete_f = res_flow_f * mask[:, 10:11, :, :] + flow_masked[:, 10:12, :, :] * (1. - mask[:, 10:11, :, :])
            res_complete_r = res_flow_r * mask[:, 32:34, :, :] + flow_masked[:, 32:34, :, :] * (1. - mask[:, 32:34, :, :])
            output_dir_split = output_dir.split(',')
            output_file_f = os.path.join(args.output_root, output_dir_split[0])
            output_file_r = os.path.join(args.output_root, output_dir_split[1])
            output_basedir = os.path.dirname(output_file_f)
            if not os.path.exists(output_basedir):
                os.makedirs(output_basedir)
            # Save as HxWx2 numpy arrays in .flo format (first item of the batch).
            res_save_f = res_complete_f[0].permute(1, 2, 0).contiguous().cpu().data.numpy()
            cvb.write_flow(res_save_f, output_file_f)
            res_save_r = res_complete_r[0].permute(1, 2, 0).contiguous().cpu().data.numpy()
            cvb.write_flow(res_save_r, output_file_r)
            #task_bar.update()
    sys.stdout.write('\n')
    # Drop the model reference so empty_cache can actually release GPU memory.
    dfc_resnet = None
    torch.cuda.empty_cache()
    print('Refined Results Saved in', args.output_root)
def extract_flow(args):
    """Run LiteFlowNet inference, create the initial zero .rflo file, and
    register the resulting flow directory as ``args.DATA_ROOT``."""
    from tools.infer_liteflownet import infer

    flow_dir = infer(args)
    # All forward-flow files produced by inference.
    flo_names = [name for name in os.listdir(flow_dir) if '.flo' in name]
    # File names start with a 5-digit frame number; find the earliest frame.
    first_no = min(int(name[:5]) for name in flo_names)
    # Any existing flow file supplies the correct shape for the zero flow.
    template = cvb.read_flow(os.path.join(flow_dir, flo_names[0]))
    rflo_path = os.path.join(flow_dir, '%05d.rflo' % first_no)
    cvb.write_flow(template * 0, rflo_path)
    args.DATA_ROOT = flow_dir
def save_flow_2_grayimg(dir_flow_path, dir_grayimg_path):
    """Convert every flow file in ``dir_flow_path`` to a quantized grayscale JPEG.

    Args:
        dir_flow_path: directory containing flow files (read with ``read_flow``).
        dir_grayimg_path: prefix for the output images; each input ``name.ext``
            becomes ``<dir_grayimg_path>name_img_u.jpg``.

    Returns:
        The string "not a correct dir" when ``dir_flow_path`` is not a directory,
        otherwise None.
    """
    if not os.path.isdir(dir_flow_path):
        return "not a correct dir"
    for item in os.listdir(dir_flow_path):
        # Fix: was the Python 2 statement `print item`, a SyntaxError on Python 3.
        print(item)
        filename = dir_grayimg_path + item.split('.')[0] + '_img_u.jpg'
        # quantize=True writes the flow as grayscale images rather than .flo.
        cvb.write_flow(read_flow(dir_flow_path + item), filename, quantize=True)
def test_initial_stage(args):
    """Run the initial flow-completion stage over the evaluation set.

    Loads a pretrained completion network (33 input channels -> 2 output flow
    channels), composites its prediction into the masked region of each sample
    from FlowInitial.FlowSeq, and saves the completed flows as .flo files under
    ``args.output_root``.
    """
    # Fixed seeds for reproducible evaluation.
    torch.manual_seed(777)
    torch.cuda.manual_seed(777)
    # Force the dataset into initial-hole mode with mask generation.
    args.INITIAL_HOLE = True
    args.get_mask = True
    eval_dataset = FlowInitial.FlowSeq(args, isTest=True)
    eval_dataloader = DataLoader(eval_dataset,
                                 batch_size=args.batch_size,
                                 shuffle=False,
                                 drop_last=False,
                                 num_workers=args.n_threads)
    # 33 input channels, 2 output channels (a single flow field).
    if args.ResNet101:
        dfc_resnet101 = resnet_models.Flow_Branch(33, 2)
        dfc_resnet = nn.DataParallel(dfc_resnet101).cuda()
    else:
        dfc_resnet50 = resnet_models.Flow_Branch_Multi(input_chanels=33, NoLabels=2)
        dfc_resnet = nn.DataParallel(dfc_resnet50).cuda()
    dfc_resnet.eval()
    resume_iter = load_ckpt(args.PRETRAINED_MODEL, [('model', dfc_resnet)], strict=True)
    print('Load Pretrained Model from', args.PRETRAINED_MODEL)
    task_bar = ProgressBar(eval_dataset.__len__())
    for i, item in enumerate(eval_dataloader):
        with torch.no_grad():
            input_x = item[0].cuda()
            flow_masked = item[1].cuda()
            mask = item[3].cuda()
            output_dir = item[4][0]  # relative output path (comma-separated list)
            res_flow = dfc_resnet(input_x)
            # Composite: prediction inside the hole (mask channel 10 broadcast
            # over both flow channels), original masked flow outside.
            res_complete = res_flow * mask[:, 10: 11, :, :] + flow_masked[:, 10:12, :, :] * (
                1. - mask[:, 10:11, :, :])
            output_dir_split = output_dir.split(',')
            output_file = os.path.join(args.output_root, output_dir_split[0])
            output_basedir = os.path.dirname(output_file)
            if not os.path.exists(output_basedir):
                os.makedirs(output_basedir)
            # Save as an HxWx2 numpy array in .flo format (first item of batch).
            res_save = res_complete[0].permute(
                1, 2, 0).contiguous().cpu().data.numpy()
            cvb.write_flow(res_save, output_file)
            task_bar.update()
    print('Initial Results Saved in', args.output_root)
def flow(self):
    """One step of the LiteFlowNet inference stage of a resumable state machine.

    Each call processes a single frame pair. ``self.i == -1`` marks the
    uninitialized state; when the dataloader is exhausted (or the last item is
    reached), the stage finalizes by writing a zero .rflo for the first frame
    and advancing ``self.state``.
    """
    if self.i == -1:
        # initialization: build the flow-pair list, load the network, and
        # create a manually-driven dataloader iterator.
        self.args.data_list = infer_liteflownet.generate_flow_list(
            self.args.frame_dir)
        print('====> Loading', self.args.pretrained_model_liteflownet)
        self.Flownet = LiteFlowNet(self.args.pretrained_model_liteflownet)
        self.Flownet.to(self.args.device)
        self.Flownet.eval()
        dataset_ = FlowInfer.FlowInfer(self.args.data_list,
                                       size=self.args.img_size)
        self.flow_dataloader = iter(
            DataLoader(dataset_,
                       batch_size=1,
                       shuffle=False,
                       num_workers=0))
    # Per-call progress counter; -1 -> 0 on the first (initializing) call.
    # NOTE(review): indentation inferred from the completion check below,
    # which only terminates if self.i advances every call — confirm.
    self.i += 1
    complete = False
    with torch.no_grad():
        try:
            f1, f2, output_path_ = next(self.flow_dataloader)
            f1 = f1.to(self.args.device)
            f2 = f2.to(self.args.device)
            flow = infer_liteflownet.estimate(self.Flownet, f1, f2)
            output_path = output_path_[0]
            output_file = os.path.dirname(output_path)
            os.makedirs(output_file, exist_ok=True)
            # Save as HxWx2 numpy array in .flo format.
            flow_numpy = flow[0].permute(1, 2, 0).data.cpu().numpy()
            cvb.write_flow(flow_numpy, output_path)
        except StopIteration:
            # Dataloader exhausted: fall through to finalization.
            complete = True
    if self.i == len(self.flow_dataloader) - 1 or complete:
        print('LiteFlowNet Inference has been finished!')
        flow_list = [
            x for x in os.listdir(self.args.flow_root) if '.flo' in x
        ]
        # File names start with a 5-digit frame number; find the earliest one.
        flow_start_no = min([int(x[:5]) for x in flow_list])
        del self.flow_dataloader, self.Flownet
        # Any existing flow supplies the shape for the zero reverse flow.
        zero_flow = cvb.read_flow(
            os.path.join(self.args.flow_root, flow_list[0]))
        cvb.write_flow(
            zero_flow * 0,
            os.path.join(self.args.flow_root, '%05d.rflo' % flow_start_no))
        self.args.DATA_ROOT = self.args.flow_root
        # Reset for the next stage and advance the state machine.
        self.i = -1
        self.state += 1
def test_write_flow():
    """Round-trip a random flow through .flo and quantized .jpg output."""
    flow = np.random.rand(100, 100, 2).astype(np.float32)
    # write to a .flo file
    # Fix: mkstemp returns an OPEN file descriptor; the original discarded it
    # (`_, filename = ...`), leaking one fd per test run. Close it explicitly.
    fd, filename = tempfile.mkstemp()
    os.close(fd)
    cvb.write_flow(flow, filename)
    flow_from_file = cvb.read_flow(filename)
    assert_array_equal(flow, flow_from_file)
    os.remove(filename)
    # write to two .jpg files (quantize=True splits flow into dx/dy images)
    tmp_dir = tempfile.gettempdir()
    cvb.write_flow(flow, osp.join(tmp_dir, 'test_flow.jpg'), quantize=True)
    assert osp.isfile(osp.join(tmp_dir, 'test_flow_dx.jpg'))
    assert osp.isfile(osp.join(tmp_dir, 'test_flow_dy.jpg'))
    os.remove(osp.join(tmp_dir, 'test_flow_dx.jpg'))
    os.remove(osp.join(tmp_dir, 'test_flow_dy.jpg'))
def infer(args):
    """Run LiteFlowNet on every frame pair in ``args.data_list``.

    If ``args.frame_dir`` is given, the pair list is generated from it first.
    Writes one .flo file per pair and returns the directory containing the
    last written flow file.
    """
    assert args.data_list is not None or args.frame_dir is not None
    if args.frame_dir is not None:
        data_list = generate_flow_list(args.frame_dir)
        args.data_list = data_list
    device = torch.device('cuda:0')
    print('====> Loading', args.pretrained_model_liteflownet)
    Flownet = LiteFlowNet(args.pretrained_model_liteflownet)
    Flownet.to(device)
    Flownet.eval()
    dataset_ = FlowInfer(args.data_list, size=args.img_size)
    dataloader_ = DataLoader(dataset_,
                             batch_size=1,
                             shuffle=False,
                             num_workers=0)
    #task_bar = ProgressBar(dataset_.__len__())
    with torch.no_grad():
        # batch_size is 1, so len(dataset_) equals the number of batches here.
        for i, (f1, f2, output_path_) in tqdm(enumerate(dataloader_),
                                              total=len(dataset_)):
            f1 = f1.to(device)
            f2 = f2.to(device)
            flow = estimate(Flownet, f1, f2)
            output_path = output_path_[0]
            output_file = os.path.dirname(output_path)
            if not os.path.exists(output_file):
                os.makedirs(output_file)
            # Save as an HxWx2 numpy array in .flo format.
            flow_numpy = flow[0].permute(1, 2, 0).data.cpu().numpy()
            cvb.write_flow(flow_numpy, output_path)
    sys.stdout.write('\n')
    print('LiteFlowNet Inference has been finished~!')
    # NOTE(review): output_file is the dir of the LAST batch and is unbound if
    # the dataset is empty — presumably all outputs share one dir; confirm.
    print('Extracted Flow has been save in', output_file)
    return output_file
def infer(args):
    """Run FlowNet2 on every frame pair in ``args.data_list``.

    If ``args.frame_dir`` is given, the pair list is generated from it first.
    Writes one .flo file per pair and returns the directory containing the
    last written flow file.
    """
    assert args.data_list is not None or args.frame_dir is not None
    if args.frame_dir is not None:
        data_list = generate_flow_list(args.frame_dir)
        args.data_list = data_list
    device = torch.device('cuda:0')
    Flownet = FlowNet2(args, requires_grad=False)
    print('====> Loading', args.pretrained_model_flownet2)
    flownet2_ckpt = torch.load(args.pretrained_model_flownet2)
    Flownet.load_state_dict(flownet2_ckpt['state_dict'])
    Flownet.to(device)
    Flownet.eval()
    dataset_ = FlowInfer(args.data_list, size=args.img_size)
    dataloader_ = DataLoader(dataset_, batch_size=1, shuffle=False)
    task_bar = ProgressBar(dataset_.__len__())
    # Fix: the sibling LiteFlowNet infer() wraps inference in torch.no_grad(),
    # but this loop ran with autograd enabled, building unused graphs and
    # wasting GPU memory. Disable gradient tracking for pure inference.
    with torch.no_grad():
        for i, (f1, f2, output_path_) in enumerate(dataloader_):
            f1 = f1.to(device)
            f2 = f2.to(device)
            flow = Flownet(f1, f2)
            output_path = output_path_[0]
            output_file = os.path.dirname(output_path)
            # exist_ok avoids a race when several pairs share one output dir.
            os.makedirs(output_file, exist_ok=True)
            # Save as an HxWx2 numpy array in .flo format.
            flow_numpy = flow[0].permute(1, 2, 0).data.cpu().numpy()
            cvb.write_flow(flow_numpy, output_path)
            task_bar.update()
    sys.stdout.write('\n')
    print('FlowNet2 Inference has been finished~!')
    # NOTE(review): output_file is the dir of the LAST batch and is unbound if
    # the dataset is empty — presumably all outputs share one dir; confirm.
    print('Extracted Flow has been save in', output_file)
    return output_file
def infer(args):
    """Run FlowNet2 over 5-frame neighborhoods, producing four flows per sample.

    Each dataloader item yields five frames (f1..f5, with f3 the center) and
    four output paths. Forward flows go FROM the neighbors TO the center frame;
    reverse flows (paths ending in 'rflo') go FROM the center TO the neighbors.
    Returns the directory of the last written flow file.
    """
    assert args.data_list is not None or args.frame_dir is not None
    if args.frame_dir is not None:
        data_list = generate_flow_list(args.frame_dir)
        args.data_list = data_list
    device = torch.device('cuda:0')
    Flownet = FlowNet2(args, requires_grad=False)
    print('====> Loading', args.pretrained_model_flownet2)
    flownet2_ckpt = torch.load(args.pretrained_model_flownet2)
    Flownet.load_state_dict(flownet2_ckpt['state_dict'])
    Flownet.to(device)
    Flownet.eval()
    dataset_ = FlowInfer(args.data_list, size=args.img_size)
    dataloader_ = DataLoader(dataset_, batch_size=1, shuffle=False)
    task_bar = ProgressBar(dataset_.__len__())
    for i, (f1, f2, f3, f4, f5, output_path_1, output_path_2, output_path_3,
            output_path_4) in enumerate(dataloader_):
        f1 = f1.to(device)
        f2 = f2.to(device)
        f3 = f3.to(device)
        f4 = f4.to(device)
        f5 = f5.to(device)
        # The first output path's extension selects the flow direction:
        # '.rflo' means reverse flow (center frame f3 -> each neighbor).
        if (output_path_1[0][-4:] == 'rflo'):
            flow_1 = Flownet(f3, f1)
            flow_2 = Flownet(f3, f2)
            flow_3 = Flownet(f3, f4)
            flow_4 = Flownet(f3, f5)
        else:
            # Forward flow: each neighbor -> center frame f3.
            flow_1 = Flownet(f1, f3)
            flow_2 = Flownet(f2, f3)
            flow_3 = Flownet(f4, f3)
            flow_4 = Flownet(f5, f3)
        # Unbatch the path strings (batch_size == 1).
        output_path_01 = output_path_1[0]
        output_path_02 = output_path_2[0]
        output_path_03 = output_path_3[0]
        output_path_04 = output_path_4[0]
        #print(output_path_1)
        # Write each flow as an HxWx2 numpy array in .flo format, creating
        # the destination directory on first use.
        output_file = os.path.dirname(output_path_01)
        if not os.path.exists(output_file):
            os.makedirs(output_file)
        flow_numpy = flow_1[0].permute(1, 2, 0).data.cpu().numpy()
        cvb.write_flow(flow_numpy, output_path_01)
        output_file = os.path.dirname(output_path_02)
        if not os.path.exists(output_file):
            os.makedirs(output_file)
        flow_numpy = flow_2[0].permute(1, 2, 0).data.cpu().numpy()
        cvb.write_flow(flow_numpy, output_path_02)
        output_file = os.path.dirname(output_path_03)
        if not os.path.exists(output_file):
            os.makedirs(output_file)
        flow_numpy = flow_3[0].permute(1, 2, 0).data.cpu().numpy()
        cvb.write_flow(flow_numpy, output_path_03)
        output_file = os.path.dirname(output_path_04)
        if not os.path.exists(output_file):
            os.makedirs(output_file)
        flow_numpy = flow_4[0].permute(1, 2, 0).data.cpu().numpy()
        cvb.write_flow(flow_numpy, output_path_04)
        task_bar.update()
    sys.stdout.write('\n')
    print('FlowNet2 Inference has been finished~!')
    # NOTE(review): output_file is the dir of the LAST write and is unbound if
    # the dataset is empty — presumably all outputs share one dir; confirm.
    print('Extracted Flow has been save in', output_file)
    return output_file
def flow_completion(self):
    """One step of the initial flow-completion stage of a resumable state machine.

    Each call completes the masked flow of one sample. ``self.i == -1`` marks
    the uninitialized state (generate the data list, configure args, load the
    completion network); when the dataloader is exhausted or the last item is
    reached, the stage registers the output dir as ``args.flow_root`` and
    advances ``self.state``.
    """
    if self.i == -1:
        # Initialization: build the evaluation list for the initial step.
        data_list_dir = os.path.join(self.args.dataset_root, 'data')
        os.makedirs(data_list_dir, exist_ok=True)
        initial_data_list = os.path.join(data_list_dir,
                                         'initial_test_list.txt')
        print('Generate datalist for initial step')
        data_list.gen_flow_initial_test_mask_list(
            flow_root=self.args.DATA_ROOT,
            output_txt_path=initial_data_list)
        self.args.EVAL_LIST = os.path.join(data_list_dir,
                                           'initial_test_list.txt')
        self.args.output_root = os.path.join(self.args.dataset_root,
                                             'Flow_res', 'initial_res')
        self.args.PRETRAINED_MODEL = self.args.PRETRAINED_MODEL_1
        # Flow completion runs at half the image resolution.
        if self.args.img_size is not None:
            self.args.IMAGE_SHAPE = [
                self.args.img_size[0] // 2, self.args.img_size[1] // 2
            ]
            self.args.RES_SHAPE = self.args.IMAGE_SHAPE
        print('Flow Completion in First Step')
        self.args.MASK_ROOT = self.args.mask_root
        eval_dataset = FlowInitial.FlowSeq(self.args, isTest=True)
        # Manually-driven iterator so one item is consumed per call.
        self.flow_refinement_dataloader = iter(
            DataLoader(eval_dataset,
                       batch_size=self.settings.batch_size,
                       shuffle=False,
                       drop_last=False,
                       num_workers=self.args.n_threads))
        # 33 input channels, 2 output channels (a single flow field).
        if self.args.ResNet101:
            dfc_resnet101 = resnet_models.Flow_Branch(33, 2)
            self.dfc_resnet = nn.DataParallel(dfc_resnet101).to(
                self.args.device)
        else:
            dfc_resnet50 = resnet_models.Flow_Branch_Multi(
                input_chanels=33, NoLabels=2)
            self.dfc_resnet = nn.DataParallel(dfc_resnet50).to(
                self.args.device)
        self.dfc_resnet.eval()
        io.load_ckpt(self.args.PRETRAINED_MODEL,
                     [('model', self.dfc_resnet)],
                     strict=True)
        print('Load Pretrained Model from', self.args.PRETRAINED_MODEL)
    # Per-call progress counter; -1 -> 0 on the first (initializing) call.
    # NOTE(review): indentation inferred from the completion check below,
    # which only terminates if self.i advances every call — confirm.
    self.i += 1
    complete = False
    with torch.no_grad():
        try:
            item = next(self.flow_refinement_dataloader)
            input_x = item[0].to(self.args.device)
            flow_masked = item[1].to(self.args.device)
            mask = item[3].to(self.args.device)
            output_dir = item[4][0]  # relative output path (comma-separated)
            res_flow = self.dfc_resnet(input_x)
            # Composite: prediction inside the hole (mask channel 10 broadcast
            # over both flow channels), original masked flow outside.
            res_complete = res_flow * mask[:, 10:
                                           11, :, :] + flow_masked[:,
                                                                   10:12, :, :] * (
                                                                       1.
                                                                       - mask[:, 10:11, :, :])
            output_dir_split = output_dir.split(',')
            output_file = os.path.join(self.args.output_root,
                                       output_dir_split[0])
            output_basedir = os.path.dirname(output_file)
            if not os.path.exists(output_basedir):
                os.makedirs(output_basedir)
            # Save as an HxWx2 numpy array in .flo format (first item of batch).
            res_save = res_complete[0].permute(
                1, 2, 0).contiguous().cpu().data.numpy()
            cvb.write_flow(res_save, output_file)
        except StopIteration:
            # Dataloader exhausted: fall through to finalization.
            complete = True
    if self.i == len(self.flow_refinement_dataloader) - 1 or complete:
        # Completed flows become the flow root for the next stage.
        self.args.flow_root = self.args.output_root
        del self.flow_refinement_dataloader, self.dfc_resnet
        # Reset for the next stage and advance the state machine.
        self.i = -1
        self.state += 1