# Example 1
def flow_completion(args):
    """Run the multi-stage flow-completion pipeline configured by *args*.

    Stage 1 (always): generate a test data list, then run the initial
    flow-completion model (``PRETRAINED_MODEL_1``) over ``args.DATA_ROOT``,
    writing results under ``<dataset_root>/Flow_res/initial_res``.

    If ``args.MS`` (multi-scale) is set, two refinement passes follow,
    each consuming the previous stage's output directory as its input:
    stage 2 at 320x600 with ``PRETRAINED_MODEL_2`` and stage 3 at 480x840
    with ``PRETRAINED_MODEL_3``.

    Side effects: mutates many attributes on *args* (``EVAL_LIST``,
    ``output_root``, ``PRETRAINED_MODEL``, ``IMAGE_SHAPE``, ``RES_SHAPE``,
    ``DATA_ROOT``, ``flow_root``, ...) and writes files to disk.
    On return, ``args.flow_root`` points at the final stage's output.
    """
    data_list_dir = os.path.join(args.dataset_root, 'data')
    # exist_ok avoids the check-then-create race and matches the idiom
    # used elsewhere in this file.
    os.makedirs(data_list_dir, exist_ok=True)

    initial_data_list = os.path.join(data_list_dir, 'initial_test_list.txt')
    print('Generate datalist for initial step')

    from dataset.data_list import gen_flow_initial_test_mask_list
    gen_flow_initial_test_mask_list(flow_root=args.DATA_ROOT,
                                    output_txt_path=initial_data_list)
    # Reuse the path built above instead of reconstructing it.
    args.EVAL_LIST = initial_data_list

    from tools.test_scripts import test_initial_stage
    args.output_root = os.path.join(args.dataset_root, 'Flow_res',
                                    'initial_res')
    args.PRETRAINED_MODEL = args.PRETRAINED_MODEL_1

    # Stage 1 runs at half the requested resolution; the refine stages
    # below use fixed shapes. (Presumably the initial model was trained
    # at half scale — TODO confirm against the training config.)
    if args.img_size is not None:
        args.IMAGE_SHAPE = [args.img_size[0] // 2, args.img_size[1] // 2]
        args.RES_SHAPE = args.IMAGE_SHAPE

    print('Flow Completion in First Step')
    test_initial_stage(args)
    args.flow_root = args.output_root

    if args.MS:
        # Import once for both refinement stages (the original imported
        # gen_flow_refine_test_mask_list twice).
        from dataset.data_list import gen_flow_refine_test_mask_list
        from tools.test_scripts import test_refine_stage

        args.ResNet101 = False

        # ---- Stage 2: refine at 320x600, fed by stage-1 output ----
        args.PRETRAINED_MODEL = args.PRETRAINED_MODEL_2
        args.IMAGE_SHAPE = [320, 600]
        args.RES_SHAPE = [320, 600]
        args.DATA_ROOT = args.output_root
        args.output_root = os.path.join(args.dataset_root, 'Flow_res',
                                        'stage2_res')

        stage2_data_list = os.path.join(data_list_dir, 'stage2_test_list.txt')
        gen_flow_refine_test_mask_list(flow_root=args.DATA_ROOT,
                                       output_txt_path=stage2_data_list)
        args.EVAL_LIST = stage2_data_list
        test_refine_stage(args)

        # ---- Stage 3: refine at 480x840, fed by stage-2 output ----
        args.PRETRAINED_MODEL = args.PRETRAINED_MODEL_3
        args.IMAGE_SHAPE = [480, 840]
        args.RES_SHAPE = [480, 840]
        args.DATA_ROOT = args.output_root
        args.output_root = os.path.join(args.dataset_root, 'Flow_res',
                                        'stage3_res')

        stage3_data_list = os.path.join(data_list_dir, 'stage3_test_list.txt')
        gen_flow_refine_test_mask_list(flow_root=args.DATA_ROOT,
                                       output_txt_path=stage3_data_list)
        args.EVAL_LIST = stage3_data_list
        test_refine_stage(args)
        args.flow_root = args.output_root
    def flow_completion(self):
        """Advance the initial flow-completion stage by one batch.

        Incremental state-machine step: the first call (``self.i == -1``)
        lazily builds the test data list, the evaluation dataloader and
        the flow-completion network, loading weights from
        ``PRETRAINED_MODEL_1``.  Every call then consumes one batch from
        the dataloader, inpaints the masked flow with the network and
        writes the result to disk via ``cvb.write_flow``.  When the
        dataloader is exhausted, resources are released, ``self.i`` is
        reset and ``self.state`` is advanced to the next pipeline stage.
        """
        # One-time setup; i == -1 marks "not initialised yet".
        if self.i == -1:
            data_list_dir = os.path.join(self.args.dataset_root, 'data')
            os.makedirs(data_list_dir, exist_ok=True)
            initial_data_list = os.path.join(data_list_dir,
                                             'initial_test_list.txt')
            print('Generate datalist for initial step')
            data_list.gen_flow_initial_test_mask_list(
                flow_root=self.args.DATA_ROOT,
                output_txt_path=initial_data_list)
            self.args.EVAL_LIST = os.path.join(data_list_dir,
                                               'initial_test_list.txt')

            self.args.output_root = os.path.join(self.args.dataset_root,
                                                 'Flow_res', 'initial_res')
            self.args.PRETRAINED_MODEL = self.args.PRETRAINED_MODEL_1

            # The initial stage works at half the requested resolution
            # (presumably matching the model's training scale — TODO
            # confirm).
            if self.args.img_size is not None:
                self.args.IMAGE_SHAPE = [
                    self.args.img_size[0] // 2, self.args.img_size[1] // 2
                ]
                self.args.RES_SHAPE = self.args.IMAGE_SHAPE

            print('Flow Completion in First Step')
            self.args.MASK_ROOT = self.args.mask_root
            eval_dataset = FlowInitial.FlowSeq(self.args, isTest=True)
            # Keep the raw iterator so each call to this method pulls
            # exactly one batch; shuffle/drop_last are off because this
            # is deterministic evaluation over the whole list.
            self.flow_refinement_dataloader = iter(
                DataLoader(eval_dataset,
                           batch_size=self.settings.batch_size,
                           shuffle=False,
                           drop_last=False,
                           num_workers=self.args.n_threads))
            # Both branches build a 33-channel-in, 2-channel-out flow
            # branch; only the backbone depth differs.
            if self.args.ResNet101:
                dfc_resnet101 = resnet_models.Flow_Branch(33, 2)
                self.dfc_resnet = nn.DataParallel(dfc_resnet101).to(
                    self.args.device)
            else:
                dfc_resnet50 = resnet_models.Flow_Branch_Multi(
                    input_chanels=33, NoLabels=2)
                self.dfc_resnet = nn.DataParallel(dfc_resnet50).to(
                    self.args.device)
            self.dfc_resnet.eval()
            io.load_ckpt(self.args.PRETRAINED_MODEL,
                         [('model', self.dfc_resnet)],
                         strict=True)
            print('Load Pretrained Model from', self.args.PRETRAINED_MODEL)

        # Per-call work: process the next batch (i counts processed
        # batches, starting at 0).
        self.i += 1
        complete = False
        with torch.no_grad():
            try:
                # item layout (from FlowSeq): [0] network input,
                # [1] masked flow, [3] mask, [4] output path string —
                # NOTE(review): inferred from usage here; verify against
                # the dataset class.
                item = next(self.flow_refinement_dataloader)
                input_x = item[0].to(self.args.device)
                flow_masked = item[1].to(self.args.device)
                mask = item[3].to(self.args.device)
                output_dir = item[4][0]

                res_flow = self.dfc_resnet(input_x)
                # Composite: network output inside the hole, original
                # flow outside it. Channel 10:11 is the mask plane used
                # for blending and 10:12 the two flow components —
                # presumably the middle frame of the temporal stack;
                # TODO confirm channel layout.
                res_complete = res_flow * mask[:, 10:
                                               11, :, :] + flow_masked[:, 10:12, :, :] * (
                                                   1. - mask[:, 10:11, :, :])

                # First comma-separated field of the path string is the
                # relative output file path.
                output_dir_split = output_dir.split(',')
                output_file = os.path.join(self.args.output_root,
                                           output_dir_split[0])
                output_basedir = os.path.dirname(output_file)
                if not os.path.exists(output_basedir):
                    os.makedirs(output_basedir)
                # (C, H, W) -> (H, W, C) numpy array for the .flo writer.
                res_save = res_complete[0].permute(
                    1, 2, 0).contiguous().cpu().data.numpy()
                cvb.write_flow(res_save, output_file)
            except StopIteration:
                # Iterator exhausted mid-call; treat as finished.
                complete = True
        # Done after the last batch (i is 0-based, len() gives the batch
        # count) or on exhaustion: hand results to the next stage,
        # release the model/dataloader and reset for reuse.
        if self.i == len(self.flow_refinement_dataloader) - 1 or complete:
            self.args.flow_root = self.args.output_root
            del self.flow_refinement_dataloader, self.dfc_resnet
            self.i = -1
            self.state += 1