def load_checkpoints(config_path, checkpoint_path, cpu=False):
    with open(config_path) as f:
        config = yaml.load(f, Loader=yaml.FullLoader)

    generator = OcclusionAwareGenerator(
        **config["model_params"]["generator_params"],
        **config["model_params"]["common_params"],
    )
    if cpu:
        generator.cpu()
    else:
        generator.cuda()

    kp_detector = KPDetector(
        **config["model_params"]["kp_detector_params"],
        **config["model_params"]["common_params"],
    )
    if cpu:
        kp_detector.cpu()
    else:
        kp_detector.cuda()

    checkpoint = torch.load(checkpoint_path, map_location="cpu" if cpu else None)
    generator.load_state_dict(checkpoint["generator"])
    kp_detector.load_state_dict(checkpoint["kp_detector"])

    generator = DataParallelWithCallback(generator)
    kp_detector = DataParallelWithCallback(kp_detector)

    generator.eval()
    kp_detector.eval()

    return generator, kp_detector
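
A minimal usage sketch for the variant above, assuming the usual first-order-model layout; the config and checkpoint paths are placeholders for whatever files you actually have on disk.

# Usage sketch (not part of the original example); paths are placeholders.
generator, kp_detector = load_checkpoints(
    config_path="config/vox-256.yaml",
    checkpoint_path="checkpoints/vox-cpk.pth.tar",
    cpu=not torch.cuda.is_available(),
)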
Example #2
def load_checkpoints(config_path, checkpoint_path, cpu=False):

    with open(config_path) as f:
        config = yaml.load(f, Loader=yaml.FullLoader)

    generator = OcclusionAwareGenerator(**config['model_params']['generator_params'],
                                        **config['model_params']['common_params'])
    if not cpu:
        generator.cuda()

    kp_detector = KPDetector(**config['model_params']['kp_detector_params'],
                             **config['model_params']['common_params'])
    if not cpu:
        kp_detector.cuda()
    
    if cpu:
        checkpoint = torch.load(checkpoint_path, map_location=torch.device('cpu'))
    else:
        checkpoint = torch.load(checkpoint_path)
 
    generator.load_state_dict(checkpoint['generator'])
    kp_detector.load_state_dict(checkpoint['kp_detector'])
    
    if not cpu:
        generator = DataParallelWithCallback(generator)
        kp_detector = DataParallelWithCallback(kp_detector)

    generator.eval()
    kp_detector.eval()
    
    return generator, kp_detector
Example #3
def load_checkpoints(config_path, checkpoint_path, device='cuda'):

    with open(config_path) as f:
        config = yaml.load(f, Loader=yaml.FullLoader)

    generator = OcclusionAwareGenerator(
        **config['model_params']['generator_params'],
        **config['model_params']['common_params'])
    generator.to(device)

    kp_detector = KPDetector(**config['model_params']['kp_detector_params'],
                             **config['model_params']['common_params'])
    kp_detector.to(device)

    checkpoint = torch.load(checkpoint_path, map_location=device)
    generator.load_state_dict(checkpoint['generator'])
    kp_detector.load_state_dict(checkpoint['kp_detector'])

    generator = DataParallelWithCallback(generator)
    kp_detector = DataParallelWithCallback(kp_detector)

    generator.eval()
    kp_detector.eval()

    return generator, kp_detector
Example #4
def load_checkpoints(config_path, checkpoint_path, device="cuda"):

    with open(config_path) as f:
        config = yaml.load(f, Loader=yaml.FullLoader)

    generator = OcclusionAwareGenerator(
        **config["model_params"]["generator_params"],
        **config["model_params"]["common_params"],
    )
    generator.to(device)

    kp_detector = KPDetector(
        **config["model_params"]["kp_detector_params"],
        **config["model_params"]["common_params"],
    )
    kp_detector.to(device)

    checkpoint = torch.load(checkpoint_path, map_location=device)
    generator.load_state_dict(checkpoint["generator"])
    kp_detector.load_state_dict(checkpoint["kp_detector"])

    generator = DataParallelWithCallback(generator)
    kp_detector = DataParallelWithCallback(kp_detector)

    generator.eval()
    kp_detector.eval()

    return generator, kp_detector
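
For the device-based variants (Examples #3 and #4), a hedged usage sketch; the paths are again placeholders, and the device string simply falls back to CPU when no GPU is visible.

# Usage sketch (not part of the original example); paths are placeholders.
device = "cuda" if torch.cuda.is_available() else "cpu"
generator, kp_detector = load_checkpoints(
    config_path="config/vox-256.yaml",
    checkpoint_path="checkpoints/vox-cpk.pth.tar",
    device=device,
)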
Example #5
def load_checkpoints(config_path):
    with open(config_path) as f:
        config = yaml.load(f, Loader=yaml.FullLoader)
    pretrain_model = config['ckpt_model']
    generator = OcclusionAwareGenerator(
        **config['model_params']['generator_params'],
        **config['model_params']['common_params'])
    kp_detector = KPDetector(**config['model_params']['kp_detector_params'],
                             **config['model_params']['common_params'])
    load_ckpt(pretrain_model, generator=generator, kp_detector=kp_detector)
    generator.eval()
    kp_detector.eval()
    return generator, kp_detector
Example #6
def load_checkpoints(config_path):

    with open(config_path) as f:
        config = yaml.load(f, Loader=yaml.FullLoader)
    pretrain_model = config['ckpt_model']
    generator = OcclusionAwareGenerator(
        **config['model_params']['generator_params'],
        **config['model_params']['common_params'])

    kp_detector = KPDetector(**config['model_params']['kp_detector_params'],
                             **config['model_params']['common_params'])
    if pretrain_model['generator'] is not None:
        if pretrain_model['generator'][-3:] == 'npz':
            G_param = np.load(pretrain_model['generator'],
                              allow_pickle=True)['arr_0'].item()
            G_param_clean = [(i, G_param[i]) for i in G_param
                             if 'num_batches_tracked' not in i]
            parameter_clean = generator.parameters()
            # The parameters of AntiAliasInterpolation2d are not in the saved
            # state dict and should be ignored.
            del parameter_clean[65]
            for p, v in zip(parameter_clean, G_param_clean):
                p.set_value(v[1])
        else:
            a, b = fluid.load_dygraph(pretrain_model['generator'])
            generator.set_dict(a)
        print('Restore Pre-trained Generator')
    if pretrain_model['kp'] is not None:
        if pretrain_model['kp'][-3:] == 'npz':
            KD_param = np.load(pretrain_model['kp'],
                               allow_pickle=True)['arr_0'].item()
            KD_param_clean = [(i, KD_param[i]) for i in KD_param
                              if 'num_batches_tracked' not in i]
            parameter_clean = kp_detector.parameters()
            for p, v in zip(parameter_clean, KD_param_clean):
                p.set_value(v[1])
        else:
            a, b = fluid.load_dygraph(pretrain_model['kp'])
            kp_detector.set_dict(a)
        print('Restore Pre-trained KD')
    generator.eval()
    kp_detector.eval()

    return generator, kp_detector
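
The PaddlePaddle variant above takes only a config path, since the checkpoint locations live in the YAML itself; the path below is an illustrative placeholder, and the file it names must define the 'ckpt_model' and 'model_params' sections read above.

# Usage sketch (not part of the original example); the config path is a placeholder.
generator, kp_detector = load_checkpoints('configs/vox-paddle.yaml')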
Example #7
    def load_generator_and_keypoint_detector(self):
        config = self.load_config()
        generator = OcclusionAwareGenerator(
            **config['model_params']['generator_params'],
            **config['model_params']['common_params'])
        generator.to(self.device)
        kp_detector = KPDetector(
            **config['model_params']['kp_detector_params'],
            **config['model_params']['common_params'])
        kp_detector.to(self.device)

        checkpoints = self.load_checkpoints()
        generator.load_state_dict(checkpoints['generator'])
        kp_detector.load_state_dict(checkpoints['kp_detector'])

        generator.eval()
        kp_detector.eval()

        return generator, kp_detector
Example #8
    def load_checkpoints(self):
        with open(self.config_path) as f:
            config = yaml.load(f, Loader=yaml.FullLoader)

        generator = OcclusionAwareGenerator(**config['model_params']['generator_params'],
                                            **config['model_params']['common_params'])
        generator.to(self.device)

        kp_detector = KPDetector(**config['model_params']['kp_detector_params'],
                                 **config['model_params']['common_params'])
        kp_detector.to(self.device)

        checkpoint = torch.load(self.checkpoint_path, map_location=self.device)
        generator.load_state_dict(checkpoint['generator'])
        kp_detector.load_state_dict(checkpoint['kp_detector'])

        generator.eval()
        kp_detector.eval()

        return generator, kp_detector
Example #9
                             **config['model_params']['common_params'])
    #   if not opt.cpu:
    #       kp_detector = kp_detector.cuda()

    Logger.load_cpk(opt.checkpoint,
                    generator=generator,
                    kp_detector=kp_detector,
                    use_cpu=False)

    vis = Visualizer()

    # generator = DataParallelWithCallback(generator)
    # kp_detector = DataParallelWithCallback(kp_detector)

    generator.eval()
    kp_detector.eval()

    with torch.no_grad():
        driving_video = VideoToTensor()(read_video(
            opt.driving_video, opt.image_shape + (3, )))['video']
        source_image = VideoToTensor()(read_video(
            opt.source_image, opt.image_shape + (3, )))['video'][:, :1]
        print(source_image.shape)

        driving_video = torch.from_numpy(driving_video).unsqueeze(0)
        source_image = torch.from_numpy(source_image).unsqueeze(0)

        out = transfer_one(generator, kp_detector, source_image, driving_video,
                           config['transfer_params'])
        '''
        # Pickle the out