def create_heatmap_representations(self, source_image, kp_driving_value, kp_source_value):
    """
    Eq. 6 in the paper: heatmap representation H_k(z).

    Renders the driving and source keypoints as Gaussian heatmaps, takes
    their difference per keypoint, prepends an all-zero background channel,
    and inserts a singleton channel axis.

    Args:
        source_image: tensor whose dims after the first two give the spatial
            size (assumed layout (B, C, H, W) -- TODO confirm with caller).
        kp_driving_value: keypoint coordinates of the driving frame.
        kp_source_value: keypoint coordinates of the source frame.

    Returns:
        Tensor of shape (B, num_kp + 1, 1, H, W): background channel first,
        then one (driving - source) Gaussian difference per keypoint.
    """
    spatial_size = source_image.shape[2:]
    gaussian_driving = kp2gaussian(kp_driving_value, spatial_size=spatial_size,
                                   kp_variance=self.kp_variance)
    gaussian_source = kp2gaussian(kp_source_value, spatial_size=spatial_size,
                                  kp_variance=self.kp_variance)
    heatmap = gaussian_driving - gaussian_source

    # Background channel of zeros. Allocate directly with the heatmap's
    # dtype/device instead of the deprecated allocate-then-
    # .type(heatmap.type()) round trip, which builds a CPU float tensor and
    # then converts/moves it.
    zeros = torch.zeros(heatmap.shape[0], 1, spatial_size[0], spatial_size[1],
                        dtype=heatmap.dtype, device=heatmap.device)
    heatmap = torch.cat([zeros, heatmap], dim=1)
    heatmap = heatmap.unsqueeze(2)
    return heatmap
def create_heatmap_representations(self, source_image, kp_driving, kp_source):
    """
    Eq. 6 in the paper: heatmap representation H_k(z).

    Renders the driving and source keypoints as Gaussian heatmaps, takes
    their difference per keypoint, prepends an all-zero background channel,
    and inserts a singleton channel axis.

    Args:
        source_image: tensor whose dims after the first two give the spatial
            size (assumed layout (B, C, H, W) -- TODO confirm with caller).
        kp_driving: keypoints of the driving frame.
        kp_source: keypoints of the source frame.

    Returns:
        Tensor of shape (B, num_kp + 1, 1, H, W): background channel first,
        then one (driving - source) Gaussian difference per keypoint.
    """
    spatial_size = source_image.shape[2:]
    # TODO: revisit kp2gaussian in detail -- it appears to render each
    # keypoint as a Gaussian bump over the spatial grid (translated from a
    # Chinese note left by a previous author).
    gaussian_driving = kp2gaussian(kp_driving, spatial_size=spatial_size,
                                   kp_variance=self.kp_variance)
    gaussian_source = kp2gaussian(kp_source, spatial_size=spatial_size,
                                  kp_variance=self.kp_variance)
    heatmap = gaussian_driving - gaussian_source

    # Background channel of zeros. Allocate with the heatmap's dtype/device
    # directly rather than via the deprecated .type(heatmap.type()) pattern,
    # which allocates on CPU first and then converts.
    zeros = torch.zeros(heatmap.shape[0], 1, spatial_size[0], spatial_size[1],
                        dtype=heatmap.dtype, device=heatmap.device)
    heatmap = torch.cat([zeros, heatmap], dim=1)
    heatmap = heatmap.unsqueeze(2)
    return heatmap
def create_heatmap_representations(self, source_image, kp_driving, kp_source):
    """
    Eq. 6 in the paper: heatmap representation H_k(z) (Paddle variant).

    Renders the driving and source keypoints as Gaussian heatmaps, takes
    their difference per keypoint, prepends an all-zero background channel,
    and inserts a singleton channel axis.

    Args:
        source_image: tensor whose dims after the first two give the spatial
            size (assumed layout (B, C, H, W) -- TODO confirm with caller).
        kp_driving: keypoints of the driving frame.
        kp_source: keypoints of the source frame.

    Returns:
        Tensor of shape (B, num_kp + 1, 1, H, W): background channel first,
        then one (driving - source) Gaussian difference per keypoint.
    """
    spatial_size = source_image.shape[2:]
    gaussian_driving = kp2gaussian(kp_driving, spatial_size=spatial_size,
                                   kp_variance=self.kp_variance)
    gaussian_source = kp2gaussian(kp_source, spatial_size=spatial_size,
                                  kp_variance=self.kp_variance)
    heatmap = gaussian_driving - gaussian_source

    # Background channel of zeros. Allocate natively in Paddle with the
    # heatmap's own dtype instead of round-tripping through a hard-coded
    # float32 numpy array, which forces a host-side copy and would mismatch
    # the heatmap dtype under float64/mixed precision.
    zeros = paddle.zeros(
        [heatmap.shape[0], 1, spatial_size[0], spatial_size[1]],
        dtype=heatmap.dtype)
    heatmap = paddle.concat([zeros, heatmap], axis=1)
    heatmap = heatmap.unsqueeze(2)
    return heatmap
def forward(self, x, kp=None):
    """
    Run the discriminator trunk over `x`.

    When `self.use_kp` is set, keypoint Gaussian heatmaps (from `kp`) are
    concatenated onto `x` as extra input channels before the down-sampling
    blocks. Each block's output is recorded so callers can use the
    intermediate activations (e.g. for feature-matching losses).

    Args:
        x: input tensor, spatial dims taken from `x.shape[2:]`.
        kp: keypoints; only consulted when `self.use_kp` is true.

    Returns:
        (feature_maps, prediction_map): the per-block activations, and the
        final conv applied to the last of them.
    """
    collected = []
    current = x
    if self.use_kp:
        # Inject keypoint locations as additional channels.
        kp_channels = kp2gaussian(kp, x.shape[2:], self.kp_variance)
        current = torch.cat([current, kp_channels], dim=1)
    for block in self.down_blocks:
        current = block(current)
        collected.append(current)
    prediction_map = self.conv(current)
    return collected, prediction_map
def forward(self, x, kp=None):
    """
    Run the discriminator trunk over `x` (Paddle/fluid variant).

    When `self.use_kp` is set, keypoint Gaussian heatmaps (from `kp`) are
    concatenated onto `x` as extra input channels before the down-sampling
    blocks. Each block's output is recorded and returned alongside the
    final prediction map.

    Args:
        x: input tensor; spatial dims taken from `x.shape[2:]`.
        kp: keypoints; only consulted when `self.use_kp` is true.

    Returns:
        (feature_maps, prediction_map): per-block activations, and the
        final conv applied to the last of them.
    """
    feature_maps = []
    out = x
    if self.use_kp:
        # Inject keypoint locations as additional input channels.
        heatmap = kp2gaussian(kp, x.shape[2:], self.kp_variance)
        out = fluid.layers.concat([out, heatmap], axis=1)
    for down_block in self.down_blocks:
        feature_maps.append(down_block(out))
        out = feature_maps[-1]
    if self.sn is not None:
        # NOTE(review): manual spectral-normalization hack -- on every
        # forward pass the conv weight is overwritten in place with
        # self.sn applied to the raw weight (self.conv.parameters()[0]).
        # Presumably self.sn is a spectral-norm op and must run before
        # self.conv is applied below -- TODO confirm, and confirm
        # parameters()[0] is indeed the weight tensor.
        self.conv.weight.set_value(self.sn(self.conv.parameters()[0]))
    prediction_map = self.conv(out)
    return feature_maps, prediction_map
def create_heatmap_representations(self, source_image, kp_driving, kp_source, oval=False):
    """
    Eq. 6 in the paper: heatmap representation H_k(z).

    Renders the driving and source keypoints as Gaussian heatmaps (isotropic
    by default; elongated/oval Gaussians via kp2ESgaussian when
    ``oval=True``), takes their difference per keypoint, prepends an
    all-zero background channel, and inserts a singleton channel axis.

    Args:
        source_image: tensor whose dims after the first two give the spatial
            size (assumed layout (B, C, H, W) -- TODO confirm with caller).
        kp_driving: keypoints of the driving frame.
        kp_source: keypoints of the source frame.
        oval: if True, use the anisotropic kp2ESgaussian with a second
            variance 5x the base one (as added by a previous author).

    Returns:
        Tensor of shape (B, num_kp + 1, 1, H, W): background channel first,
        then one (driving - source) Gaussian difference per keypoint.
    """
    spatial_size = source_image.shape[2:]
    if not oval:
        gaussian_driving = kp2gaussian(kp_driving, spatial_size=spatial_size,
                                       kp_variance=self.kp_variance)
        gaussian_source = kp2gaussian(kp_source, spatial_size=spatial_size,
                                      kp_variance=self.kp_variance)
    else:
        # Modified path: elongated (oval) Gaussians with a wider second axis.
        gaussian_driving = kp2ESgaussian(kp_driving, spatial_size=spatial_size,
                                         kp_variance_1=self.kp_variance,
                                         kp_variance_2=self.kp_variance * 5)
        gaussian_source = kp2ESgaussian(kp_source, spatial_size=spatial_size,
                                        kp_variance_1=self.kp_variance,
                                        kp_variance_2=self.kp_variance * 5)
    heatmap = gaussian_driving - gaussian_source

    # Background channel of zeros. Allocate with the heatmap's dtype/device
    # directly rather than via the deprecated .type(heatmap.type()) pattern,
    # which allocates on CPU first and then converts.
    zeros = torch.zeros(heatmap.shape[0], 1, spatial_size[0], spatial_size[1],
                        dtype=heatmap.dtype, device=heatmap.device)
    heatmap = torch.cat([zeros, heatmap], dim=1)
    heatmap = heatmap.unsqueeze(2)
    return heatmap