Example No. 1
    def __init__(self, block_expansion, num_segments, num_channels, max_features,
                 num_blocks, temperature, estimate_affine_part=False, scale_factor=1):
        super(SegmentationModule, self).__init__()

        self.predictor = Hourglass(block_expansion, in_features=num_channels,
                                   max_features=max_features, num_blocks=num_blocks)
        self.num_segments = num_segments
        self.shift = nn.Conv2d(in_channels=self.predictor.out_filters, out_channels=num_segments, kernel_size=(7, 7),
                               padding=(3, 3))

        if estimate_affine_part:
            self.affine = nn.Conv2d(in_channels=self.predictor.out_filters,
                                    out_channels=4 * num_segments, kernel_size=(7, 7), padding=(3, 3))
            self.affine.weight.data.zero_()
            self.affine.bias.data.copy_(torch.tensor([1, 0, 0, 1] * num_segments, dtype=torch.float))
        else:
            self.affine = None

        self.segmentation = nn.Conv2d(in_channels=self.predictor.out_filters,
                                      out_channels=(1 + num_segments), kernel_size=(7, 7), padding=(3, 3))

        self.temperature = temperature
        self.scale_factor = scale_factor
        if self.scale_factor != 1:
            self.down = AntiAliasInterpolation2d(num_channels, self.scale_factor)
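Note: the `estimate_affine_part` branch above zeroes the convolution weights and copies the 2x2 identity ([1, 0, 0, 1]) into the bias, so every segment's affine prediction starts out as the identity transform. The following self-contained sketch checks that behaviour; the input channel count and tensor sizes are illustrative, not taken from the example.

import torch
import torch.nn as nn

num_segments = 2
affine = nn.Conv2d(in_channels=8, out_channels=4 * num_segments, kernel_size=(7, 7), padding=(3, 3))
affine.weight.data.zero_()
affine.bias.data.copy_(torch.tensor([1, 0, 0, 1] * num_segments, dtype=torch.float))

features = torch.randn(1, 8, 16, 16)                        # made-up feature map
out = affine(features)                                      # (1, 4 * num_segments, 16, 16)
matrices = out.permute(0, 2, 3, 1).reshape(1, 16, 16, num_segments, 2, 2)
print(matrices[0, 0, 0])                                    # identity 2x2 matrix for every segment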
Example No. 2
    def __init__(self, num_channels, num_kp, block_expansion, max_features, num_down_blocks,
                 num_bottleneck_blocks, estimate_occlusion_map=False, dense_motion_params=None,
                 estimate_jacobian=False, scale_factor=0.25):
        super(Generator, self).__init__()
        self.source_first = AntiAliasInterpolation2d(num_channels, scale_factor)
        first_input = int(block_expansion / scale_factor)
        self.first = SameBlock2d(num_channels + 2, first_input, kernel_size=(7, 7), padding=(3, 3))  # +2 masks
        down_blocks = []
        for i in range(num_down_blocks):
            in_features = min(max_features, block_expansion * (2 ** i))
            out_features = min(max_features, block_expansion * (2 ** (i + 1)))
            down_blocks.append(DownBlock2d(in_features, out_features, kernel_size=(3, 3), padding=(1, 1)))
        self.down_blocks = nn.ModuleList(down_blocks)

        up_blocks = []
        for i in range(num_down_blocks):
            in_features = min(max_features, block_expansion * (2 ** (num_down_blocks - i)))
            out_features = min(max_features, block_expansion * (2 ** (num_down_blocks - i - 1)))
            up_blocks.append(UpBlock2d(in_features, out_features, kernel_size=(3, 3), padding=(1, 1)))
        self.up_blocks = nn.ModuleList(up_blocks)

        self.bottleneck = torch.nn.Sequential()
        in_features = min(max_features, block_expansion * (2 ** num_down_blocks))
        for i in range(num_bottleneck_blocks):
            self.bottleneck.add_module('r' + str(i), ResBlock2d(in_features, kernel_size=(3, 3), padding=(1, 1)))

        self.final = nn.Conv2d(block_expansion, num_channels, kernel_size=(7, 7), padding=(3, 3))
        self.num_channels = num_channels
        self.hourglass = Hourglass(block_expansion=block_expansion, in_features=8, max_features=1024, num_blocks=5)
        self.final_hourglass = nn.Conv2d(in_channels=self.hourglass.out_filters, out_channels=3, kernel_size=(7, 7),
                                         padding=(3, 3))
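For reference, the down and up blocks above double and halve the channel width at each step, clipped by `max_features`. A quick standalone calculation with illustrative values (block_expansion = 64, max_features = 512, num_down_blocks = 3 are assumptions, not values fixed by the example):

block_expansion, max_features, num_down_blocks = 64, 512, 3
down = [(min(max_features, block_expansion * 2 ** i),
         min(max_features, block_expansion * 2 ** (i + 1))) for i in range(num_down_blocks)]
up = [(min(max_features, block_expansion * 2 ** (num_down_blocks - i)),
       min(max_features, block_expansion * 2 ** (num_down_blocks - i - 1))) for i in range(num_down_blocks)]
print(down)   # [(64, 128), (128, 256), (256, 512)]
print(up)     # [(512, 256), (256, 128), (128, 64)]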
Example No. 3
    def __init__(self,
                 block_expansion,
                 num_channels,
                 max_features,
                 num_blocks,
                 temperature,
                 estimate_jacobian=False,
                 scale_factor=1,
                 single_jacobian_map=False,
                 pad=0):
        super(MaskGenerator, self).__init__()

        self.predictor = HourglassNoRes(block_expansion,
                                        in_features=num_channels,
                                        max_features=max_features,
                                        num_blocks=num_blocks)

        self.ref = nn.Conv2d(in_channels=32,
                             out_channels=1,
                             kernel_size=(7, 7),
                             padding=pad)

        self.sigmoid = nn.Sigmoid()
        self.temperature = temperature
        self.scale_factor = scale_factor
        if self.scale_factor != 1:
            self.down = AntiAliasInterpolation2d(num_channels,
                                                 self.scale_factor)
Example No. 4
 def __init__(self, scales, num_channels):
     super(ImagePyramide, self).__init__()
     downs = {}
     for scale in scales:
         # TODO: read through its anti-aliased 2-D interpolation (AntiAliasInterpolation2d)
         downs[str(scale).replace('.', '-')] = AntiAliasInterpolation2d(num_channels, scale)
     self.downs = nn.ModuleDict(downs)
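The example only registers one anti-aliased downsampler per scale. As a rough, self-contained stand-in (it uses torch's built-in antialiased `F.interpolate` instead of AntiAliasInterpolation2d, and the class name and output keys below are made up), a pyramid like this can be exercised as follows:

import torch
import torch.nn as nn
import torch.nn.functional as F

class SimpleImagePyramide(nn.Module):
    # Illustrative stand-in only: antialiased bilinear resizing per scale.
    def __init__(self, scales):
        super().__init__()
        self.scales = list(scales)

    def forward(self, x):
        return {'prediction_' + str(s): F.interpolate(x, scale_factor=s, mode='bilinear',
                                                      align_corners=False, antialias=True)
                for s in self.scales}

pyramide = SimpleImagePyramide([1, 0.5, 0.25])
levels = pyramide(torch.randn(2, 3, 64, 64))
print({k: tuple(v.shape) for k, v in levels.items()})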
Example No. 5
    def __init__(self, block_expansion, num_regions, num_channels, max_features,
                 num_blocks, temperature, estimate_affine=False, scale_factor=1,
                 pca_based=False, fast_svd=False, pad=3):
        super(RegionPredictor, self).__init__()
        self.predictor = Hourglass(block_expansion, in_features=num_channels,
                                   max_features=max_features, num_blocks=num_blocks)

        self.regions = nn.Conv2d(in_channels=self.predictor.out_filters, out_channels=num_regions, kernel_size=(7, 7),
                                 padding=pad)

        # FOMM-like regression based representation
        if estimate_affine and not pca_based:
            self.jacobian = nn.Conv2d(in_channels=self.predictor.out_filters,
                                      out_channels=4, kernel_size=(7, 7), padding=pad)
            self.jacobian.weight.data.zero_()
            self.jacobian.bias.data.copy_(torch.tensor([1, 0, 0, 1], dtype=torch.float))
        else:
            self.jacobian = None

        self.temperature = temperature
        self.scale_factor = scale_factor
        self.pca_based = pca_based
        self.fast_svd = fast_svd

        if self.scale_factor != 1:
            self.down = AntiAliasInterpolation2d(num_channels, self.scale_factor)
Example No. 6
 def __init__(self, scales, num_channels):
     super(ImagePyramide, self).__init__()
     self.downs = paddle.nn.LayerList()
     self.name_list = []
     for scale in scales:
         self.downs.add_sublayer(
             str(scale).replace('.', '-'),
             AntiAliasInterpolation2d(num_channels, scale))
         self.name_list.append(str(scale).replace('.', '-'))
Example No. 7
    def __init__(self,
                 block_expansion,
                 num_kp,
                 num_channels,
                 max_features,
                 num_blocks,
                 temperature,
                 estimate_jacobian=False,
                 scale_factor=1,
                 single_jacobian_map=False,
                 pad=0,
                 use_landmarks=False):
        super(KPDetector, self).__init__()

        self.use_landmarks = use_landmarks
        if use_landmarks:
            num_kp = 68
            self.fan = Landmarks()  #.requires_grad_(False)

        self.predictor = Hourglass(block_expansion,
                                   in_features=num_channels,
                                   max_features=max_features,
                                   num_blocks=num_blocks)

        self.kp = nn.Conv2d(in_channels=self.predictor.out_filters,
                            out_channels=num_kp,
                            kernel_size=(7, 7),
                            padding=pad)

        if estimate_jacobian:
            self.num_jacobian_maps = 1 if single_jacobian_map else num_kp
            if self.use_landmarks:
                self.jacobian = nn.Conv2d(
                    in_channels=self.predictor.out_filters,
                    out_channels=4 * self.num_jacobian_maps,
                    kernel_size=(7, 7),
                    padding=3)
            else:
                self.jacobian = nn.Conv2d(
                    in_channels=self.predictor.out_filters,
                    out_channels=4 * self.num_jacobian_maps,
                    kernel_size=(7, 7),
                    padding=pad)
            self.jacobian.weight.data.zero_()
            self.jacobian.bias.data.copy_(
                torch.tensor([1, 0, 0, 1] * self.num_jacobian_maps,
                             dtype=torch.float))
        else:
            self.jacobian = None

        self.temperature = temperature
        self.scale_factor = scale_factor
        if self.scale_factor != 1:
            self.down = AntiAliasInterpolation2d(num_channels,
                                                 self.scale_factor)
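Only the constructor is shown, but the stored `temperature` hints at how keypoint heatmaps are typically decoded: divide by the temperature, take a softmax over spatial positions, and read off the expected coordinate. The standalone sketch below illustrates that decoding; the function name, tensor shapes, and default temperature are assumptions, not code from the example.

import torch
import torch.nn.functional as F

def heatmap_to_keypoints(heatmap, temperature=0.1):
    # Temperature-scaled spatial softmax, then the expected (x, y) of each map
    # in [-1, 1] grid coordinates.
    b, k, h, w = heatmap.shape
    probs = F.softmax(heatmap.view(b, k, -1) / temperature, dim=2).view(b, k, h, w)
    ys = torch.linspace(-1, 1, h).view(1, 1, h, 1)
    xs = torch.linspace(-1, 1, w).view(1, 1, 1, w)
    x = (probs * xs).sum(dim=(2, 3))
    y = (probs * ys).sum(dim=(2, 3))
    return torch.stack([x, y], dim=-1)      # shape: (b, k, 2)

kp = heatmap_to_keypoints(torch.randn(1, 10, 58, 58))
print(kp.shape)                              # torch.Size([1, 10, 2])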
Example No. 8
    def __init__(self, blend_scale=1, first_order_motion_model=False, **kwargs):
        super(PartSwapGenerator, self).__init__(**kwargs)
        if blend_scale == 1:
            self.blend_downsample = lambda x: x
        else:
            self.blend_downsample = AntiAliasInterpolation2d(1, blend_scale)

        if first_order_motion_model:
            self.dense_motion_network = DenseMotionNetwork()
        else:
            self.dense_motion_network = None
Example No. 9
    def __init__(self,
                 checkpoint_with_kp,
                 block_expansion,
                 num_kp,
                 kp_after_softmax,
                 num_channels,
                 max_features,
                 num_blocks,
                 temperature,
                 estimate_jacobian=False,
                 scale_factor=1,
                 single_jacobian_map=False,
                 pad=0,
                 softmax_mask=False):
        super(KPDetector, self).__init__()

        self.predictor = Hourglass(block_expansion,
                                   in_features=num_channels,
                                   max_features=max_features,
                                   num_blocks=num_blocks)

        self.kp = nn.Conv2d(in_channels=self.predictor.out_filters,
                            out_channels=num_kp,
                            kernel_size=(7, 7),
                            padding=pad)

        self.kp_after_softmax = kp_after_softmax
        self.softmax_mask = softmax_mask

        if estimate_jacobian:
            self.num_jacobian_maps = 1 if single_jacobian_map else num_kp
            self.jacobian = nn.Conv2d(in_channels=self.predictor.out_filters,
                                      out_channels=4 * self.num_jacobian_maps,
                                      kernel_size=(7, 7),
                                      padding=pad)
            self.jacobian.weight.data.zero_()
            self.jacobian.bias.data.copy_(
                torch.tensor([1, 0, 0, 1] * self.num_jacobian_maps,
                             dtype=torch.float))
        else:
            self.jacobian = None

        self.temperature = temperature
        self.scale_factor = scale_factor
        if self.scale_factor != 1:
            self.down = AntiAliasInterpolation2d(num_channels,
                                                 self.scale_factor)

        self.load_state_dict(checkpoint_with_kp['kp_detector'])
Example No. 10
    def __init__(self,
                 block_expansion,
                 num_kp,
                 num_channels,
                 max_features,
                 num_blocks,
                 temperature,
                 estimate_jacobian=False,
                 scale_factor=1,
                 single_jacobian_map=False,
                 pad=0):
        super(KPDetector, self).__init__()

        # See the Hourglass network architecture for details;
        # in short, it is simply an encoder followed by a decoder.
        self.predictor = Hourglass(block_expansion,
                                   in_features=num_channels,
                                   max_features=max_features,
                                   num_blocks=num_blocks)

        # Declare a convolutional layer for the key points
        self.kp = nn.Conv2d(in_channels=self.predictor.out_filters,
                            out_channels=num_kp,
                            kernel_size=(7, 7),
                            padding=pad)

        if estimate_jacobian:
            self.num_jacobian_maps = 1 if single_jacobian_map else num_kp
            self.jacobian = nn.Conv2d(in_channels=self.predictor.out_filters,
                                      out_channels=4 * self.num_jacobian_maps,
                                      kernel_size=(7, 7),
                                      padding=pad)
            self.jacobian.weight.data.zero_()
            self.jacobian.bias.data.copy_(
                torch.tensor([1, 0, 0, 1] * self.num_jacobian_maps,
                             dtype=torch.float))
        else:
            self.jacobian = None

        self.temperature = temperature
        self.scale_factor = scale_factor
        if self.scale_factor != 1:
            self.down = AntiAliasInterpolation2d(num_channels,
                                                 self.scale_factor)
Example No. 11
    def __init__(self, block_expansion, num_blocks, max_features, num_kp, num_channels, estimate_occlusion_map=False,
                 scale_factor=1, kp_variance=0.01):
        super(DenseMotionNetwork, self).__init__()
        self.hourglass = Hourglass(block_expansion=block_expansion, in_features=(num_kp + 1) * (num_channels + 1),
                                   max_features=max_features, num_blocks=num_blocks)

        self.mask = nn.Conv2d(self.hourglass.out_filters, num_kp + 1, kernel_size=(7, 7), padding=(3, 3))

        if estimate_occlusion_map:
            self.occlusion = nn.Conv2d(self.hourglass.out_filters, 1, kernel_size=(7, 7), padding=(3, 3))
        else:
            self.occlusion = None

        self.num_kp = num_kp
        self.scale_factor = scale_factor
        self.kp_variance = kp_variance

        if self.scale_factor != 1:
            self.down = AntiAliasInterpolation2d(num_channels, self.scale_factor)
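One way to read the hourglass input width above, (num_kp + 1) * (num_channels + 1), is num_kp + 1 groups (one per keypoint plus background), each contributing num_channels deformed-source channels plus one heatmap channel. With the commonly used num_kp = 10 and num_channels = 3 (illustrative values, not fixed by the example):

num_kp, num_channels = 10, 3                        # assumed values for illustration
hourglass_in = (num_kp + 1) * (num_channels + 1)    # 11 * 4 = 44 input channels
mask_out = num_kp + 1                               # 11 masks: one per keypoint plus background
print(hourglass_in, mask_out)                       # 44 11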
Example No. 12
    def __init__(self,
                 block_expansion,
                 num_kp,
                 num_channels,
                 max_features,
                 num_blocks,
                 temperature,
                 estimate_jacobian=False,
                 scale_factor=1,
                 single_jacobian_map=False,
                 pad=0):
        super(KPDetector, self).__init__()

        self.predictor = Hourglass(block_expansion,
                                   in_features=num_channels,
                                   max_features=max_features,
                                   num_blocks=num_blocks)

        self.kp = dygraph.Conv2D(num_channels=self.predictor.out_filters,
                                 num_filters=num_kp,
                                 filter_size=(7, 7),
                                 padding=pad)

        if estimate_jacobian:
            self.num_jacobian_maps = 1 if single_jacobian_map else num_kp
            self.jacobian = dygraph.Conv2D(
                num_channels=self.predictor.out_filters,
                num_filters=4 * self.num_jacobian_maps,
                filter_size=(7, 7),
                padding=pad)
            self.jacobian.weight.set_value(
                np.zeros(list(self.jacobian.weight.shape), dtype=np.float32))
            self.jacobian.bias.set_value(
                np.array([1, 0, 0, 1] * self.num_jacobian_maps,
                         dtype=np.float32))
        else:
            self.jacobian = None

        self.temperature = temperature
        self.scale_factor = scale_factor
        if self.scale_factor != 1:
            self.down = AntiAliasInterpolation2d(num_channels,
                                                 self.scale_factor)
Example No. 13
    def __init__(self,
                 block_expansion,
                 num_blocks,
                 max_features,
                 num_regions,
                 num_channels,
                 estimate_occlusion_map=False,
                 scale_factor=1,
                 region_var=0.01,
                 use_covar_heatmap=False,
                 use_deformed_source=True,
                 revert_axis_swap=False):
        super(PixelwiseFlowPredictor, self).__init__()
        self.hourglass = Hourglass(block_expansion=block_expansion,
                                   in_features=(num_regions + 1) *
                                   (num_channels * use_deformed_source + 1),
                                   max_features=max_features,
                                   num_blocks=num_blocks)

        self.mask = nn.Conv2d(self.hourglass.out_filters,
                              num_regions + 1,
                              kernel_size=(7, 7),
                              padding=(3, 3))

        if estimate_occlusion_map:
            self.occlusion = nn.Conv2d(self.hourglass.out_filters,
                                       1,
                                       kernel_size=(7, 7),
                                       padding=(3, 3))
        else:
            self.occlusion = None

        self.num_regions = num_regions
        self.scale_factor = scale_factor
        self.region_var = region_var
        self.use_covar_heatmap = use_covar_heatmap
        self.use_deformed_source = use_deformed_source
        self.revert_axis_swap = revert_axis_swap

        if self.scale_factor != 1:
            self.down = AntiAliasInterpolation2d(num_channels,
                                                 self.scale_factor)
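Note how the boolean `use_deformed_source` folds directly into the hourglass input width: when it is False, the deformed-source channels drop out and only one heatmap channel per region remains. A quick check with illustrative values (num_regions = 10, num_channels = 3, both assumptions):

num_regions, num_channels = 10, 3
with_deformed = (num_regions + 1) * (num_channels * True + 1)      # 11 * 4 = 44
without_deformed = (num_regions + 1) * (num_channels * False + 1)  # 11 * 1 = 11
print(with_deformed, without_deformed)                             # 44 11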