Example #1
    def _make_network(self):
        base_nc = 64
        n_blocks_list = [3, 4, 4, 3]
        n_stride_list = [2, 2, 2, 2]
        n_heatmap_deconv = 3
        conv6_nc = 1024

        self.encoder = HandDataEncoder(self.input_nc, self.num_joints, base_nc,
                                       n_blocks_list, n_stride_list,
                                       self.img_size, self.mode,
                                       self.norm_type, self.unproject_net_gen)
        input_nc = self.encoder.output_nc

        self.conv6a = bb.down_conv3x3(input_nc, conv6_nc, self.norm_layer)
        heatmap_interm_conv = handnet.IntermHeatmapConv(
            conv6_nc, self.num_joints, self.img_shape, self.norm_layer)
        dist_interm_conv = handnet.IntermDistConv(conv6_nc, self.num_joints,
                                                  self.norm_layer)
        unproject_net = self.unproject_net_gen.create()
        self.hand_interm_conv = handnet.HandConv(self.img_size,
                                                 heatmap_interm_conv,
                                                 dist_interm_conv,
                                                 unproject_net)

        self.conv6b = bb.down_conv3x3(conv6_nc, int(conv6_nc / 2),
                                      self.norm_layer)
        heatmap_conv = handnet.HeatmapConv(int(conv6_nc / 2), self.num_joints,
                                           n_heatmap_deconv, self.deconv_nc,
                                           self.img_shape, self.norm_layer)
        dist_conv = handnet.DistConv(int(conv6_nc / 2), self.num_joints,
                                     self.norm_layer)
        unproject_net = self.unproject_net_gen.create()
        self.hand_conv = handnet.HandConv(self.img_size, heatmap_conv,
                                          dist_conv, unproject_net)
Example #2
    def _make_network(self):
        norm_layer = self.norm_layer

        conv1 = bb.down_conv3x3(self.input_nc, 128, norm_layer, stride=2, acti_layer=nn.Sigmoid)
        conv2 = bb.down_conv3x3(128, 256, norm_layer, stride=2, acti_layer=nn.Sigmoid)
        inner_product = bb.O2OBlock(256, self.output_nc, global_pool=True, acti_layer=nn.Sigmoid)
        # Sigmoid activations are used instead of ReLU to avoid the dead-neuron problem.
        fc1 = bb.FCBlock(self.output_nc, self.output_nc, acti_layer=bb.Sigmoid6, bias=True)
        layers = [conv1, conv2, inner_product, fc1]

        return nn.Sequential(*layers)
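The bb building blocks are project-specific and not shown on this page. Purely to make the call sites above readable, here is a minimal, hypothetical stand-in for bb.down_conv3x3 and bb.FCBlock that is consistent with how they are invoked; the real implementations, names, and defaults may differ:

import torch.nn as nn

def down_conv3x3(input_nc, output_nc, norm_layer=nn.BatchNorm2d,
                 stride=1, acti_layer=nn.ReLU):
    # Hypothetical stand-in: 3x3 conv + normalization + activation,
    # matching the (in, out, norm_layer, stride, acti_layer) call sites above.
    layers = [nn.Conv2d(input_nc, output_nc, kernel_size=3,
                        stride=stride, padding=1, bias=False)]
    if norm_layer is not None:
        layers.append(norm_layer(output_nc))
    if acti_layer is not None:
        layers.append(acti_layer())
    return nn.Sequential(*layers)

class FCBlock(nn.Module):
    # Hypothetical stand-in: fully connected layer followed by an activation.
    def __init__(self, input_nc, output_nc, acti_layer=nn.ReLU, bias=True):
        super().__init__()
        self.fc = nn.Linear(input_nc, output_nc, bias=bias)
        self.acti = acti_layer()

    def forward(self, x):
        return self.acti(self.fc(x))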
Example #3
    def __init__(self,
                 inplanes,
                 planes,
                 norm_layer,
                 stride=1,
                 downsample=False):
        super().__init__()
        self.stride = stride
        self.input_nc = inplanes
        self.output_nc = planes * self.expansion
        self.conv1 = bb.down_conv1x1(inplanes, planes, norm_layer)
        self.conv2 = bb.down_conv3x3(planes, planes, norm_layer, stride)
        self.conv3 = bb.down_conv1x1(planes,
                                     planes * self.expansion,
                                     norm_layer,
                                     acti_layer=None)
        self.relu = nn.ReLU(inplace=True)

        self.downsample = downsample or (stride > 1)
        if self.downsample:
            self.downsample_block = bb.down_conv1x1(inplanes,
                                                    planes * self.expansion,
                                                    norm_layer,
                                                    stride,
                                                    acti_layer=None)
Example #4
    def __init__(self,
                 inplanes,
                 planes,
                 norm_layer=nn.BatchNorm2d,
                 stride=1,
                 upsample=False):
        super().__init__()

        self.stride = stride

        self.input_nc = inplanes
        self.output_nc = int(planes / self.expansion)

        if self.stride > 1:
            self.conv1 = bb.up_conv4x4(inplanes, planes, norm_layer, stride)
        else:
            self.conv1 = bb.down_conv1x1(inplanes, planes, norm_layer)

        self.conv2 = bb.down_conv3x3(planes, planes, norm_layer)

        self.conv3 = bb.down_conv1x1(planes,
                                     self.output_nc,
                                     norm_layer,
                                     acti_layer=None)
        self.relu = nn.ReLU(inplace=True)

        self.upsample = upsample or (stride > 1)
        if self.upsample:
            self.upsample_block = bb.up_conv4x4(inplanes,
                                                self.output_nc,
                                                norm_layer,
                                                stride,
                                                acti_layer=None)
Example #5
    def _make_network(self):
        norm_layer = self.norm_layer
        conv1 = bb.down_conv3x3(self.input_nc, 256, norm_layer, stride=2, acti_layer=nn.Sigmoid)
        one_by_one = bb.O2OBlock(256, self.output_nc, global_pool=True, acti_layer=nn.Sigmoid)

        # Sigmoid activations are used instead of ReLU to avoid the dead-neuron problem.
        small_fc = bb.FCBlock(self.output_nc, self.output_nc, acti_layer=bb.Sigmoid6, bias=True)
        layers = [conv1, one_by_one, small_fc]

        return nn.Sequential(*layers)
Example #6
    def __init__(self, num_features, norm_layer):
        super().__init__()
        self.input_nc = num_features
        self.output_nc = num_features
        self.conv1 = bb.down_conv1x1(num_features, num_features, norm_layer)
        self.conv2 = bb.down_conv3x3(num_features, num_features, norm_layer)
        self.conv3 = bb.down_conv1x1(num_features,
                                     num_features,
                                     norm_layer,
                                     acti_layer=None)
        self.relu = nn.ReLU(inplace=True)
Example #7
    def _make_network(self):
        norm_layer = self.norm_layer

        conv1 = bb.down_conv3x3(self.input_nc, int(self.input_nc / 4), norm_layer)

        deconv1 = bb.up_conv4x4(int(self.input_nc / 4), self.deconv_nc, norm_layer, stride=2)
        blocks = [conv1, deconv1]
        for _ in range(self.n_deconv - 1):
            deconv = bb.up_conv4x4(self.deconv_nc, self.deconv_nc, norm_layer, stride=2)
            blocks.append(deconv)

        final_conv = bb.ConvBlock(nn.Conv2d, self.deconv_nc, self.output_nc,
                                  kernel_size=1, stride=1, padding=0, bias=True,
                                  norm_layer=None, acti_layer=nn.Sigmoid)

        blocks.append(final_conv)
        return nn.Sequential(*blocks)
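Assuming bb.up_conv4x4 with stride 2 wraps a 4x4 transposed convolution with padding 1, each deconvolution doubles the spatial resolution, so this head scales the feature map by 2 ** n_deconv before the final 1x1 sigmoid layer. A quick sanity check of that arithmetic with plain PyTorch layers rather than the bb wrappers:

import torch
import torch.nn as nn

# Assumed geometry of up_conv4x4: ConvTranspose2d(kernel_size=4, stride=2, padding=1),
# which maps an HxW feature map to 2Hx2W: (H - 1) * 2 - 2 * 1 + 4 = 2H.
deconv = nn.ConvTranspose2d(64, 64, kernel_size=4, stride=2, padding=1)
x = torch.randn(1, 64, 16, 16)
print(deconv(x).shape)   # torch.Size([1, 64, 32, 32]) -> resolution doubled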