def compile(self, input_image: tf.Tensor, paf_label_layer: MakiTensor,
            heatmap_label_layer: MakiTensor):
    """
    Apply label correction to the heatmap and PAF label tensors
    using the output of the teacher network.

    Parameters
    ----------
    input_image : tf.Tensor
        Input image tensor that is fed to the teacher network.
    paf_label_layer : MakiTensor
        Ground-truth PAF labels to be corrected.
    heatmap_label_layer : MakiTensor
        Ground-truth heatmap labels to be corrected.

    Returns
    -------
    paf : MakiLayerWrapper
        Final PAF layer with the correction applied.
    heatmap : MakiLayerWrapper
        Final heatmap layer with the correction applied.
    """
    teacher_paf_tensor, teacher_heatmap_tensor = self.__init_teacher(input_image)

    paf_corrected = self.__label_correction_paf(
        t_paf=teacher_paf_tensor, l_paf=paf_label_layer.get_data_tensor())

    heatmap_corrected = self.__label_correction_heatmap(
        t_heatmap=teacher_heatmap_tensor,
        l_heatmap=heatmap_label_layer.get_data_tensor())

    paf_mf = MakiLayerWrapper(paf_corrected)
    heatmap_mf = MakiLayerWrapper(heatmap_corrected)

    return paf_mf, heatmap_mf
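
A minimal usage sketch for the method above. The class name LabelCorrector, the teacher_model argument, and the label MakiTensors (paf_labels, heatmap_labels) are assumptions for illustration; only the compile() signature comes from the example.

import tensorflow as tf

# Hypothetical setup: `LabelCorrector`, `teacher_model`, `paf_labels` and
# `heatmap_labels` are assumed names, not part of the example above.
input_image = tf.placeholder(tf.float32, shape=[1, 224, 224, 3], name='input_image')
corrector = LabelCorrector(teacher=teacher_model)  # assumed constructor
paf, heatmap = corrector.compile(
    input_image=input_image,
    paf_label_layer=paf_labels,          # MakiTensor with ground-truth PAFs
    heatmap_label_layer=heatmap_labels)  # MakiTensor with ground-truth heatmaps
# Both return values are MakiLayerWrapper objects carrying the corrected labels.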
Example #2
def identity_block(x: MakiTensor,
                   block_id: int,
                   unit_id: int,
                   num_block=None,
                   in_f=None,
                   use_bias=False,
                   activation=tf.nn.relu,
                   bn_params=None):
    """
    Parameters
    ----------
    x : MakiTensor
        Input MakiTensor.
    in_f : int
        Number of input feature maps. By default None (shape will be getted from tensor).
    activation : tensorflow function
        The function of activation, by default tf.nn.relu.
    use_bias : bool
        Use bias on layers or not.
    block_id : int
        Number of block (used in name of layers).
    unit_id : int
        Unit of block (used in name of layers).
    num_block : int
        Number of sum operation (used in name of layers).
    bn_params : dict
        Parameters for BatchNormLayer. If empty all parameters will have default valued.

    Returns
    ---------
    x : MakiTensor
        Output MakiTensor.
    """

    if bn_params is None:
        bn_params = {}

    prefix_name = 'block' + str(block_id) + '/unit_' + str(unit_id)
    if num_block is None:
        num_block = prefix_name + '/sum_operation'
    else:
        num_block = 'add_' + str(num_block)

    if in_f is None:
        in_f = x.get_shape()[-1]

    # Bottleneck width: a quarter of the input feature maps.
    reduction = in_f // 4

    # 1x1 convolution: squeeze the channels down to the bottleneck width.
    mx = ConvLayer(kw=1,
                   kh=1,
                   in_f=in_f,
                   out_f=reduction,
                   activation=None,
                   use_bias=use_bias,
                   name=prefix_name + '/bottleneck_v1/conv1/weights')(x)

    mx = BatchNormLayer(D=reduction,
                        name=prefix_name + '/bottleneck_v1/conv1/BatchNorm',
                        **bn_params)(mx)
    mx = ActivationLayer(activation=activation,
                         name=prefix_name + '/bottleneck_v1/conv1/activ')(mx)

    # 3x3 convolution at the reduced width.
    mx = ConvLayer(kw=3,
                   kh=3,
                   in_f=reduction,
                   out_f=reduction,
                   activation=None,
                   use_bias=use_bias,
                   name=prefix_name + '/bottleneck_v1/conv2/weights')(mx)

    mx = BatchNormLayer(D=reduction,
                        name=prefix_name + '/bottleneck_v1/conv2/BatchNorm',
                        **bn_params)(mx)
    mx = ActivationLayer(activation=activation,
                         name=prefix_name + '/bottleneck_v1/conv2/activ')(mx)

    # 1x1 convolution: expand back to the input width.
    mx = ConvLayer(kw=1,
                   kh=1,
                   in_f=reduction,
                   out_f=in_f,
                   activation=None,
                   use_bias=use_bias,
                   name=prefix_name + '/bottleneck_v1/conv3/weights')(mx)

    mx = BatchNormLayer(D=in_f,
                        name=prefix_name + '/bottleneck_v1/conv3/BatchNorm',
                        **bn_params)(mx)

    # Residual connection: add the bottleneck branch to the input.
    x = SumLayer(name=num_block)([mx, x])

    return x
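
A short sketch of calling identity_block. The InputLayer import path and the input shape are assumptions based on MakiFlow's layer API.

from makiflow.layers import InputLayer  # import path assumed

in_x = InputLayer(input_shape=[1, 56, 56, 256], name='input')
out_x = identity_block(x=in_x, block_id=1, unit_id=1)
# The block squeezes 256 channels down to 64 (in_f // 4), applies a 3x3
# convolution, expands back to 256, and sums with the skip connection,
# so `out_x` has the same shape as `in_x`.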
Example #3
def without_pointwise_CB(x: MakiTensor,
                         block_id: int,
                         unit_id: int,
                         num_block=None,
                         in_f=None,
                         use_bias=False,
                         activation=tf.nn.relu,
                         stride=2,
                         out_f=None,
                         bn_params=None):
    """
    Parameters
    ----------
    x : MakiTensor
        Input MakiTensor.
    in_f : int
        Number of input feature maps. By default is None (shape will be getted from tensor).
    out_f : int
        Number of output feature maps. By default is None which means out_f = 2 * in_f.
    activation : tensorflow function
        The function of activation. By default tf.nn.relu.
    use_bias : bool
        Use bias on layers or not.
    block_id : int
        Number of block (used in name of layers).
    unit_id : int
        Unit of block (used in name of layers).
    num_block : int
        Number of sum operation (used in name of layers).
    bn_params : dict
        Parameters for BatchNormLayer. If empty all parameters will have default valued.

    Returns
    ---------
    x : MakiTensor
        Output MakiTensor.
    """
    if bn_params is None:
        bn_params = {}

    prefix_name = 'stage' + str(block_id) + '_unit' + str(unit_id) + '_'

    if num_block is None:
        num_block = prefix_name + '/sum_operation'
    else:
        num_block = 'add_' + str(num_block)

    if in_f is None:
        in_f = x.get_shape()[-1]

    if out_f is None:
        out_f = int(2 * in_f)

    # Pre-activation: BN + activation before the branches split.
    x = BatchNormLayer(D=in_f, name=prefix_name + 'bn1', **bn_params)(x)
    x = ActivationLayer(activation=activation,
                        name=prefix_name + 'activation_1')(x)

    # Main branch: explicit zero padding, then a strided 3x3 convolution.
    mx = ZeroPaddingLayer(padding=[[1, 1], [1, 1]],
                          name=prefix_name + 'zero_pad_1')(x)

    mx = ConvLayer(kw=3,
                   kh=3,
                   in_f=in_f,
                   out_f=out_f,
                   activation=None,
                   stride=stride,
                   padding='VALID',
                   use_bias=use_bias,
                   name=prefix_name + 'conv1')(mx)

    mx = BatchNormLayer(D=out_f, name=prefix_name + 'bn2', **bn_params)(mx)
    mx = ActivationLayer(activation=activation,
                         name=prefix_name + 'activation_2')(mx)

    mx = ZeroPaddingLayer(padding=[[1, 1], [1, 1]],
                          name=prefix_name + 'zero_pad_2')(mx)
    mx = ConvLayer(kw=3,
                   kh=3,
                   in_f=out_f,
                   out_f=out_f,
                   activation=None,
                   padding='VALID',
                   use_bias=use_bias,
                   name=prefix_name + 'conv2')(mx)

    # Shortcut branch: strided 1x1 convolution to match the main branch shape.
    sx = ConvLayer(kw=1,
                   kh=1,
                   in_f=in_f,
                   out_f=out_f,
                   stride=stride,
                   padding='VALID',
                   activation=None,
                   use_bias=use_bias,
                   name=prefix_name + 'sc/conv')(x)

    # Residual connection: sum the main and shortcut branches.
    x = SumLayer(name=num_block)([mx, sx])

    return x
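
The same kind of sketch for without_pointwise_CB; the shapes and the InputLayer import are again assumptions.

from makiflow.layers import InputLayer  # import path assumed

in_x = InputLayer(input_shape=[1, 56, 56, 64], name='input_cb')
out_x = without_pointwise_CB(x=in_x, block_id=2, unit_id=1)
# With the defaults (stride=2, out_f=None) the block halves the spatial
# size (56 -> 28) and doubles the channels (64 -> 128); the strided 1x1
# shortcut convolution keeps both branches shape-compatible for SumLayer.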
Example #4
def to_makitensor(x, name):
    # Wrap a raw tf.Tensor into a MakiTensor with no parent layer so it
    # can be fed into MakiFlow graph-building functions.
    return MakiTensor(x,
                      parent_layer=None,
                      parent_tensor_names=[],
                      previous_tensors={},
                      name=name)
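
A quick sketch of wrapping a raw TensorFlow tensor with to_makitensor; the placeholder shape is illustrative.

import tensorflow as tf

raw = tf.placeholder(tf.float32, shape=[1, 128, 128, 3], name='raw_input')
mt = to_makitensor(raw, name='wrapped_input')
# mt.get_data_tensor() returns `raw`; the parent bookkeeping is empty
# because the tensor was created outside MakiFlow's layer graph.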