def _conv2d_layer(name,
        input,
        filters,
        kernel_size,
        strides=1,
        padding="VALID",
        groups_num=1,
        data_format="NHWC",
        use_bias=True,
        weight_initializer=flow.glorot_uniform_initializer(),
        bias_initializer=flow.zeros_initializer(),
        trainable=True,
        groups=1,
        ):
    """Thin wrapper around ``flow.layers.conv2d``.

    Args:
        name: Variable/op name prefix for the layer.
        input: Input blob/tensor.
        filters: Number of output channels.
        kernel_size: Spatial kernel size (int or pair).
        strides: Spatial strides (int or pair).
        padding: Padding mode, e.g. "VALID" or "SAME".
        groups_num: Unused; kept for backward compatibility with existing
            callers. Use ``groups`` instead.
        data_format: "NHWC" or "NCHW".
        use_bias: Whether to add a bias term.
        weight_initializer: Initializer for the convolution kernel.
        bias_initializer: Initializer for the bias.
        trainable: Whether the created variables are trainable.
        groups: Number of convolution groups.

    Returns:
        The output blob of ``flow.layers.conv2d``.
    """
    # Honor the initializer parameters instead of hardcoding
    # xavier_normal/zeros (the previous body silently ignored them).
    return flow.layers.conv2d(
        input, filters, kernel_size, strides, padding,
        data_format=data_format, dilation_rate=1, groups=groups,
        activation=None, use_bias=use_bias,
        kernel_initializer=weight_initializer,
        bias_initializer=bias_initializer,
        trainable=trainable, name=name)
# Esempio n. 2
 def build_network(self, inputs):
     """Run the configured (inflated) 3D convolution over `inputs`.

     Reads the layer configuration stored on `self` (output channels,
     kernel/stride/padding, trainability) and delegates to `conv3d_layer`.
     """
     conv_kwargs = dict(
         inputs=inputs,
         filters=self.conv2d.out_channels,
         kernel_size=self.kernel_dim,
         strides=self.stride,
         padding=self.padding,
         use_bias=False,
         weight_initializer=flow.xavier_normal_initializer(
             data_format='NCDHW'),
         trainable=self.trainable,
     )
     return conv3d_layer(self.name, **conv_kwargs)
# Esempio n. 3
    def test_float_initializer(test_case):
        """Check the output distribution of every float32 initializer."""
        # Fixed-configuration initializers.
        initializers = [
            flow.random_normal_initializer(mean=3, stddev=4),
            flow.random_uniform_initializer(minval=-6, maxval=18),
            flow.truncated_normal_initializer(mean=-5, stddev=8),
            flow.xavier_uniform_initializer(data_format="NCHW"),
            flow.xavier_uniform_initializer(data_format="NHWC"),
            flow.xavier_normal_initializer(data_format="NCHW"),
            flow.xavier_normal_initializer(data_format="NHWC"),
            flow.constant_initializer(value=4),
            flow.ones_initializer(),
            flow.zeros_initializer(),
        ]

        # Parameter sweeps for kaiming and variance-scaling initializers.
        kaiming_args = GenArgDict(
            OrderedDict(
                shape=[SHAPE],
                mode=["fan_in", "fan_out", "fan_avg"],
                distribution=["random_normal", "random_uniform"],
                data_format=["NCHW", "NHWC"],
                negative_slope=[0.5],
            ))
        vs_args = GenArgDict(
            OrderedDict(
                scale=[3.4],
                mode=["fan_in", "fan_out", "fan_avg"],
                distribution=[
                    "truncated_normal", "random_normal", "random_uniform"
                ],
                data_format=["NCHW", "NHWC"],
            ))
        initializers.extend(
            flow.kaiming_initializer(**args) for args in kaiming_args)
        initializers.extend(
            flow.variance_scaling_initializer(**args) for args in vs_args)

        for initializer in initializers:
            CompareTwoDistribution(test_case, flow.float32, initializer)
# Esempio n. 4
def inflate_conv(inputs,
                 conv2d,
                 time_dim=1,
                 time_padding=0,
                 time_stride=1,
                 time_dilation=1,
                 trainable=True,
                 name=None):
    """Inflate a 2D convolution spec into a 3D (temporal) convolution.

    Prepends a temporal dimension to the 2D conv's kernel, stride, padding
    and dilation, then applies ``conv3d_layer``.

    Args:
        inputs: 5-D input blob (presumably NCDHW, matching the 'NCDHW'
            initializer below — confirm against callers).
        conv2d: Object exposing ``kernel_size``, ``padding``, ``stride``,
            ``dilation`` (each indexable pairs) and ``out_channels``.
        time_dim: Temporal kernel extent.
        time_padding: Temporal padding.
        time_stride: Temporal stride.
        time_dilation: Temporal dilation.
        trainable: Whether the conv variables are trainable.
        name: Layer name.

    Returns:
        The output blob of ``conv3d_layer``.
    """
    kernel_dim = [time_dim, conv2d.kernel_size[0], conv2d.kernel_size[1]]
    # Leading zeros: no padding on the batch/channel dims.
    padding = [0, 0, time_padding, conv2d.padding[0], conv2d.padding[1]]
    # BUG FIX: width stride previously reused conv2d.stride[0]; use [1]
    # like the kernel/padding/dilation lines above and below.
    stride = [time_stride, conv2d.stride[0], conv2d.stride[1]]
    dilation = [time_dilation, conv2d.dilation[0], conv2d.dilation[1]]
    output = conv3d_layer(
        name,
        inputs,
        conv2d.out_channels,
        kernel_size=kernel_dim,
        dilation_rate=dilation,
        strides=stride,
        padding=padding,
        weight_initializer=flow.xavier_normal_initializer(data_format='NCDHW'),
        use_bias=False,
        trainable=trainable)
    return output
# Esempio n. 5
 def xavier_normal_(self, gain=1.0, *, data_format="NCHW"):
     """Fill this tensor in place using Xavier-normal initialization.

     Only ``gain == 1.0`` is supported; other values raise AssertionError.
     """
     assert gain == 1.0, "Only gain == 1.0 is supported now"
     return self._init_by_initializer_conf(
         flow.xavier_normal_initializer(data_format=data_format))
def _conv2d_layer(
        name,
        input,
        filters,
        kernel_size,
        strides=1,
        padding="VALID",
        groups_num=1,
        data_format="NHWC",
        use_bias=True,
        weight_initializer=flow.glorot_uniform_initializer(),
        bias_initializer=flow.zeros_initializer(),
        trainable=True,
        groups=1,
):
    """Thin wrapper around ``flow.layers.conv2d``.

    Args:
        name: Variable/op name prefix for the layer.
        input: Input blob/tensor.
        filters: Number of output channels.
        kernel_size: Spatial kernel size (int or pair).
        strides: Spatial strides (int or pair).
        padding: Padding mode, e.g. "VALID" or "SAME".
        groups_num: Unused; kept for backward compatibility with existing
            callers. Use ``groups`` instead.
        data_format: "NHWC" or "NCHW".
        use_bias: Whether to add a bias term.
        weight_initializer: Initializer for the convolution kernel.
        bias_initializer: Initializer for the bias.
        trainable: Whether the created variables are trainable.
        groups: Number of convolution groups.

    Returns:
        The output blob of ``flow.layers.conv2d``.
    """
    # Honor the initializer parameters instead of hardcoding
    # xavier_normal/zeros (the previous body silently ignored them).
    # The large block of commented-out manual get_variable/bias_add code
    # duplicated what flow.layers.conv2d already does and was removed.
    return flow.layers.conv2d(
        input,
        filters,
        kernel_size,
        strides,
        padding,
        data_format=data_format,
        dilation_rate=1,
        groups=groups,
        activation=None,
        use_bias=use_bias,
        kernel_initializer=weight_initializer,
        bias_initializer=bias_initializer,
        trainable=trainable,
        name=name)