Code Example #1
    def call(self, x, mask=None):
        if K.backend() == 'tensorflow':
            return self._call_tf(x, mask)

        # Cross-channel local response normalization: accumulate squared
        # activations over a window of 2 * half_n + 1 neighbouring channels.
        half_n = self.n - 1
        squared = K.square(x)
        scale = self.k
        norm_alpha = self.alpha / (2 * half_n + 1)
        if K.image_dim_ordering() == "th":
            b, f, r, c = self.shape
            squared = K.expand_dims(squared, 0)
            squared = K.spatial_3d_padding(squared,
                                           padding=((half_n, half_n), (0, 0),
                                                    (0, 0)))
            squared = K.squeeze(squared, 0)
            for i in range(half_n * 2 + 1):
                scale += norm_alpha * squared[:, i:i + f, :, :]
        else:
            b, r, c, f = self.shape
            squared = K.expand_dims(squared, -1)
            squared = K.spatial_3d_padding(squared,
                                           padding=((0, 0), (0, 0), (half_n,
                                                                     half_n)))
            squared = K.squeeze(squared, -1)
            for i in range(half_n * 2 + 1):
                scale += norm_alpha * squared[:, :, :, i:i + f]

        scale = K.pow(scale, self.beta)
        return x / scale
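For reference, here is a minimal standalone sketch of the padding call used above (the tensor shape and padding amounts are arbitrary choices for illustration, not taken from the project): K.spatial_3d_padding expects a 5D tensor and zero-pads its three spatial axes, which with the default channels_last data format are axes 1, 2 and 3.

import numpy as np
from keras import backend as K

# Illustrative shapes only: pad axis 1 by (1, 1), axis 2 by (0, 0), axis 3 by (2, 2).
x = K.constant(np.ones((1, 4, 4, 4, 2)))   # (batch, dim1, dim2, dim3, channels)
y = K.spatial_3d_padding(x, padding=((1, 1), (0, 0), (2, 2)))
print(K.int_shape(y))                       # (1, 6, 4, 8, 2)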
Code Example #2
 def get_keras_pad(cls, x, pads, dim, data_format=None):
     """
     Implement the padding used by a conv or pool operator.
     :param x: input tensor
     :param pads: pads attribute of the conv or pool operator
     :param dim: number of dimensions to pad (1, 2 or 3)
     :param data_format: data format of x
     :return: the padded tensor
     """
     if sum(pads) == 0:
         return x
     if len(pads) == dim * 2:
         pads = list(
             np.transpose(
                 np.array(pads).reshape([2, dim]).astype(np.int32)))
         pads = tuple(tuple(i) for i in pads)
     elif len(pads) == dim:
         pads = tuple((i, i) for i in pads)
     if dim == 1:
         return Lambda(lambda _x: K.temporal_padding(_x, pads))(x)
     elif dim == 2:
         return Lambda(
             lambda _x: K.spatial_2d_padding(_x, pads, data_format))(x)
     elif dim == 3:
         return Lambda(
             lambda _x: K.spatial_3d_padding(_x, pads, data_format))(x)
     else:
         raise NotImplementedError(
             "padding with dim {} is not implemented.".format(dim))
Code Example #3
File: generator.py Project: zax0s/QIMP-tools
    def call(self, x, **kwargs):
        x = K.spatial_3d_padding(x, padding=self.padding)

        # Imitate depthwise_conv3d: convolve each input channel separately and concatenate the results.
        channels = x.shape[-1]
        x = K.concatenate(
            [
                K.conv3d(
                    x=x[:, :, :, :, i:i + 1],
                    kernel=self.blur_kernel[..., i:i + 1, :],
                    strides=self.pool_size,
                    padding='valid',
                ) for i in range(0, channels)
            ],
            axis=-1,
        )

        return x
Code Example #4
File: backend.py Project: leodestiny/onnx-keras
 def get_keras_pad(cls, x, pads, dim, data_format=None):
     if sum(pads) == 0:
         return x
     if len(pads) == dim * 2:
         pads = list(
             np.transpose(
                 np.array(pads).reshape([2, dim]).astype(np.int32)))
         pads = tuple(tuple(i) for i in pads)
     elif len(pads) == dim:
         pads = tuple((i, i) for i in pads)
     if dim == 1:
         return Lambda(lambda _x: K.temporal_padding(_x, pads))(x)
     elif dim == 2:
         return Lambda(
             lambda _x: K.spatial_2d_padding(_x, pads, data_format))(x)
     elif dim == 3:
         return Lambda(
             lambda _x: K.spatial_3d_padding(_x, pads, data_format))(x)
     else:
         raise NotImplementedError(
             "padding with dim {} is not implemented.".format(dim))
Code Example #5
        def normalize_tensor_3d(X):
            # Cross-channel local response normalization for a 5D
            # channels_last tensor: the batch axis is rotated to the end so
            # that the channel axis lands in a spatial slot, which lets
            # spatial_3d_padding pad it with (half, half) zeros.

            X2 = K.square(X)

            half = n // 2

            extra_channels = K.spatial_3d_padding(
                K.permute_dimensions(X2, (1, 2, 3, 4, 0)),
                padding=((0, 0), (0, 0), (half, half)))
            extra_channels = K.permute_dimensions(extra_channels,
                                                  (4, 0, 1, 2, 3))

            Xdims = K.int_shape(X)
            number_of_channels = int(Xdims[-1])

            scale = k
            for i in range(n):
                scale += alpha * extra_channels[:, :, :, :,
                                                i:(i + number_of_channels)]
            scale = scale**beta

            return (X / scale)
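One way a closure like this might be wired into a model, as a sketch only: it assumes n, k, alpha and beta are hyperparameters defined in the enclosing scope and that normalize_tensor_3d from the example above is available at this point; the values and shapes below are purely illustrative.

from keras import backend as K
from keras.layers import Input, Lambda
from keras.models import Model

n, k, alpha, beta = 5, 2.0, 1e-4, 0.75      # assumed LRN-style hyperparameters

inp = Input(shape=(8, 8, 8, 16))            # 5D channels_last input
out = Lambda(normalize_tensor_3d)(inp)      # cross-channel normalization
model = Model(inp, out)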
Code Example #6
def pad_backend(inputs, in_channels, out_channels):
    # Zero-pad the last (channel) axis of a channels_last tensor from
    # in_channels to out_channels by temporarily adding a trailing axis so
    # that spatial_3d_padding can act on the channel dimension.
    pad_dim = (out_channels - in_channels) // 2
    inputs = K.expand_dims(inputs, -1)
    inputs = K.spatial_3d_padding(inputs, ((0, 0), (0, 0), (pad_dim, pad_dim)),
                                  'channels_last')
    return K.squeeze(inputs, -1)
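A sketch of how a helper like this is commonly used, assuming a shortcut connection whose channel count has to grow to match a convolution output (the layer sizes here are illustrative, not taken from the project above):

from keras.layers import Input, Conv2D, Lambda, add
from keras.models import Model

inp = Input(shape=(32, 32, 16))
conv = Conv2D(32, 3, padding='same')(inp)                # 16 -> 32 channels
shortcut = Lambda(pad_backend,
                  arguments={'in_channels': 16,
                             'out_channels': 32})(inp)   # zero-pad 16 -> 32 channels
out = add([conv, shortcut])
model = Model(inp, out)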
Code Example #7
def pad_backend(inputs, in_channels, out_channels):
    pad_dim = (out_channels - in_channels) // 2
    return K.spatial_3d_padding(inputs,
                                padding=((0, 0), (0, 0), (pad_dim, pad_dim)))
Code Example #8
File: zero_padding3d.py Project: vishalbelsare/keras
 def call(self, inputs):
     return backend.spatial_3d_padding(inputs,
                                       padding=self.padding,
                                       data_format=self.data_format)
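This is the call method of Keras's own ZeroPadding3D layer; in user code the layer is normally applied directly to a 5D tensor rather than calling the backend function, for example (shapes are arbitrary):

from keras.layers import Input, ZeroPadding3D
from keras.models import Model

inp = Input(shape=(10, 10, 10, 4))            # (dim1, dim2, dim3, channels)
out = ZeroPadding3D(padding=(1, 2, 3))(inp)   # symmetric padding per spatial axis
model = Model(inp, out)
print(model.output_shape)                     # (None, 12, 14, 16, 4)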
Code Example #9
            NNEURON = np.sum(Nneurons)

            from keras.layers import Input, Dense, Activation, LocallyConnected2D, ZeroPadding3D, Dropout, GaussianNoise, Lambda
            from keras.models import Model, Sequential
            from keras import regularizers
            from keras.layers.normalization import BatchNormalization
            from keras.layers.core import ActivityRegularization
            from keras.layers.convolutional import Conv2D, Conv3D
            from keras import constraints
            from keras import backend as K

            model = Sequential()

            model.add(
                Lambda(lambda x: K.spatial_3d_padding(x, ((19, 19), (19, 19),
                                                          (7, 2))),
                       input_shape=(NX, NY, NT, 1)), )

            model.add(
                Conv3D(filters=NTYPE,
                       kernel_size=(KS, KS, KT),
                       strides=(1, 1, 1),
                       padding='valid',
                       data_format=None,
                       dilation_rate=(2, 2, 1),
                       activation=None,
                       use_bias=False,
                       kernel_initializer='glorot_uniform',
                       bias_initializer='zeros',
                       bias_regularizer=None,
                       kernel_regularizer=keras.regularizers.l2(50000),