Example 1
 def __init__(self, filters,
              number_of_classes,
              strides=1,
              group=1,
              data_format=None,
              activation=None,
              use_bias=True,
              kernel_initializer='glorot_uniform',
              bias_initializer='zeros',
              kernel_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              bias_constraint=None,
              triangular=False,
              **kwargs):
     super(ConditionalConv11, self).__init__(**kwargs)
     self.filters = filters
     self.kernel_size = conv_utils.normalize_tuple((1, 1), 2, 'kernel_size')
     self.number_of_classes = number_of_classes
     self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
     self.padding = conv_utils.normalize_padding('same')
     self.group = group
     self.data_format = conv_utils.normalize_data_format(data_format)
     self.dilation_rate = conv_utils.normalize_tuple(1, 2, 'dilation_rate')
     self.activation = activations.get(activation)
     self.use_bias = use_bias
     self.kernel_initializer = initializers.get(kernel_initializer)
     self.bias_initializer = initializers.get(bias_initializer)
     self.kernel_regularizer = regularizers.get(kernel_regularizer)
     self.bias_regularizer = regularizers.get(bias_regularizer)
     self.activity_regularizer = regularizers.get(activity_regularizer)
     self.kernel_constraint = constraints.get(kernel_constraint)
     self.bias_constraint = constraints.get(bias_constraint)
     self.triangular = triangular
Example 2
 def __init__(self, filters, locnet,
              strides=1,
              data_format=None,
              activation=None,
              use_bias=True,
              kernel_initializer='glorot_uniform',
              bias_initializer='zeros',
              kernel_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              bias_constraint=None,
              **kwargs):
     super(NINConv11, self).__init__(**kwargs)
     self.filters = int(filters)
     self.locnet = locnet
     self.kernel_size = conv_utils.normalize_tuple((1, 1), 2, 'kernel_size')
     self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
     self.padding = conv_utils.normalize_padding('same')
     self.data_format = conv_utils.normalize_data_format(data_format)
     self.dilation_rate = conv_utils.normalize_tuple(1, 2, 'dilation_rate')
     self.activation = activations.get(activation)
     self.use_bias = use_bias
     self.kernel_initializer = initializers.get(kernel_initializer)
     self.bias_initializer = initializers.get(bias_initializer)
     self.kernel_regularizer = regularizers.get(kernel_regularizer)
     self.bias_regularizer = regularizers.get(bias_regularizer)
     self.activity_regularizer = regularizers.get(activity_regularizer)
     self.kernel_constraint = constraints.get(kernel_constraint)
     self.bias_constraint = constraints.get(bias_constraint)
Example 3
    def __init__(
            self,
            channel_multiplier,
            kernel_size,
            strides=(1, 1),
            padding='valid',
            #data_format=None,
            dilation_rate=(1, 1),
            #activation=None,
            use_bias=False,
            kernel_initializer='glorot_uniform',
            bias_initializer='zeros',
            kernel_regularizer=None,
            bias_regularizer=None,
            #activity_regularizer=None,
            kernel_constraint=None,
            bias_constraint=None,
            **kwargs):
        super(DepthwiseConv2D, self).__init__(**kwargs)

        rank = 2
        self.channel_multiplier = channel_multiplier
        self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank,
                                                      'kernel_size')
        self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
        self.padding = conv_utils.normalize_padding(padding)
        self.dilation_rate = conv_utils.normalize_tuple(
            dilation_rate, rank, 'dilation_rate')
        self.use_bias = use_bias
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.bias_constraint = constraints.get(bias_constraint)
Example 4
 def __init__(self,
              rank,
              filters,
              kernel_size,
              strides=1,
              padding='valid',
              data_format=None,
              dilation_rate=1,
              activation=None,
              use_bias=True,
              kernel_initializer='glorot_uniform',
              bias_initializer='zeros',
              name=None,
              **kwargs):
     super(Conv, self).__init__()
     self.rank = rank
     self.filters = filters
     self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank,
                                                   'kernel_size')
     self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
     self.padding = conv_utils.normalize_padding(padding)
     if (self.padding == 'causal' and not isinstance(self, (Conv1D, ))):
          raise ValueError('Causal padding is only supported for `Conv1D` '
                           'and `SeparableConv1D`.')
     self.data_format = conv_utils.normalize_data_format(data_format)
     self.dilation_rate = conv_utils.normalize_tuple(
         dilation_rate, rank, 'dilation_rate')
     self.activation = parse_activation(activation, self)
     self.use_bias = use_bias
     self.kernel_initializer = parse_initializer(kernel_initializer, self)
     self.bias_initializer = parse_initializer(bias_initializer, self)
Example 5
 def __init__(self,
              filters,
              kernel_size,
              strides=1,
              padding='valid',
              data_format=None,
              activation=None,
              use_bias=True,
              kernel_initializer='glorot_uniform',
              bias_initializer='zeros',
              kernel_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              bias_constraint=None,
              **kwargs):
   super(LocallyConnected1D, self).__init__(**kwargs)
   self.filters = filters
   self.kernel_size = conv_utils.normalize_tuple(kernel_size, 1, 'kernel_size')
   self.strides = conv_utils.normalize_tuple(strides, 1, 'strides')
   self.padding = conv_utils.normalize_padding(padding)
   if self.padding != 'valid':
     raise ValueError('Invalid border mode for LocallyConnected1D '
                      '(only "valid" is supported): ' + padding)
   self.data_format = conv_utils.normalize_data_format(data_format)
   self.activation = activations.get(activation)
   self.use_bias = use_bias
   self.kernel_initializer = initializers.get(kernel_initializer)
   self.bias_initializer = initializers.get(bias_initializer)
   self.kernel_regularizer = regularizers.get(kernel_regularizer)
   self.bias_regularizer = regularizers.get(bias_regularizer)
   self.activity_regularizer = regularizers.get(activity_regularizer)
   self.kernel_constraint = constraints.get(kernel_constraint)
   self.bias_constraint = constraints.get(bias_constraint)
   self.input_spec = InputSpec(ndim=3)
Example 6
 def __init__(self, filters,
              kernel_size,
              number_of_classes,
              strides=1,
              padding='valid',
              data_format=None,
              dilation_rate=1,
              activation=None,
              use_bias=True,
              kernel_initializer='glorot_uniform',
              bias_initializer='zeros',
              kernel_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              bias_constraint=None,
              **kwargs):
     super(ConditionalDepthwiseConv2D, self).__init__(**kwargs)
     self.filters = int(filters)
     self.kernel_size = conv_utils.normalize_tuple(kernel_size, 2, 'kernel_size')
     self.number_of_classes = number_of_classes
     self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
     self.padding = conv_utils.normalize_padding(padding)
     self.data_format = conv_utils.normalize_data_format(data_format)
     self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, 2, 'dilation_rate')
     self.activation = activations.get(activation)
     self.use_bias = use_bias
     self.kernel_initializer = initializers.get(kernel_initializer)
     self.bias_initializer = initializers.get(bias_initializer)
     self.kernel_regularizer = regularizers.get(kernel_regularizer)
     self.bias_regularizer = regularizers.get(bias_regularizer)
     self.activity_regularizer = regularizers.get(activity_regularizer)
     self.kernel_constraint = constraints.get(kernel_constraint)
     self.bias_constraint = constraints.get(bias_constraint)
Example 7
 def __init__(self,
              filters,
              kernel_size,
              strides=1,
              padding='valid',
              dilation_rate=1,
              kernel_initializer='glorot_uniform',
              kernel_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              demod=True,
              **kwargs):
     super(Conv2DMod, self).__init__(**kwargs)
     self.filters = filters
     self.rank = 2
     self.kernel_size = conv_utils.normalize_tuple(kernel_size, 2,
                                                   'kernel_size')
     self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
     self.padding = conv_utils.normalize_padding(padding)
     self.dilation_rate = conv_utils.normalize_tuple(
         dilation_rate, 2, 'dilation_rate')
     self.kernel_initializer = initializers.get(kernel_initializer)
     self.kernel_regularizer = regularizers.get(kernel_regularizer)
     self.activity_regularizer = regularizers.get(activity_regularizer)
     self.kernel_constraint = constraints.get(kernel_constraint)
     self.demod = demod
     self.input_spec = [InputSpec(ndim=4), InputSpec(ndim=2)]
Example 8
 def __init__(self,
              mask,
              filters,
              padding='valid',
              data_format=None,
              activation=None,
              use_bias=True,
              kernel_initializer='glorot_uniform',
              bias_initializer='zeros',
              kernel_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              bias_constraint=None,
              **kwargs):
     super(LocallyDirected1D, self).__init__(**kwargs)
     self.filters = filters
     self.padding = conv_utils.normalize_padding(padding)
     self.data_format = conv_utils.normalize_data_format(data_format)
     self.activation = activations.get(activation)
     self.use_bias = use_bias
     self.kernel_initializer = initializers.get(kernel_initializer)
     self.bias_initializer = initializers.get(bias_initializer)
     self.kernel_regularizer = regularizers.get(kernel_regularizer)
     self.bias_regularizer = regularizers.get(bias_regularizer)
     self.activity_regularizer = regularizers.get(activity_regularizer)
     self.kernel_constraint = constraints.get(kernel_constraint)
     self.bias_constraint = constraints.get(bias_constraint)
     self.input_spec = InputSpec(ndim=3)
     self.mask = mask
Example 9
 def __init__(self,
              pool_size: Union[int, Tuple[int, int]] = (2, 2),
              strides: Optional[Union[int, Tuple[int, int]]] = None,
              padding: str = 'valid',
              data_format: Optional[str] = None,
              name: Optional[str] = None,
              **kwargs):
     """
     :param pool_size: An integer or tuple/list of 2 integers: (pool_height, pool_width)
         specifying the size of the pooling window.
         Can be a single integer to specify the same value for all spatial dimensions.
     :param strides: An integer or tuple/list of 2 integers, specifying the strides of the pooling operation.
         Can be a single integer to specify the same value for all spatial dimensions.
     :param padding: A string. The padding method, either 'valid' or 'same'. Case-insensitive.
     :param data_format: A string, one of `channels_last` (default) or `channels_first`.
         The ordering of the dimensions in the inputs.
         `channels_last` corresponds to inputs with shape
         `(batch, height, width, channels)` while `channels_first` corresponds to inputs with shape `(batch, channels, height, width)`.
     :param name: A string, the name of the layer.
     """
     super(ComplexPooling2D, self).__init__(name=name, **kwargs)
     if data_format is None:
         data_format = backend.image_data_format()
     if strides is None:
         strides = pool_size
     self.pool_size = conv_utils.normalize_tuple(
         pool_size, 2, 'pool_size'
      )  # Values are checked here. No need to check them later.
     self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
     self.padding = conv_utils.normalize_padding(padding)
     self.data_format = conv_utils.normalize_data_format(data_format)
     self.input_spec = InputSpec(ndim=4)
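The docstring above describes how `pool_size`, `strides`, `padding`, and `data_format` may be given either as scalars/strings or as tuples; the `conv_utils` calls in the constructor turn those flexible inputs into canonical values. A minimal sketch of what those calls return, assuming `conv_utils` is imported from `tensorflow.python.keras.utils` (the imports are not shown in these snippets):

from tensorflow.python.keras.utils import conv_utils

# A single int is broadcast to a tuple of the requested rank.
conv_utils.normalize_tuple(2, 2, 'pool_size')       # -> (2, 2)
conv_utils.normalize_tuple((2, 3), 2, 'strides')    # -> (2, 3)

# Padding strings are validated and lower-cased.
conv_utils.normalize_padding('SAME')                # -> 'same'

# None falls back to the globally configured image data format.
conv_utils.normalize_data_format(None)              # -> 'channels_last' unless configured otherwise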
Example 10
  def __init__(self,
               filters,
               kernel_size,
               cov_kernel_size=(1,12),
               strides=(1, 1),
               padding='valid',
               data_format=None,
               dilation_rate=(1, 1),
               activation='tanh',
               recurrent_activation='hard_sigmoid',
               use_bias=True,
               kernel_initializer='glorot_uniform',
               recurrent_initializer='orthogonal',
               bias_initializer='zeros',
               unit_forget_bias=True,
               kernel_regularizer=None,
               recurrent_regularizer=None,
               bias_regularizer=None,
               kernel_constraint=None,
               recurrent_constraint=None,
               bias_constraint=None,
               dropout=0.,
               recurrent_dropout=0.,
               **kwargs):
    super(ConvLSTM2DCell, self).__init__(**kwargs)
    self.filters = filters
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, 2, 'kernel_size')
    #############################
    self.cov_kernel_size = cov_kernel_size
    self.kernel_size_1 = conv_utils.normalize_tuple(cov_kernel_size, 2, 'kernel_size')
    ##################################
    self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = conv_utils.normalize_data_format(data_format)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, 2,
                                                    'dilation_rate')
    self.activation = activations.get(activation)
    self.recurrent_activation = activations.get(recurrent_activation)
    self.use_bias = use_bias

    self.kernel_initializer = initializers.get(kernel_initializer)
    ###############################
    self.cov_kernel_initializer = initializers.get(kernel_initializer)
    ##########################
    self.recurrent_initializer = initializers.get(recurrent_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.unit_forget_bias = unit_forget_bias

    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.recurrent_regularizer = regularizers.get(recurrent_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)

    self.kernel_constraint = constraints.get(kernel_constraint)
    self.recurrent_constraint = constraints.get(recurrent_constraint)
    self.bias_constraint = constraints.get(bias_constraint)

    self.dropout = min(1., max(0., dropout))
    self.recurrent_dropout = min(1., max(0., recurrent_dropout))
    self.state_size = (self.filters, self.filters)
Example 11
 def __init__(self,
              rank,
              filters,
              kernel_size,
              strides=1,
              padding="valid",
              data_format=None,
              dilation_rate=1,
              groups=1,
              activation=None,
              use_bias=True,
              kernel_initializer="glorot_uniform",
              bias_initializer="zeros",
              kernel_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              bias_constraint=None,
              trainable=True,
              name=None,
              **kwargs):
     super(Conv, self).__init__(
         trainable=trainable,
         name=name,
         activity_regularizer=regularizers.get(activity_regularizer),
         **kwargs)
     self.rank = rank
     if filters is not None and not isinstance(filters, int):
         filters = int(filters)
     self.filters = filters
     self.groups = groups or 1
     if filters is not None and filters % self.groups != 0:
         raise ValueError(
             "The number of filters must be evenly divisible by the number of "
             "groups. Received: groups={}, filters={}".format(
                 groups, filters))
     self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank,
                                                   "kernel_size")
     if not all(self.kernel_size):
         raise ValueError("The argument `kernel_size` cannot contain 0(s). "
                          "Received: %s" % (kernel_size, ))
     self.strides = conv_utils.normalize_tuple(strides, rank, "strides")
     self.padding = conv_utils.normalize_padding(padding)
     if self.padding == "causal" and not isinstance(
             self, (Conv1D, SeparableConv1D)):
         raise ValueError("Causal padding is only supported for `Conv1D`"
                          "and ``SeparableConv1D`.")
     self.data_format = conv_utils.normalize_data_format(data_format)
     self.dilation_rate = conv_utils.normalize_tuple(
         dilation_rate, rank, "dilation_rate")
     self.activation = activations.get(activation)
     self.use_bias = use_bias
     self.kernel_initializer = initializers.get(kernel_initializer)
     self.bias_initializer = initializers.get(bias_initializer)
     self.kernel_regularizer = regularizers.get(kernel_regularizer)
     self.bias_regularizer = regularizers.get(bias_regularizer)
     self.kernel_constraint = constraints.get(kernel_constraint)
     self.bias_constraint = constraints.get(bias_constraint)
     self.input_spec = InputSpec(ndim=self.rank + 2)
Example 12
    def __init__(self,
                 rank,
                 filters,
                 kernel_size,
                 strides=1,
                 padding='valid',
                 data_format=None,
                 dilation_rate=1,
                 groups=1,
                 activation=None,
                 use_bias=True,
                 kernel_initializer='glorot_uniform',
                 bias_initializer='zeros',
                 kernel_regularizer=None,
                 bias_regularizer=None,
                 activity_regularizer=None,
                 kernel_constraint=None,
                 bias_constraint=None,
                 trainable=True,
                 name=None,
                 conv_op=None,
                 **kwargs):
        super(Conv_ada, self).__init__(
            trainable=trainable,
            name=name,
            activity_regularizer=regularizers.get(activity_regularizer),
            **kwargs)
        self.rank = rank

        if isinstance(filters, float):
            filters = int(filters)
        self.filters = filters
        self.groups = groups or 1
        self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank,
                                                      'kernel_size')
        self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
        self.padding = conv_utils.normalize_padding(padding)
        self.data_format = conv_utils.normalize_data_format(data_format)
        self.dilation_rate = conv_utils.normalize_tuple(
            dilation_rate, rank, 'dilation_rate')

        self.activation = activations.get(activation)
        self.use_bias = use_bias

        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.bias_constraint = constraints.get(bias_constraint)
        self.input_spec = InputSpec(min_ndim=self.rank + 2)

        self._validate_init()
        self._is_causal = self.padding == 'causal'
        self._channels_first = self.data_format == 'channels_first'
        self._tf_data_format = conv_utils.convert_data_format(
            self.data_format, self.rank + 2)
Example 13
 def __init__(self,
              pool_size=(2, 2),
              strides=(2, 2),
              padding='same',
              **kwargs):
     super(MaxPoolingWithArgmax2D, self).__init__(**kwargs)
     self.pool_size = conv_utils.normalize_tuple(pool_size, 2, 'pool_size')
     self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
     self.padding = conv_utils.normalize_padding(padding)
Example 14
 def __init__(self,
              realisation_batch_size,
              filters,
              kernel_size,
              pseudoinverse_l2_regularisation=0.1,
              regularisaton_relative_to_num_patches=False,
              strides=1,
              padding='valid',
              data_format=None,
              dilation_rate=1,
              activation=None,
              use_bias=True,
              kernel_initializer=tf.keras.initializers.TruncatedNormal(
                  mean=0., stddev=.1),
              bias_initializer='zeros',
              kernel_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              bias_constraint=None,
              trainable=True,
              name=None,
              **kwargs):
     super(TSConv2D, self).__init__(
         realisation_batch_size=realisation_batch_size,
         pseudoinverse_l2_regularisation=pseudoinverse_l2_regularisation,
         trainable=trainable,
         name=name,
         activity_regularizer=regularizers.get(activity_regularizer),
         **kwargs)
     rank = self.rank = 2
     if filters is not None and not isinstance(filters, int):
         filters = int(filters)
     self.filters = filters
     self.regularisaton_relative_to_num_patches = regularisaton_relative_to_num_patches
     self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank,
                                                   'kernel_size')
     self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
     self.padding = conv_utils.normalize_padding(padding)
     if (self.padding == 'causal'
             and not isinstance(self, (Conv1D, SeparableConv1D))):
          raise ValueError('Causal padding is only supported for `Conv1D` '
                           'and `SeparableConv1D`.')
     self.data_format = conv_utils.normalize_data_format(data_format)
     self.dilation_rate = conv_utils.normalize_tuple(
         dilation_rate, rank, 'dilation_rate')
     self.activation = activations.get(activation)
     self.use_bias = use_bias
     self.kernel_initializer = initializers.get(kernel_initializer)
     self.bias_initializer = initializers.get(bias_initializer)
     self.kernel_regularizer = regularizers.get(kernel_regularizer)
     self.bias_regularizer = regularizers.get(bias_regularizer)
     self.kernel_constraint = constraints.get(kernel_constraint)
     self.bias_constraint = constraints.get(bias_constraint)
     self.input_spec = (InputSpec(ndim=self.rank + 2),
                        InputSpec(ndim=self.rank + 2))
     self._padding_op = self._get_padding_op()
Example 15
    def __init__(self, rank,
                 lgroups,
                 lfilters,
                 kernel_size,
                 strides=1,
                 padding='valid',
                 data_format=None,
                 dilation_rate=1,
                 activation=None,
                 use_bias=True,
                 kernel_initializer='glorot_uniform',
                 bias_initializer='zeros',
                 kernel_regularizer=None,
                 bias_regularizer=None,
                 activity_regularizer=None,
                 kernel_constraint=None,
                 bias_constraint=None,
                 trainable=True,
                 name=None,
                 **kwargs):
        super(_GroupConv, self).__init__(
                trainable=trainable,
                name=name,
                activity_regularizer=regularizers.get(activity_regularizer),
                **kwargs)
        self.rank = rank
        if rank > 2:
            raise ValueError('The quick group convolution does not support 3D or higher dimensions.')
        initRank = rank
        self.lgroups = lgroups
        self.lfilters = lfilters
        self.kernel_size = conv_utils.normalize_tuple(
                kernel_size, rank, 'kernel_size')
        self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
        self.padding = conv_utils.normalize_padding(padding)
        if (self.padding == 'causal' and not isinstance(self, (Conv1D, SeparableConv1D))):
            raise ValueError('Causal padding is only supported for `Conv1D` and `SeparableConv1D`.')
        self.data_format = conv_utils.normalize_data_format(data_format)
        self.dilation_rate = conv_utils.normalize_tuple(
             dilation_rate, rank, 'dilation_rate')
        if rank == 1: # when rank=1, expand the tuples to 2D case.
            self.kernel_size = (1, *self.kernel_size)
            self.strides = (1, *self.strides)
            self.dilation_rate = (1, *self.dilation_rate)
        self.activation = activations.get(activation)
        self.use_bias = use_bias
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.bias_constraint = constraints.get(bias_constraint)
        self.input_spec = InputSpec(ndim=self.rank + 2)

        self.group_input_dim = None
        self.exp_dim_pos = None
Example 16
    def __init__(self, rank,
                 filters,
                 kernel_size,
                 strides=1,
                 padding='valid',
                 data_format=None,
                 dilation_rate=1,
                 activation=None,
                 use_bias=True,
                 kernel_initializer='glorot_uniform',
                 bias_initializer='zeros',
                 kernel_regularizer=None,
                 bias_regularizer=None,
                 activity_regularizer=None,
                 kernel_constraint=None,
                 bias_constraint=None,
                 trainable=True,
                 name=None,
                 mask=None,
                 **kwargs):
        super().__init__(
            trainable=trainable,
            name=name,
            activity_regularizer=regularizers.get(activity_regularizer),
            **kwargs)
        self.rank = rank
        self.filters = filters
        self.kernel_size = conv_utils.normalize_tuple(
            kernel_size, rank, 'kernel_size')
        self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
        self.padding = conv_utils.normalize_padding(padding)
        self.data_format = conv_utils.normalize_data_format(data_format)
        self.dilation_rate = conv_utils.normalize_tuple(
            dilation_rate, rank, 'dilation_rate')
        self.activation = activations.get(activation)
        self.use_bias = use_bias
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.bias_constraint = constraints.get(bias_constraint)
        self.input_spec = InputSpec(ndim=self.rank + 2)

        self.mask_type = mask
        self._mask = None

        # Initialization flag, because the first pass has to be calculated "using batch norm"
        self._initialized = tf.Variable(False, name="initialization_flag", trainable=False)

        self.kernel_shape = None

        self.kernel_weights = None
        self.kernel_log_scale = None
        self.bias = None
Example 17
  def __init__(self,
               kernel_size,
               strides=(1, 1),
               padding='valid',
               depth_multiplier=1,
               data_format=None,
               activation=None,
               use_bias=True,
               depthwise_initializer='glorot_uniform',
               bias_initializer='zeros',
               depthwise_regularizer=None,
               bias_regularizer=None,
               activity_regularizer=None,
               depthwise_constraint=None,
               bias_constraint=None,
               **kwargs):

    super(DepthwiseConv2D, self).__init__(**kwargs)

    self.rank = 2
    self.kernel_size = conv_utils.normalize_tuple(
        kernel_size, self.rank, 'kernel_size')
    if self.kernel_size[0] != self.kernel_size[1]:
      raise NotImplementedError("TF Encrypted currently only supports same "
                                "stride along the height and the width."
                                "You gave: {}".format(self.kernel_size))
    self.strides = conv_utils.normalize_tuple(strides, self.rank, 'strides')
    self.padding = conv_utils.normalize_padding(padding).upper()
    self.depth_multiplier = depth_multiplier
    self.data_format = conv_utils.normalize_data_format(data_format)
    if activation is not None:
      logger.info("Performing an activation before a pooling layer can result "
                  "in unnecessary performance loss. Check model definition in "
                  "case of missed optimization.")
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.depthwise_initializer = initializers.get(depthwise_initializer)
    self.bias_initializer = initializers.get(bias_initializer)

    # Not implemented arguments
    default_args_check(depthwise_regularizer,
                       "depthwise_regularizer",
                       "DepthwiseConv2D")
    default_args_check(bias_regularizer,
                       "bias_regularizer",
                       "DepthwiseConv2D")
    default_args_check(activity_regularizer,
                       "activity_regularizer",
                       "DepthwiseConv2D")
    default_args_check(depthwise_constraint,
                       "depthwise_constraint",
                       "DepthwiseConv2D")
    default_args_check(bias_constraint,
                       "bias_constraint",
                       "DepthwiseConv2D")
Example 18
 def __init__(self,
              rank,
              filters,
              kernel_size,
              strides=1,
              padding="valid",
              data_format=None,
              dilation_rate=1,
              activation=None,
              use_bias=True,
              kernel_initializer="glorot_uniform",
              bias_initializer="zeros",
              kernel_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              bias_constraint=None,
              **kwargs):
     super(Conv, self).__init__(
         activity_regularizer=regularizers.get(activity_regularizer),
         **kwargs)
     self.rank = rank
     self.filters = filters
     self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank,
                                                   "kernel_size")
     self.strides = conv_utils.normalize_tuple(strides, rank, "strides")
     self.padding = conv_utils.normalize_padding(padding)
     self.data_format = conv_utils.normalize_data_format(data_format)
     self.dilation_rate = conv_utils.normalize_tuple(
         dilation_rate, rank, "dilation_rate")
     self.activation = activations.get(activation)
     self.use_bias = use_bias
     self.kernel_initializer = initializers.get(kernel_initializer)
     self.bias_initializer = initializers.get(bias_initializer)
     self.kernel_regularizer = regularizers.get(kernel_regularizer)
     self.bias_regularizer = regularizers.get(bias_regularizer)
     self.kernel_constraint = constraints.get(kernel_constraint)
     self.bias_constraint = constraints.get(bias_constraint)
     self.supports_masking = False
     if self.mode == "diag":
         self.input_spec = [
             InputSpec(ndim=self.rank + 2),
             InputSpec(ndim=self.rank + 2),
         ]
     elif self.mode == "half":
         self.input_spec = [
             InputSpec(ndim=self.rank + 2),
             InputSpec(ndim=self.rank + 3),
         ]
     elif self.mode == "full":
         self.input_spec = [
             InputSpec(ndim=self.rank + 2),
             InputSpec(ndim=2 * self.rank + 3),
         ]
Example 19
 def __init__(self, pool_size=(2, 2), strides=None, dilation_rate=1,
              padding='valid', data_format=None, **kwargs):
     super(DilatedMaxPool2D, self).__init__(**kwargs)
      if strides is None or (dilation_rate != 1 and dilation_rate != (1, 1)):
         strides = (1, 1)
     self.pool_size = conv_utils.normalize_tuple(pool_size, 2, 'pool_size')
     self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
     self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, 2,
                                                     'dilation_rate')
     self.padding = conv_utils.normalize_padding(padding)
     self.data_format = conv_utils.normalize_data_format(data_format)
     self.input_spec = InputSpec(ndim=4)
Example 20
  def __init__(self,
               filters,
               kernel_size,
               strides=(1, 1),
               padding='valid',
               data_format=None,
               dilation_rate=(1, 1),
               activation='tanh',
               recurrent_activation='hard_sigmoid',
               use_bias=True,
               kernel_initializer='glorot_uniform',
               recurrent_initializer='orthogonal',
               bias_initializer='zeros',
               unit_forget_bias=True,
               kernel_regularizer=None,
               recurrent_regularizer=None,
               bias_regularizer=None,
               kernel_constraint=None,
               recurrent_constraint=None,
               bias_constraint=None,
               dropout=0.,
               recurrent_dropout=0.,
               **kwargs):
    super(ConvLSTM2DCell, self).__init__(**kwargs)
    self.filters = filters
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, 2, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = conv_utils.normalize_data_format(data_format)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, 2,
                                                    'dilation_rate')
    self.activation = activations.get(activation)
    self.recurrent_activation = activations.get(recurrent_activation)
    self.use_bias = use_bias

    self.kernel_initializer = initializers.get(kernel_initializer)
    self.recurrent_initializer = initializers.get(recurrent_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.unit_forget_bias = unit_forget_bias

    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.recurrent_regularizer = regularizers.get(recurrent_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)

    self.kernel_constraint = constraints.get(kernel_constraint)
    self.recurrent_constraint = constraints.get(recurrent_constraint)
    self.bias_constraint = constraints.get(bias_constraint)

    self.dropout = min(1., max(0., dropout))
    self.recurrent_dropout = min(1., max(0., recurrent_dropout))
    self.state_size = (self.filters, self.filters)
    self._dropout_mask = None
    self._recurrent_dropout_mask = None
Example 21
 def __init__(self,
              groups,
              filters,
              kernel_size,
              strides=1,
              padding='valid',
              data_format=None,
              dilation_rate=1,
              activation=None,
              use_bias=True,
              kernel_initializer='glorot_uniform',
              bias_initializer='zeros',
              kernel_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              bias_constraint=None,
              trainable=True,
              rank=2,
              split=True,
              name=None,
              **kwargs):
     super(GroupConv, self).__init__(
         trainable=trainable,
         name=name,
         activity_regularizer=regularizers.get(activity_regularizer),
         **kwargs)
     self.rank = rank
     self.split = split
     self.groups = groups
     self.filters = filters
     self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank,
                                                   'kernel_size')
     self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
     self.padding = conv_utils.normalize_padding(padding)
     if (self.padding == 'causal'
             and not isinstance(self, (Conv1D, SeparableConv1D))):
          raise ValueError('Causal padding is only supported for `Conv1D` '
                           'and `SeparableConv1D`.')
     self.data_format = conv_utils.normalize_data_format(data_format)
     self.dilation_rate = conv_utils.normalize_tuple(
         dilation_rate, rank, 'dilation_rate')
     self.activation = activations.get(activation)
     self.use_bias = use_bias
     self.kernel_initializer = initializers.get(kernel_initializer)
     self.bias_initializer = initializers.get(bias_initializer)
     self.kernel_regularizer = regularizers.get(kernel_regularizer)
     self.bias_regularizer = regularizers.get(bias_regularizer)
     self.kernel_constraint = constraints.get(kernel_constraint)
     self.bias_constraint = constraints.get(bias_constraint)
     self.input_spec = InputSpec(ndim=self.rank + 2)
Example 22
 def __init__(self, pool_function, pool_size, strides,
              padding='valid', data_format='channels_last',
              name=None, **kwargs):
   super(Pooling1D, self).__init__(name=name, **kwargs)
   if data_format is None:
     data_format = backend.image_data_format()
   if strides is None:
     strides = pool_size
   self.pool_function = pool_function
   self.pool_size = conv_utils.normalize_tuple(pool_size, 1, 'pool_size')
   self.strides = conv_utils.normalize_tuple(strides, 1, 'strides')
   self.padding = conv_utils.normalize_padding(padding)
   self.data_format = conv_utils.normalize_data_format(data_format)
   self.input_spec = InputSpec(ndim=3)
Example 23
 def __init__(self, rank,
              filters,
              kernel_size,
              sampling, # [None, 'up', 'down']
              strides=1,
              act='lrelu',
              noise=True,
              demodulate=True,
              padding='valid',
              data_format='channels_last',
              dilation_rate=1,
              activation=None,
              use_bias=True,
              kernel_initializer='glorot_uniform',
              bias_initializer='zeros',
              kernel_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              bias_constraint=None,
              trainable=True,
              name=None,
              **kwargs):
   super(ModConv2d, self).__init__(
       trainable=trainable,
       name=name,
       activity_regularizer=regularizers.get(activity_regularizer),
       **kwargs)
   self.rank = rank
   self.filters = filters
   self.noise = noise
   self.demodulate = demodulate
   self.act = act
   self.kernel_size = conv_utils.normalize_tuple(
       kernel_size, rank, 'kernel_size')
   self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
   self.padding = conv_utils.normalize_padding(padding)
   self.data_format = conv_utils.normalize_data_format(data_format)
   self.dilation_rate = conv_utils.normalize_tuple(
       dilation_rate, rank, 'dilation_rate')
   self.activation = activations.get(activation)
   self.use_bias = use_bias
   self.kernel_initializer = initializers.get(kernel_initializer)
   self.bias_initializer = initializers.get(bias_initializer)
   self.kernel_regularizer = regularizers.get(kernel_regularizer)
   self.bias_regularizer = regularizers.get(bias_regularizer)
   self.kernel_constraint = constraints.get(kernel_constraint)
   self.bias_constraint = constraints.get(bias_constraint)
   self.input_spec = [InputSpec(ndim=self.rank + 2), InputSpec(ndim=self.rank)]
   self.sampling = sampling
Example 24
 def __init__(self,
              rank,
              filters,
              kernel_size,
              alpha=0.5,
              strides=1,
              padding='valid',
              data_format='channels_last',
              dilation_rate=1,
              activation=None,
              use_bias=True,
              kernel_initializer='glorot_uniform',
              bias_initializer='zeros',
              kernel_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              bias_constraint=None,
              trainable=True,
              name=None,
              **kwargs):
     super(OctConv, self).__init__(
         trainable=trainable,
         name=name,
         activity_regularizer=regularizers.get(activity_regularizer),
         **kwargs)
     self.rank = rank
     self.alpha = alpha
     self.filters = filters
     self.lr_filters = int(self.alpha * self.filters)
     self.hr_filters = self.filters - self.lr_filters
     self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank,
                                                   'kernel_size')
     self.dilation_rate = conv_utils.normalize_tuple(
         dilation_rate, rank, 'dilation_rate')
     self.data_format = data_format
     self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
     self.padding = conv_utils.normalize_padding(padding)
     self.kernel_initializer = initializers.get(kernel_initializer)
     self.kernel_regularizer = regularizers.get(kernel_regularizer)
     self.kernel_constraint = constraints.get(kernel_constraint)
     self.bias_initializer = initializers.get(bias_initializer)
     self.bias_regularizer = regularizers.get(bias_regularizer)
     self.bias_constraint = constraints.get(bias_constraint)
     self.activation = activations.get(activation)
     self.use_bias = use_bias
     self.filter_names = ['hh', 'hl', 'lh', 'll']
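The OctConv constructor above splits the output channels between a low-resolution and a high-resolution branch according to alpha; a quick worked check of that split, using illustrative values that are not part of the original snippet:

# Illustrative values only; alpha and filters are whatever the caller passes.
alpha, filters = 0.5, 64
lr_filters = int(alpha * filters)   # 32 channels go to the low-resolution branch
hr_filters = filters - lr_filters   # the remaining 32 go to the high-resolution branch
assert (lr_filters, hr_filters) == (32, 32)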
Example 25
 def __init__(self,
              rank,
              filters,
              kernel_size,
              strides=1,
              padding='valid',
              data_format=None,
              dilation_rate=1,
              activation=None,
              use_bias=True,
              kernel_initializer='glorot_uniform',
              bias_initializer='zeros',
              kernel_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              bias_constraint=None,
              use_spectral_norm=False,
              is_training=False,
              trainable=True,
              name=None,
              **kwargs):
     super(MyKerasConv, self).__init__(
         trainable=trainable,
         name=name,
         activity_regularizer=regularizers.get(activity_regularizer),
         **kwargs)
     self.rank = rank
     self.filters = filters
     self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank,
                                                   'kernel_size')
     self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
     self.padding = conv_utils.normalize_padding(padding)
     self.data_format = conv_utils.normalize_data_format(data_format)
     self.dilation_rate = conv_utils.normalize_tuple(
         dilation_rate, rank, 'dilation_rate')
     self.activation = activations.get(activation)
     self.use_bias = use_bias
     self.kernel_initializer = initializers.get(kernel_initializer)
     self.bias_initializer = initializers.get(bias_initializer)
     self.kernel_regularizer = regularizers.get(kernel_regularizer)
     self.use_spectral_norm = use_spectral_norm
     self.is_training = is_training
     self.bias_regularizer = regularizers.get(bias_regularizer)
     self.kernel_constraint = constraints.get(kernel_constraint)
     self.bias_constraint = constraints.get(bias_constraint)
     self.input_spec = InputSpec(ndim=self.rank + 2)
Example 26
 def __init__(self,
              pool_size,
              strides,
              padding='valid',
              data_format=None,
              name=None,
              **kwargs):
     super(MaxPoolWithArgmax2D, self).__init__(name=name, **kwargs)
     if data_format is None:
         data_format = tf.keras.backend.image_data_format()
     if strides is None:
         strides = pool_size
     self.pool_size = conv_utils.normalize_tuple(pool_size, 2, 'pool_size')
     self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
     self.padding = conv_utils.normalize_padding(padding)
     self.data_format = conv_utils.normalize_data_format(data_format)
     self.input_spec = tf.keras.layers.InputSpec(ndim=4)
Example 27
    def __init__(self,
                 _pool_function,
                 pool_size,
                 strides,
                 padding='valid',
                 data_format=None,
                 **kwargs):
        super(Pooling2D, self).__init__(**kwargs)

        if data_format is None:
            data_format = 'channels_last'
        if strides is None:
            strides = pool_size
        self._pool_function = _pool_function
        self.pool_size = conv_utils.normalize_tuple(pool_size, 2, 'pool_size')
        self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
        self.padding = conv_utils.normalize_padding(padding).upper()
        self.data_format = conv_utils.normalize_data_format(data_format)
Example 28
    def __init__(self, rank, filters, kernel_size, dtype, strides=1, padding='valid', data_format=None, dilation_rate=1,
                 groups=1, activation=None, use_bias=True,
                 kernel_initializer=ComplexGlorotUniform(), bias_initializer=Zeros(),
                 kernel_regularizer=None, bias_regularizer=None,  # TODO: Not yet working
                 activity_regularizer=None, kernel_constraint=None, bias_constraint=None,
                 trainable=True, name=None, conv_op=None, **kwargs):
        if kernel_regularizer is not None or bias_regularizer is not None:
            logger.warning(f"Sorry, regularizers are not implemented yet, this parameter will take no effect")
        super(ComplexConv, self).__init__(
            trainable=trainable,
            name=name,
            activity_regularizer=regularizers.get(activity_regularizer),
            **kwargs)
        self.rank = rank
        self.my_dtype = tf.dtypes.as_dtype(dtype)
        # I use no default dtype to make sure I don't forget to give it to my ComplexConv layers
        if isinstance(filters, float):
            filters = int(filters)
        self.filters = filters
        self.groups = groups or 1
        self.kernel_size = conv_utils.normalize_tuple(
            kernel_size, rank, 'kernel_size')
        self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
        self.padding = conv_utils.normalize_padding(padding)
        self.data_format = conv_utils.normalize_data_format(data_format)
        self.dilation_rate = conv_utils.normalize_tuple(
            dilation_rate, rank, 'dilation_rate')

        self.activation = activations.get(activation)
        self.use_bias = use_bias

        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.bias_constraint = constraints.get(bias_constraint)
        self.input_spec = InputSpec(min_ndim=self.rank + 2)

        self._validate_init()
        self._is_causal = self.padding == 'causal'
        self._channels_first = self.data_format == 'channels_first'
        self._tf_data_format = conv_utils.convert_data_format(
            self.data_format, self.rank + 2)
Example 29
 def __init__(self,
              pool_size=2,
              strides=None,
              padding='valid',
              data_format='channels_last',
              name=None,
              dtype=DEFAULT_COMPLEX_TYPE,
              **kwargs):
     self.my_dtype = dtype
     super(ComplexPooling1D, self).__init__(name=name, **kwargs)
     if data_format is None:
         data_format = backend.image_data_format()
     if strides is None:
         strides = pool_size
     self.pool_size = conv_utils.normalize_tuple(pool_size, 1, 'pool_size')
     self.strides = conv_utils.normalize_tuple(strides, 1, 'strides')
     self.padding = conv_utils.normalize_padding(padding)
     self.data_format = conv_utils.normalize_data_format(data_format)
     self.input_spec = InputSpec(ndim=3)
Example 30
    def __init__(self,
                 filters,
                 kernel_size,
                 kernel_function,
                 strides=(1, 1),
                 padding='valid',
                 data_format=None,
                 dilation_rate=(1, 1),
                 activation=None,
                 use_bias=True,
                 kernel_initializer='glorot_uniform',
                 bias_initializer='zeros',
                 kernel_regularizer=None,
                 bias_regularizer=None,
                 activity_regularizer=None,
                 kernel_constraint=None,
                 bias_constraint=None,
                 **kwargs):
        super(KernelConv2D, self).__init__(**kwargs)
        self.rank = 2
        self.filters = filters
        self.kernel_function = kernel_function
        self.kernel_size = conv_utils.normalize_tuple(kernel_size, self.rank,
                                                      'kernel_size')
        self.strides = conv_utils.normalize_tuple(strides, self.rank,
                                                  'strides')
        self.padding = conv_utils.normalize_padding(padding)
        self.data_format = conv_utils.normalize_data_format(data_format)
        self.dilation_rate = conv_utils.normalize_tuple(
            dilation_rate, self.rank, 'dilation_rate')
        self.activation = activations.get(activation)
        self.use_bias = use_bias
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.activity_regularizer = regularizers.get(activity_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.bias_constraint = constraints.get(bias_constraint)
        self.input_spec = engine.base_layer.InputSpec(ndim=self.rank + 2)

        self.kernel = self.bias = None
Example 31
    def __init__(self,
                 rank,
                 filters,
                 kernel_size,
                 make_posterior_fn,
                 make_prior_fn,
                 kl_weight=None,
                 kl_use_exact=False,
                 strides=1,
                 padding='valid',
                 data_format=None,
                 dilation_rate=1,
                 activation=None,
                 use_bias=True,
                 activity_regularizer=None,
                 **kwargs):
        super(ConvVariational,
              self).__init__(activity_regularizer=tf.keras.regularizers.get(
                  activity_regularizer),
                             **kwargs)
        self.rank = rank

        self.filters = filters
        self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank,
                                                      'kernel_size')

        self._make_posterior_fn = make_posterior_fn
        self._make_prior_fn = make_prior_fn
        self._kl_divergence_fn = _make_kl_divergence_penalty(kl_use_exact,
                                                             weight=kl_weight)

        self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
        self.padding = conv_utils.normalize_padding(padding)
        if (self.padding == 'causal'
                and not isinstance(self, Conv1DVariational)):
            raise ValueError(
                'Causal padding is only supported for `Conv1DVariational`.')
        self.data_format = conv_utils.normalize_data_format(data_format)
        self.dilation_rate = conv_utils.normalize_tuple(
            dilation_rate, rank, 'dilation_rate')
        self.activation = tf.keras.activations.get(activation)
        self.use_bias = use_bias
Example 32
  def test_normalize_padding(self):
    self.assertEqual('same', conv_utils.normalize_padding('SAME'))
    self.assertEqual('valid', conv_utils.normalize_padding('VALID'))

    with self.assertRaises(ValueError):
      conv_utils.normalize_padding('invalid')
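The test above exercises `normalize_padding` directly; below is a hypothetical companion sketch, not part of the original test file, showing the analogous behaviour of `normalize_tuple` from the same `conv_utils` module:

from tensorflow.python.keras.utils import conv_utils

# An int is broadcast to the requested rank; a matching tuple passes through.
assert conv_utils.normalize_tuple(3, 2, 'kernel_size') == (3, 3)
assert conv_utils.normalize_tuple((1, 2), 2, 'strides') == (1, 2)

# A tuple of the wrong length is rejected with a ValueError.
try:
    conv_utils.normalize_tuple((1, 2, 3), 2, 'kernel_size')
except ValueError:
    pass
else:
    raise AssertionError('expected a ValueError for a length-3 tuple with rank 2')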