def __init__(self, offset_creator_class, weight_basis,
             filters, kernel_size,
             strides=(1, 1), padding='valid', data_format=None,
             activation=None, use_bias=True,
             kernel_initializer='glorot_uniform', bias_initializer='zeros',
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             **kwargs):
    super(RProjLocallyConnected2D, self).__init__(offset_creator_class, weight_basis, **kwargs)
    self.filters = filters
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, 2, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    if self.padding != 'valid':
        raise ValueError('Invalid border mode for LocallyConnected2D '
                         '(only "valid" is supported): ' + padding)
    self.data_format = conv_utils.normalize_data_format(data_format)
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.input_spec = InputSpec(ndim=4)
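# For reference: every constructor in this collection leans on the same `conv_utils`
# helpers. A rough sketch of their behaviour follows; the import path shown is the
# standalone-Keras one (`keras.utils.conv_utils`) and may differ slightly between
# Keras/TensorFlow versions, which is also why some snippets below reach for
# `K.normalize_data_format` instead.
from keras.utils import conv_utils

conv_utils.normalize_tuple(3, 2, 'kernel_size')   # -> (3, 3)
conv_utils.normalize_tuple((1, 2), 2, 'strides')  # -> (1, 2)
conv_utils.normalize_padding('Valid')             # -> 'valid'
conv_utils.normalize_data_format(None)            # -> backend default, e.g. 'channels_last'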
def __init__(self, filters, kernel_size, strides=1, padding='same', dilation_rate=1,
             bias_initializer='zeros', kernel_initializer='glorot_uniform',
             activation='linear', weights=None, border_mode='valid',
             subsample_length=1,
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             use_bias=True, input_dim=None, input_length=None,
             tied_to=None, data_format='channels_last', rank=1,
             learnedKernel=None, layer_inner=None, **kwargs):
    if border_mode not in {'valid', 'same'}:
        raise ValueError('Invalid border mode for Convolution1D: ' + border_mode)
    self.input_spec = [InputSpec(ndim=3)]
    self.input_dim = input_dim
    self.input_length = input_length
    self.tied_to = tied_to
    # Keep a copy of the pre-learned kernel only when one is actually supplied.
    self.learnedKernel = np.array(learnedKernel) if learnedKernel is not None else None
    self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, rank, 'dilation_rate')
    self.data_format = data_format
    self.filters = filters
    # Reuse the kernel size of the layer this one is tied to, if any.
    if self.tied_to is not None:
        self.kernel_size = self.tied_to.kernel_size
    else:
        self.kernel_size = kernel_size
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.activation = activations.get(activation)
    self.border_mode = border_mode
    self.subsample_length = subsample_length
    self.subsample = (subsample_length, 1)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.use_bias = use_bias
    self.rank = 1  # this layer is strictly one-dimensional
    self.layer_inner = layer_inner
    if self.input_dim:
        kwargs['input_shape'] = (self.input_length, self.input_dim)
    super(Convolution1D_tied, self).__init__(**kwargs)
def __init__(self, filters, kernel_size,
             strides=(1, 1), padding='valid', data_format=None,
             activation=None, use_bias=True,
             kernel_initializer='glorot_uniform', bias_initializer='zeros',
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             implementation=1, **kwargs):
    super().__init__(**kwargs)
    self.filters = filters
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, 2, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, 2, 'strides', allow_zero=True)
    self.padding = conv_utils.normalize_padding(padding)
    if self.padding != 'valid' and implementation == 1:
        raise ValueError('Invalid border mode for LocallyConnected2D '
                         '(only "valid" is supported if implementation is 1): ' + padding)
    self.data_format = conv_utils.normalize_data_format(data_format)
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.implementation = implementation
    self.input_spec = InputSpec(ndim=4)
def __init__(self, filters, kernel_size, rank=1,
             strides=1, padding='valid', data_format='channels_last',
             dilation_rate=1, activation=None, use_bias=True,
             kernel_initializer='glorot_uniform', bias_initializer='zeros',
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             **kwargs):
    super(Conv1D_linearphase, self).__init__(**kwargs)
    self.rank = rank
    self.filters = filters
    self.kernel_size_ = kernel_size
    # Only half of the symmetric kernel is stored as trainable weights; the full
    # (linear-phase) kernel is reconstructed from it later.
    if kernel_size % 2:
        self.kernel_size = conv_utils.normalize_tuple(kernel_size // 2 + 1, rank, 'kernel_size')
    else:
        self.kernel_size = conv_utils.normalize_tuple(kernel_size // 2, rank, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = conv_utils.normalize_data_format(data_format)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, rank, 'dilation_rate')
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.input_spec = InputSpec(ndim=self.rank + 2)
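# The halving above only makes sense together with a `build`/`call` that mirrors the
# stored half back into a full symmetric kernel. That code is not part of this
# snippet; the helper below is a minimal sketch of such a reconstruction under that
# assumption (the helper name `_mirror_half_kernel` is illustrative, not from the source).
from keras import backend as K

def _mirror_half_kernel(half_kernel, full_size):
    """Rebuild a symmetric (linear-phase) 1-D kernel from its stored half.

    half_kernel: tensor of shape (ceil(full_size / 2), in_channels, filters)
    full_size:   the original, user-facing kernel length
    """
    if full_size % 2:
        # Odd length: the centre tap is shared, so mirror everything but it.
        mirrored = K.reverse(half_kernel[1:], axes=0)
    else:
        # Even length: mirror the whole stored half.
        mirrored = K.reverse(half_kernel, axes=0)
    return K.concatenate([mirrored, half_kernel], axis=0)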
def __init__(self, pool_function, pool_size, strides,
             padding='valid', data_format='channels_last',
             name=None, **kwargs):
    super(Pooling3D, self).__init__(name=name, **kwargs)
    if data_format is None:
        data_format = backend.image_data_format()
    if strides is None:
        strides = pool_size
    self.pool_function = pool_function
    self.pool_size = conv_utils.normalize_tuple(pool_size, 3, 'pool_size')
    self.strides = conv_utils.normalize_tuple(strides, 3, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = conv_utils.normalize_data_format(data_format)
    self.input_spec = InputSpec(ndim=5)
def __init__(self, rank, filters, kernel_size,
             strides=1, padding='valid', data_format=None, dilation_rate=1,
             activation=None, use_bias=True,
             kernel_initializer='he_complex', bias_initializer='zeros',
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             seed=None, spectral_parametrization=False,
             **kwargs):
    super(_ComplexConv, self).__init__(**kwargs)
    self.rank = rank
    self.filters = filters
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = K.normalize_data_format(data_format)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, rank, 'dilation_rate')
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = cinitializers.get(kernel_initializer)
    self.bias_initializer = cinitializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.input_spec = InputSpec(ndim=self.rank + 2)
    self.spectral_parametrization = spectral_parametrization
    # np.random.randint expects integer bounds.
    self.seed = seed if seed is not None else np.random.randint(1, 10**6)
def __init__(self, filters, kernel_size,
             strides=1, padding='valid', data_format=None, dilation_rate=1,
             activation=None, use_bias=True,
             kernel_initializer='glorot_uniform', bias_initializer='zeros',
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             tied_to=None, **kwargs):
    super().__init__(filters, kernel_size, **kwargs)
    self.supports_masking = True
    self.tied_to = tied_to
    self.filters = filters
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, 2, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = K.normalize_data_format(data_format)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, 2, 'dilation_rate')
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.input_spec = InputSpec(ndim=4)
def __init__(self, rank, filters, kernel_size,
             strides=1, padding='valid', data_format=None, dilation_rate=1,
             activation=None, use_bias=True,
             kernel_initializer='glorot_uniform', bias_initializer='zeros',
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             Masked=False, kernel_mask_val=None, **kwargs):
    super(MaskedConv, self).__init__(**kwargs)
    self.rank = rank
    self.filters = filters
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = K.normalize_data_format(data_format)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, rank, 'dilation_rate')
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.input_spec = InputSpec(ndim=self.rank + 2)
    self.Masked = Masked
    self.kernel_mask_val = kernel_mask_val
def __init__(self, filters, kernel_size,
             strides=(1, 1), padding='valid', group=1,
             data_format=None, dilation_rate=(1, 1),
             activation=None, use_bias=True,
             kernel_initializer='glorot_uniform', bias_initializer='zeros',
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             **kwargs):
    super(GroupConv2D, self).__init__(
        filters=filters,
        kernel_size=kernel_size,
        strides=strides,
        padding=padding,
        data_format=data_format,
        dilation_rate=dilation_rate,
        activation=activation,
        use_bias=use_bias,
        kernel_initializer=kernel_initializer,
        bias_initializer=bias_initializer,
        kernel_regularizer=kernel_regularizer,
        bias_regularizer=bias_regularizer,
        activity_regularizer=activity_regularizer,
        kernel_constraint=kernel_constraint,
        bias_constraint=bias_constraint,
        **kwargs)
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, 2, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, 2, 'dilation_rate')
    self.channel_axis = None
    self.group = group
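# The constructor above only stores `group`; the grouped convolution itself happens in
# `build`/`call`, which are not shown. The helper below is a minimal sketch of how a
# grouped 2-D convolution is commonly realised with the Keras backend, assuming
# channels_last data and a kernel of shape (kh, kw, in_channels // group, filters).
# All names other than K.conv2d / K.concatenate / K.int_shape are illustrative.
from keras import backend as K

def _group_conv2d(inputs, kernel, group, strides=(1, 1), padding='same'):
    """Split channels into `group` slices, convolve each with its own kernel slice."""
    in_per_group = K.int_shape(inputs)[-1] // group    # input channels per group
    out_per_group = K.int_shape(kernel)[-1] // group   # filters per group
    outputs = []
    for g in range(group):
        x_g = inputs[..., g * in_per_group:(g + 1) * in_per_group]
        k_g = kernel[..., g * out_per_group:(g + 1) * out_per_group]
        outputs.append(K.conv2d(x_g, k_g, strides=strides, padding=padding,
                                data_format='channels_last'))
    return K.concatenate(outputs, axis=-1)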
def __init__(self, filters, kernel_size,
             strides=1, padding='valid', dilation_rate=1,
             activation=None, use_bias=True,
             kernel_initializer='glorot_uniform', bias_initializer='zeros',
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             function=None, arguments=None, normalized_position=True,
             **kwargs):
    super(SemiConv2D, self).__init__(**kwargs)
    self.rank = 2
    self.filters = filters
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, self.rank, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, self.rank, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, self.rank, 'dilation_rate')
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.input_spec = InputSpec(ndim=self.rank + 2)
    self.function = function
    self.arguments = arguments if arguments else {}
    self.normalized_position = normalized_position
def __init__(self, filters, kernel_size,
             strides=(1, 1), padding='valid', data_format=None, dilation_rate=1,
             activation=None, use_bias=True,
             kernel_initializer='glorot_uniform', bias_initializer='zeros',
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             template_tensor=None, clusterid=None, **kwargs):
    super(ModifiedConv2D, self).__init__(**kwargs)
    self.rank = 2
    self.filters = filters
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, 2, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = conv_utils.normalize_data_format(data_format)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, 2, 'dilation_rate')
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.input_spec = InputSpec(ndim=self.rank + 2)
    self.template_tensor = template_tensor
    self.clusterid = clusterid
def __init__(self, filters, kernel_size,
             strides=1, padding='valid', data_format=None, dilation_rate=1,
             activation=None, use_bias=True,
             kernel_initializer='glorot_uniform', bias_initializer='zeros',
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             num_bits=8, **kwargs):
    super(QuantizedConv2D, self).__init__(**kwargs)
    self.rank = 2
    self.filters = filters
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, self.rank, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, self.rank, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = K.normalize_data_format(data_format)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, self.rank, 'dilation_rate')
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.num_bits = num_bits
    self.quantization = _Quantization(num_bits=self.num_bits)
    self.input_spec = InputSpec(ndim=self.rank + 2)
def __init__(self, rank, filters, kernel_size, time_distributed,
             strides=1, padding='valid', data_format=None, dilation_rate=1,
             activation=None,
             # dynamic_activation='softmax',
             use_bias=False,
             kernel_initializer='glorot_uniform', bias_initializer='zeros',
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             **kwargs):
    super(adaptive_conv, self).__init__(**kwargs)
    self.rank = rank
    self.filters = filters
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank, 'kernel_size')
    self.time_distributed = time_distributed
    self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = K.normalize_data_format(data_format)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, rank, 'dilation_rate')
    self.activation = activations.get(activation)
    # self.dynamic_activation = activations.relu(alpha=0.2)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
def __init__(self, filters, kernel_size,
             strides=1, padding='valid', data_format=None, dilation_rate=1,
             activation=None, use_bias=True,
             kernel_initializer='glorot_uniform', bias_initializer='zeros',
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             kl_lambda=1e-6, drop_prob=0.20, batch_size=8,
             **kwargs):
    super(Conv2D_wd, self).__init__(**kwargs)
    self.rank = 2
    self.filters = filters
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, 2, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = conv_utils.normalize_data_format(data_format)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, 2, 'dilation_rate')
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.kl_lambda = kl_lambda
    self.drop_prob = drop_prob
    self.batch_size = batch_size
def __init__(self, filters, kernel_size,
             strides=1, padding='same', data_format=None,
             activation='tanh',
             kernel_initializer='glorot_uniform', bias_initializer='zeros',
             use_bias=True, **kwargs):
    super(self_conv1d, self).__init__(**kwargs)
    self.rank = 1
    self.filters = filters
    self.data_format = conv_utils.normalize_data_format(data_format)
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, self.rank, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, self.rank, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.supports_masking = True
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
def __init__(self, rank, filters, kernel_size,
             strides=1, padding='valid', data_format=None, dilation_rate=1,
             activation=None, use_bias=True,
             kernel_initializer='glorot_uniform', bias_initializer='zeros',
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             output_merge=False, **kwargs):
    super().__init__(**kwargs)
    self.rank = rank
    self.filters = filters
    # A leading dimension of size 2 is prepended to the kernel, so `kernel_size`
    # must already be a tuple; the stored kernel is (rank + 1)-dimensional.
    self.kernel_size = conv_utils.normalize_tuple((2,) + kernel_size, rank + 1, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = K.normalize_data_format(data_format)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, rank, 'dilation_rate')
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.output_merge = output_merge
def __init__(self, filters, kernel_size,
             strides=(1, 1), padding='valid', data_format='channels_last',
             dilation_rate=(1, 1), activation=None, use_bias=True,
             kernel_initializer='glorot_uniform', bias_initializer='zeros',
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             **kwargs):
    super(pruned_Conv2D, self).__init__(**kwargs)
    self.rank = 2
    self.filters = filters
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, self.rank, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, self.rank, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = K.normalize_data_format(data_format)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, self.rank, 'dilation_rate')
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.input_spec = InputSpec(ndim=self.rank + 2)
def __init__(self, pool_function, pool_size, strides, padding="valid", data_format=None, name=None, **kwargs): super().__init__(name=name, **kwargs) if data_format is None: data_format = backend.image_data_format() if strides is None: strides = pool_size self.pool_function = pool_function self.pool_size = conv_utils.normalize_tuple(pool_size, 2, "pool_size") self.strides = conv_utils.normalize_tuple(strides, 2, "strides", allow_zero=True) self.padding = conv_utils.normalize_padding(padding) self.data_format = conv_utils.normalize_data_format(data_format) self.input_spec = InputSpec(ndim=4)
def __init__(self, n_replicas=1, strides=(1, 1), padding='valid',
             dilation_rate=(1, 1), activation='relu', components=None,
             **kwargs):
    super(CosineSimilarity2D, self).__init__(**kwargs)
    self.n_replicas = n_replicas
    self.rank = 2
    self.strides = conv_utils.normalize_tuple(strides, self.rank, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, self.rank, 'dilation_rate')
    self.components = components
    self.activation = activations.get(activation)
    if K.image_data_format() != 'channels_last':
        raise ValueError("The layer requires `K.image_data_format()` to "
                         "be 'channels_last'.")
def __init__(self, filters, kernel_size,
             strides=(1, 1), padding='valid', data_format=None,
             dilation_rate=(1, 1), activation=None, use_bias=True,
             kernel_initializer='he_normal', bias_initializer='zeros',
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             rank=2, **kwargs):
    super(GroupConv2D_Backend, self).__init__(**kwargs)
    self.rank = rank
    self.filters = filters
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = K.normalize_data_format(data_format)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, rank, 'dilation_rate')
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.input_spec = InputSpec(ndim=self.rank + 2)
def __init__(self, rank, filters, kernel_size,
             strides=1, padding='valid', data_format=None, dilation_rate=1,
             activation=None, use_bias=False,
             kernel_regularizer=None, **kwargs):
    # Allow `input_dim` as a shortcut for `input_shape`, as the Keras core layers do.
    if 'input_shape' not in kwargs and 'input_dim' in kwargs:
        kwargs['input_shape'] = (kwargs.pop('input_dim'),)
    super(GaussScaler, self).__init__(**kwargs)
    self.rank = rank
    self.filters = filters
    self.num_filters = filters
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = conv_utils.normalize_data_format(data_format)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, rank, 'dilation_rate')
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.input_spec = InputSpec(ndim=self.rank + 2)
def __init__(self, kernel_width=3, sigma=1, filters=1,
             strides=(1, 1), padding='valid', data_format=None,
             kernel_initializer='uniform', kernel_regularizer=None, kernel_constraint=None,
             use_bias=True, bias_initializer='zeros', bias_regularizer=None, bias_constraint=None,
             activity_regularizer=None, activation=None,
             **kwargs):
    self._kernel_width = kernel_width
    self.kernel_size = (kernel_width, kernel_width)
    self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    if self.padding != 'valid':
        raise ValueError('Invalid border mode for Focus '
                         '(only "valid" is supported): ' + padding)
    self.activation = activations.get(activation)
    self._gaussian = _gaussian(kernel_width, sigma)
    self.filters = filters
    self.data_format = K.normalize_data_format(data_format)
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.use_bias = use_bias
    self.bias_initializer = initializers.get(bias_initializer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.bias_constraint = constraints.get(bias_constraint)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    super(Focus, self).__init__(**kwargs)
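# `_gaussian(kernel_width, sigma)` is referenced above but not defined in this snippet.
# A minimal sketch of such a helper, under the assumption that it returns a normalized
# 2-D Gaussian window of shape (kernel_width, kernel_width):
import numpy as np

def _gaussian(kernel_width, sigma):
    """Return a (kernel_width, kernel_width) Gaussian window that sums to 1."""
    ax = np.arange(kernel_width) - (kernel_width - 1) / 2.0
    xx, yy = np.meshgrid(ax, ax)
    window = np.exp(-(xx ** 2 + yy ** 2) / (2.0 * sigma ** 2))
    return window / window.sum()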
def __init__(self, pool_size=(2, 2), strides=None, padding='same',
             data_format=None, **kwargs):
    super(UnPooling2D, self).__init__(**kwargs)
    if strides is None:
        strides = pool_size
    self.pool_size = conv_utils.normalize_tuple(pool_size, 2, 'pool_size')
    self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = conv_utils.normalize_data_format(data_format)
    # The layer expects two 4-D inputs.
    self.input_spec = [InputSpec(ndim=4), InputSpec(ndim=4)]
    self.expected_out_shape = None
    self.size = None
    if padding != 'same':
        print('This layer is implemented only for padding="same".')
    if pool_size != strides:
        print('pool_size:', pool_size, 'strides:', strides)
        raise ValueError('Keep strides and pool_size equal.')
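# Given the two-tensor `input_spec`, the layer is presumably called with both the
# pooled feature map and the pre-pooling reference tensor. The snippet below is a
# usage sketch under that assumption only; the exact call convention (input order,
# output shape) is not shown in this file.
from keras.layers import Input, MaxPooling2D
from keras.models import Model

x = Input(shape=(32, 32, 16))
pooled = MaxPooling2D(pool_size=(2, 2), padding='same')(x)
unpooled = UnPooling2D(pool_size=(2, 2), padding='same')([pooled, x])
model = Model(inputs=x, outputs=unpooled)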
def __init__(self, rank, patch_size,
             strides=1, padding='valid', data_format=None, dilation_rate=1,
             activation=None, use_bias=True,
             kernel_initializer='glorot_uniform',
             activity_regularizer=None, kernel_constraint=None,
             **kwargs):
    super(PatchExtracter, self).__init__(**kwargs)
    self.rank = rank
    self.patch_size = conv_utils.normalize_tuple(patch_size, rank, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = conv_utils.normalize_data_format(data_format)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, rank, 'dilation_rate')
    self.input_spec = InputSpec(ndim=self.rank + 2)
def __init__(self, fwh_projector, rank, filters, kernel_size,
             strides=1, padding='valid', data_format=None, dilation_rate=1,
             activation=None, use_bias=True,
             kernel_initializer='glorot_uniform', bias_initializer='zeros',
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             **kwargs):
    super(_RProjFWHConv, self).__init__(**kwargs)
    self.fwh_projector = fwh_projector
    self.rank = rank
    self.filters = filters
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = conv_utils.normalize_data_format(data_format)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, rank, 'dilation_rate')
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.input_spec = InputSpec(ndim=self.rank + 2)
def __init__(self, pool_size=2, strides=None, padding='valid',
             default_value=0.0, terminate_mask=False, **kwargs):
    assert padding in ['valid', 'same'], "Invalid value for `padding`: " + padding
    super().__init__(**kwargs)
    if strides is None:
        strides = pool_size
    self.pool_size = conv_utils.normalize_tuple(pool_size, 1, 'pool_size')
    self.strides = conv_utils.normalize_tuple(strides, 1, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.default_value = default_value
    self.terminate_mask = terminate_mask
    self.input_spec = None  # removed ndim restriction: InputSpec(ndim=3)
def __init__(self, filters, kernel_size,
             strides=1, padding='valid', data_format=None, dilation_rate=1,
             rank=2, **kwargs):
    super(GradientConv2D, self).__init__(**kwargs)
    self.filters = filters
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
    self.data_format = conv_utils.normalize_data_format(data_format)
    self.padding = conv_utils.normalize_padding(padding)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, rank, 'dilation_rate')
    # Sobel kernels for horizontal (sx) and vertical (sy) image gradients.
    self.sx = np.array([[-1, 0, 1],
                        [-2, 0, 2],
                        [-1, 0, 1]])
    self.sy = np.array([[1, 2, 1],
                        [0, 0, 0],
                        [-1, -2, -1]])
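# How `sx`/`sy` are consumed in `call` is not shown here. The helper below is a
# minimal sketch of applying fixed Sobel kernels to a single-channel, channels_last
# input with the Keras backend; the reshape to (3, 3, 1, 1) and the single-channel
# assumption are mine, not from the source.
import numpy as np
from keras import backend as K

def sobel_gradients(inputs, sx, sy):
    """Apply fixed 3x3 Sobel kernels to a (batch, height, width, 1) tensor."""
    kx = K.constant(sx.reshape(3, 3, 1, 1), dtype='float32')
    ky = K.constant(sy.reshape(3, 3, 1, 1), dtype='float32')
    gx = K.conv2d(inputs, kx, strides=(1, 1), padding='same', data_format='channels_last')
    gy = K.conv2d(inputs, ky, strides=(1, 1), padding='same', data_format='channels_last')
    return gx, gy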
def test_invalid_padding():
    with pytest.raises(ValueError):
        conv_utils.normalize_padding('diagonal')
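# A companion test in the same style could cover the tuple helper as well. Sketch,
# assuming the usual Keras behaviour of `conv_utils.normalize_tuple` raising
# ValueError on a tuple of the wrong length (relies on the same `pytest` and
# `conv_utils` imports as the test above).
def test_invalid_strides_length():
    with pytest.raises(ValueError):
        conv_utils.normalize_tuple((1, 1, 1), 2, 'strides')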
def __init__(self,
             num_filters=1,
             kernel_size=(1, 1),
             bin_sizes=[1, 2, 3, 6],
             pool_mode='avg',
             padding='valid',
             activation=None,
             use_bias=True,
             kernel_initializer='glorot_uniform',
             bias_initializer='zeros',
             kernel_regularizer=None,
             bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None,
             bias_constraint=None,
             **kwargs):
    """
    Initialize a new Pyramid Pooling Module.

    Args:
        num_filters: the number of filters per convolutional unit
        kernel_size: the size of each convolutional kernel
        bin_sizes: sizes for pooling bins
        pool_mode: pooling mode to use
        padding: One of `"valid"` or `"same"` (case-insensitive).
        activation: Activation function to use
        use_bias: whether the layer uses a bias vector
        kernel_initializer: Initializer for kernel weights
        bias_initializer: Initializer for bias vector
        kernel_regularizer: Regularizer function applied to kernel weights
        bias_regularizer: Regularizer function applied to bias vector
        activity_regularizer: Regularizer function applied to output
        kernel_constraint: Constraint function applied to kernel
        bias_constraint: Constraint function applied to bias vector
        kwargs: keyword arguments for the Layer super constructor

    Returns:
        None
    """
    if padding != 'same' and any(x > 1 for x in kernel_size):
        raise ValueError(
            "padding should be 'same' if the kernel size is larger than (1, 1)"
        )
    # setup instance variables
    self.input_spec = InputSpec(ndim=4)
    self.num_filters = num_filters
    self.kernel_size = kernel_size
    self.bin_sizes = bin_sizes
    self.pool_mode = pool_mode
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = 'channels_last'
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    # initialize the kernels and biases
    self.kernels = None
    self.biases = None
    # call the super constructor
    super(PyramidPoolingModule, self).__init__(**kwargs)
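# Usage sketch for the module above, assuming a channels_last 4-D input (the layer
# hard-codes `data_format = 'channels_last'`) and that the class is importable as
# `PyramidPoolingModule`; the input size is illustrative, and the output shape
# depends on the `build`/`call` implementation, which is not shown here.
from keras.layers import Input
from keras.models import Model

inputs = Input(shape=(60, 60, 256))
ppm = PyramidPoolingModule(num_filters=64, bin_sizes=[1, 2, 3, 6], pool_mode='avg')(inputs)
model = Model(inputs=inputs, outputs=ppm)
model.summary()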
def __init__(self, filters, kernel_size,
             retention_ratio=None, learn_retention_ratio=False,
             strides=(1, 1), padding='valid', data_format=None, dilation_rate=(1, 1),
             activation='tanh', recurrent_activation='hard_sigmoid',
             use_bias=True,
             kernel_initializer='glorot_uniform', recurrent_initializer='orthogonal',
             bias_initializer='zeros', unit_forget_bias=True,
             kernel_regularizer=None, recurrent_regularizer=None, bias_regularizer=None,
             kernel_constraint=None, recurrent_constraint=None, bias_constraint=None,
             dropout=0., recurrent_dropout=0.,
             **kwargs):
    super(ConvlpRNN2DCell, self).__init__(**kwargs)
    self.filters = filters
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, 2, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = conv_utils.normalize_data_format(data_format)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, 2, 'dilation_rate')
    self.activation = activations.get(activation)
    self.recurrent_activation = activations.get(recurrent_activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.recurrent_initializer = initializers.get(recurrent_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.unit_forget_bias = unit_forget_bias
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.recurrent_regularizer = regularizers.get(recurrent_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.recurrent_constraint = constraints.get(recurrent_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    if K.backend() == 'theano' and (dropout or recurrent_dropout):
        warnings.warn(
            'RNN dropout is no longer supported with the Theano backend '
            'due to technical limitations. '
            'You can either set `dropout` and `recurrent_dropout` to 0, '
            'or use the TensorFlow backend.')
        dropout = 0.
        recurrent_dropout = 0.
    self.dropout = min(1., max(0., dropout))
    self.recurrent_dropout = min(1., max(0., recurrent_dropout))
    self.state_size = (self.filters, self.filters)
    self._dropout_mask = None
    self._recurrent_dropout_mask = None
    self._learn_retention_ratio = learn_retention_ratio
    self._num_units = filters
    self.set_retention_ratio = retention_ratio
def __init__(self, rank, filters, kernel_size,
             strides=1, padding='valid', data_format=None, dilation_rate=1,
             activation=None,  # key of activation
             use_bias=True, normalize_weight=False,
             kernel_initializer='he_complex', bias_initializer='zeros',
             gamma_diag_initializer=sqrt_init, gamma_off_initializer='zeros',
             kernel_regularizer=None, bias_regularizer=None,
             gamma_diag_regularizer=None, gamma_off_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             gamma_diag_constraint=None, gamma_off_constraint=None,
             init_criterion='he', seed=None,
             spectral_parametrization=False, transposed=False,
             epsilon=1e-7, **kwargs):
    super(_ComplexConv, self).__init__(**kwargs)
    self.rank = rank
    self.filters = filters
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = normalize_data_format(data_format)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, rank, 'dilation_rate')
    self.activation = activation
    self.use_bias = use_bias
    self.normalize_weight = normalize_weight
    self.init_criterion = init_criterion
    self.spectral_parametrization = spectral_parametrization
    self.transposed = transposed
    self.epsilon = epsilon
    self.kernel_initializer = sanitizedInitGet(kernel_initializer)
    self.bias_initializer = sanitizedInitGet(bias_initializer)
    self.gamma_diag_initializer = sanitizedInitGet(gamma_diag_initializer)
    self.gamma_off_initializer = sanitizedInitGet(gamma_off_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.gamma_diag_regularizer = regularizers.get(gamma_diag_regularizer)
    self.gamma_off_regularizer = regularizers.get(gamma_off_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.gamma_diag_constraint = constraints.get(gamma_diag_constraint)
    self.gamma_off_constraint = constraints.get(gamma_off_constraint)
    # np.random.randint expects integer bounds.
    self.seed = seed if seed is not None else np.random.randint(1, 10**6)
    self.input_spec = InputSpec(ndim=self.rank + 2)
    # The following are initialized later
    self.kernel_shape = None
    self.kernel = None
    self.gamma_rr = None
    self.gamma_ii = None
    self.gamma_ri = None
    self.bias = None
def __init__(self, rank, filters, kernel_size,
             strides=1, padding="valid", data_format=None, dilation_rate=1,
             groups=1, activation=None, use_bias=True,
             kernel_initializer="glorot_uniform", bias_initializer="zeros",
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             trainable=True, name=None, conv_op=None,
             **kwargs):
    super().__init__(
        trainable=trainable,
        name=name,
        activity_regularizer=regularizers.get(activity_regularizer),
        **kwargs,
    )
    self.rank = rank
    if isinstance(filters, float):
        filters = int(filters)
    if filters is not None and filters <= 0:
        raise ValueError("Invalid value for argument `filters`. "
                         "Expected a strictly positive value. "
                         f"Received filters={filters}.")
    self.filters = filters
    self.groups = groups or 1
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank, "kernel_size")
    self.strides = conv_utils.normalize_tuple(strides, rank, "strides", allow_zero=True)
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = conv_utils.normalize_data_format(data_format)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, rank, "dilation_rate")
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.input_spec = InputSpec(min_ndim=self.rank + 2)
    self._validate_init()
    self._is_causal = self.padding == "causal"
    self._channels_first = self.data_format == "channels_first"
    self._tf_data_format = conv_utils.convert_data_format(self.data_format, self.rank + 2)