def __init__(self, padding=(1, 1), data_format=None, **kwargs):
    """Configure a 2D reflection-padding layer.

    `padding` may be a single int (same pad on every side), a pair
    whose entries are ints (symmetric height/width pads) or 2-tuples
    ((top, bottom), (left, right)); any other value raises ValueError.
    """
    super(ReflectPadding2D, self).__init__(**kwargs)
    if isinstance(padding, int):
        # One scalar: pad every edge by the same amount.
        self.padding = ((padding, padding), (padding, padding))
    elif hasattr(padding, '__len__'):
        if len(padding) != 2:
            raise ValueError('`padding` should have two elements. '
                             'Found: ' + str(padding))
        # Each entry may itself be an int or a 2-tuple; normalize_tuple
        # expands either form into a (before, after) pair.
        normalize = keras_utils.conv_utils.normalize_tuple
        self.padding = (normalize(padding[0], 2, "1st entry of padding"),
                        normalize(padding[1], 2, "2nd entry of padding"))
    else:
        raise ValueError('`padding` should be either an int, '
                         'a tuple of 2 ints '
                         '(symmetric_height_pad, symmetric_width_pad), '
                         'or a tuple of 2 tuples of 2 ints '
                         '((top_pad, bottom_pad), (left_pad, right_pad)). '
                         'Found: ' + str(padding))
    self.data_format = keras_utils.conv_utils.normalize_data_format(
        data_format)
    # Only rank-4 (image batch) inputs are accepted.
    self.input_spec = keras_engine.InputSpec(ndim=4)
def __init__(self, upsampling=(2, 2), output_size=None, data_format=None,
             **kwargs):
    """Layer that bilinearly resizes rank-4 inputs.

    Exactly one sizing mode is active: a truthy `output_size` fixes the
    target spatial size (and sets `self.upsampling` to None), otherwise
    `upsampling` gives per-axis integer scale factors (and
    `self.output_size` is None).

    Args:
        upsampling: int or tuple of 2 ints, (height, width) scale
            factors. Ignored when `output_size` is truthy.
        output_size: int or tuple of 2 ints, explicit target size.
        data_format: Keras data-format string or None for the default.
        **kwargs: forwarded to the base Layer.
    """
    super(BilinearUpsampling, self).__init__(**kwargs)
    # `normalize_data_format` moved from keras.utils.conv_utils to
    # keras.backend in Keras 2.2.x.  The previous gate compared version
    # *strings* ("2.10.0" < "2.2.0" lexicographically, so new releases
    # took the legacy branch).  Trying the new location and falling
    # back on ImportError is correct for every version.
    try:
        from keras.backend import normalize_data_format
    except ImportError:
        from keras.utils.conv_utils import normalize_data_format
    self.data_format = normalize_data_format(data_format)
    self.input_spec = KE.InputSpec(ndim=4)
    if output_size:
        self.output_size = conv_utils.normalize_tuple(
            output_size, 2, 'output_size')
        self.upsampling = None
    else:
        self.output_size = None
        self.upsampling = conv_utils.normalize_tuple(
            upsampling, 2, 'upsampling')
def __init__(self, axis=-1, center=True, scale=True, **kwargs):
    """Store instance-normalization options for a rank-4 layer.

    Args:
        axis: int-coercible axis selector, kept for later use by the
            layer (presumably the feature axis — confirm in build/call).
        center: truthy flag, coerced to bool, for a learnable offset.
        scale: truthy flag, coerced to bool, for a learnable scale.
        **kwargs: forwarded to the base Layer.
    """
    super(InstanceNormalization2D, self).__init__(**kwargs)
    # Coerce eagerly so the stored config holds plain builtins.
    self.axis, self.center, self.scale = int(axis), bool(center), bool(scale)
    self.input_spec = keras_engine.InputSpec(ndim=4)
def __init__(self, units, output_units, output_fn=lambda x: x,
             activation='tanh', recurrent_activation='hard_sigmoid',
             use_bias=True, kernel_initializer='glorot_uniform',
             autoregressive_initializer='glorot_uniform',
             recurrent_initializer='orthogonal', bias_initializer='zeros',
             kernel_regularizer=None, autoregressive_regularizer=None,
             recurrent_regularizer=None, bias_regularizer=None,
             activity_regularizer=None, kernel_constraint=None,
             autoregressive_constraint=None, recurrent_constraint=None,
             bias_constraint=None, dropout=0., recurrent_dropout=0.,
             stop_ar_gradient=False, **kwargs):
    """GRU-style recurrent layer with an extra autoregressive kernel.

    The hyper-parameters mirror keras.layers.GRU, extended with an
    `autoregressive_*` family (initializer/regularizer/constraint for
    the additional kernel), an `output_fn` mapping the hidden state to
    the emitted output, and `stop_ar_gradient` (presumably blocks
    gradients through the autoregressive path — see the step logic).
    """
    super(AutoregressiveGRU, self).__init__(**kwargs)
    self.units = units
    self.output_units = output_units
    self.output_fn = output_fn
    self.use_bias = use_bias
    self.stop_ar_gradient = stop_ar_gradient
    # Resolve string identifiers (or pass objects through) with the
    # standard Keras `get` lookups, one family at a time.
    self.activation = activations.get(activation)
    self.recurrent_activation = activations.get(recurrent_activation)
    for prefix, value in [('kernel', kernel_initializer),
                          ('autoregressive', autoregressive_initializer),
                          ('recurrent', recurrent_initializer),
                          ('bias', bias_initializer)]:
        setattr(self, prefix + '_initializer', initializers.get(value))
    for prefix, value in [('kernel', kernel_regularizer),
                          ('autoregressive', autoregressive_regularizer),
                          ('recurrent', recurrent_regularizer),
                          ('bias', bias_regularizer),
                          ('activity', activity_regularizer)]:
        setattr(self, prefix + '_regularizer', regularizers.get(value))
    for prefix, value in [('kernel', kernel_constraint),
                          ('autoregressive', autoregressive_constraint),
                          ('recurrent', recurrent_constraint),
                          ('bias', bias_constraint)]:
        setattr(self, prefix + '_constraint', constraints.get(value))
    # Clamp dropout rates into [0, 1].
    self.dropout = min(1., max(0., dropout))
    self.recurrent_dropout = min(1., max(0., recurrent_dropout))
    self.state_spec = engine.InputSpec(shape=(None, self.units))
def build(self, input_shape):
    """Create the per-feature gamma (scale) and beta (offset) variables.

    The parameter vectors have one entry per element along
    `self.axis`; any `initial_weights` captured at construction time
    are installed once and then discarded.
    """
    self.input_spec = [engine.InputSpec(shape=input_shape)]
    param_shape = (int(input_shape[self.axis]),)
    self.gamma = K.variable(self.gamma_init(param_shape),
                            name='%s_gamma' % self.name)
    self.beta = K.variable(self.beta_init(param_shape),
                           name='%s_beta' % self.name)
    # Direct list assignment is the legacy (pre-2.x property) Keras
    # way of registering trainable variables; kept for consistency
    # with the rest of this file.
    self.trainable_weights = [self.gamma, self.beta]
    weights = self.initial_weights
    if weights is not None:
        self.set_weights(weights)
        # Drop the reference so the initial values are not kept alive.
        del self.initial_weights
def build(self, input_shape):
    """Create the input, autoregressive and recurrent kernels (plus bias)
    and expose per-gate views of them.

    Each kernel packs the three GRU gates side by side along the last
    axis (width ``units * 3``); the ``*_z`` / ``*_r`` / ``*_h``
    attributes are column slices of the packed tensors, so they share
    storage with them.

    NOTE(review): the source of this block had its indentation
    flattened; the grouping of the ``output_fn`` handling under the
    ``isinstance`` check is the most plausible reconstruction — confirm
    against the original layout.
    """
    # RNN layers may receive [inputs, initial_state, ...]; only the
    # first shape describes the inputs proper.
    if isinstance(input_shape, list):
        input_shape = input_shape[0]
    batch_size = input_shape[0] if self.stateful else None
    self.input_dim = input_shape[2]
    self.input_spec[0] = engine.InputSpec(shape=(batch_size, None,
                                                 self.input_dim))
    # When output_fn is a nested Keras model, adopt its weights and
    # regularization losses as part of this layer.
    if isinstance(self.output_fn, keras.models.Model):
        self.output_fn.build(input_shape=(batch_size, self.units))
        self.trainable_weights += self.output_fn.trainable_weights
        # add regularization losses
        for loss in self.output_fn.losses:
            self.add_loss(loss)
    # Two state slots (hidden state and the fed-back output — TODO
    # confirm against step/call).
    self.states = [None, None]
    if self.stateful:
        self.reset_states()
    # Packed kernels: input -> gates, previous output -> gates,
    # hidden state -> gates.
    self.kernel = self.add_weight(shape=(self.input_dim, self.units * 3),
                                  name='kernel',
                                  initializer=self.kernel_initializer,
                                  regularizer=self.kernel_regularizer,
                                  constraint=self.kernel_constraint)
    self.autoregressive_kernel = self.add_weight(
        shape=(self.output_units, self.units * 3),
        name='autoregressive_kernel',
        initializer=self.autoregressive_initializer,
        regularizer=self.autoregressive_regularizer,
        constraint=self.autoregressive_constraint)
    self.recurrent_kernel = self.add_weight(
        shape=(self.units, self.units * 3),
        name='recurrent_kernel',
        initializer=self.recurrent_initializer,
        regularizer=self.recurrent_regularizer,
        constraint=self.recurrent_constraint)
    if self.use_bias:
        self.bias = self.add_weight(shape=(self.units * 3, ),
                                    name='bias',
                                    initializer=self.bias_initializer,
                                    regularizer=self.bias_regularizer,
                                    constraint=self.bias_constraint)
    else:
        self.bias = None
    # Gate slices: columns [0, units) -> update gate z,
    # [units, 2*units) -> reset gate r, [2*units, 3*units) -> candidate h.
    self.kernel_z = self.kernel[:, :self.units]
    self.autoregressive_kernel_z = self.autoregressive_kernel[:, :self.
                                                              units]
    self.recurrent_kernel_z = self.recurrent_kernel[:, :self.units]
    self.kernel_r = self.kernel[:, self.units:self.units * 2]
    self.autoregressive_kernel_r = self.autoregressive_kernel[:, self.units:
                                                              self.units * 2]
    self.recurrent_kernel_r = self.recurrent_kernel[:, self.units:self.units
                                                    * 2]
    self.kernel_h = self.kernel[:, self.units * 2:]
    self.autoregressive_kernel_h = self.autoregressive_kernel[:, self.units
                                                              * 2:]
    self.recurrent_kernel_h = self.recurrent_kernel[:, self.units * 2:]
    if self.use_bias:
        self.bias_z = self.bias[:self.units]
        self.bias_r = self.bias[self.units:self.units * 2]
        self.bias_h = self.bias[self.units * 2:]
    else:
        self.bias_z = None
        self.bias_r = None
        self.bias_h = None
    self.built = True