def build(self, input_shape=None):
    self.input_spec = layers.InputSpec(shape=input_shape)
    if hasattr(self.layer, 'built') and not self.layer.built:
        self.layer.build(input_shape)

    # initialise p
    self.p_logit = self.add_variable(
        name='p_logit',
        shape=(1,),
        initializer=tf.keras.initializers.random_uniform(self.init_min,
                                                         self.init_max),
        dtype=tf.float32,
        trainable=True)
    self.p = tf.nn.sigmoid(self.p_logit[0])
    tf.add_to_collection("LAYER_P", self.p)

    # initialise regulariser / prior KL term
    input_dim = int(np.prod(input_shape[1:]))
    weight = self.layer.kernel
    kernel_regularizer = self.weight_regularizer * tf.reduce_sum(
        tf.square(weight)) / (1. - self.p)
    dropout_regularizer = self.p * tf.log(self.p)
    dropout_regularizer += (1. - self.p) * tf.log(1. - self.p)
    dropout_regularizer *= self.dropout_regularizer * input_dim
    regularizer = tf.reduce_sum(kernel_regularizer + dropout_regularizer)
    # Add the regularisation loss to collection.
    tf.add_to_collection(tf.GraphKeys.REGULARIZATION_LOSSES, regularizer)
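
# The build above only creates the trainable dropout probability p and the
# KL-style regulariser; the dropout itself happens at call time.  For
# reference, a minimal sketch of the standard Concrete Dropout relaxation
# (Gal et al.) that a wrapper like this typically applies.  The temperature
# and eps values are illustrative assumptions, and this is not necessarily
# the snippet author's exact call method:
def call(self, inputs):
    eps = 1e-7
    temperature = 0.1  # assumed relaxation temperature

    # Sample a relaxed (continuous) dropout mask from the Concrete distribution.
    unif_noise = tf.random_uniform(shape=tf.shape(inputs))
    drop_prob = (tf.log(self.p + eps)
                 - tf.log(1. - self.p + eps)
                 + tf.log(unif_noise + eps)
                 - tf.log(1. - unif_noise + eps))
    drop_prob = tf.nn.sigmoid(drop_prob / temperature)

    # Keep mask, rescaled so the expected activation magnitude is unchanged.
    random_tensor = 1. - drop_prob
    retain_prob = 1. - self.p
    inputs = inputs * random_tensor / retain_prob
    return self.layer.call(inputs)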
def build(self, input_shape):
    self.input_spec = [kl.InputSpec(shape=input_shape)]
    shape = [1 for _ in input_shape]
    for i in self.axis:
        shape[i] = input_shape[i]
    self.gamma = self.add_weight(shape=shape,
                                 initializer=self.gamma_init,
                                 regularizer=self.gamma_regularizer,
                                 name='gamma')
    self.beta = self.add_weight(shape=shape,
                                initializer=self.beta_init,
                                regularizer=self.beta_regularizer,
                                name='beta')
    self.built = True
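
# Hypothetical companion to the build above, assuming the layer applies a
# per-instance normalisation followed by the learned affine transform, with
# K as the Keras backend (from keras import backend as K).  The reduction
# axes (everything except batch and self.axis) and the 1e-5 epsilon are
# assumptions for illustration; the original snippet does not show call:
def call(self, x):
    reduction_axes = [i for i in range(1, K.ndim(x)) if i not in self.axis]
    mean = K.mean(x, axis=reduction_axes, keepdims=True)
    std = K.std(x, axis=reduction_axes, keepdims=True)
    x_norm = (x - mean) / (std + 1e-5)
    # gamma and beta broadcast against x because build gave them size 1 on
    # every axis outside self.axis.
    return self.gamma * x_norm + self.beta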
def build(self, input_shape):
    super(ResSASABasicBlock, self).build(input_shape)
    self.input_spec = layers.InputSpec(shape=input_shape)
def __init__(self, padding=(1, 1)):
    # Call the base Layer constructor before assigning attributes; Keras
    # rejects attribute assignment on a Layer whose __init__ has not run.
    super().__init__()
    self.padding = tuple(padding)
    self.input_spec = [layers.InputSpec(ndim=4)]
def __init__(self, padding: Tuple[int, int] = (1, 1)) -> None:
    super().__init__()
    self.padding = tuple(padding)
    self.input_spec = [layers.InputSpec(ndim=4)]
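
# Both constructors above only store the padding and declare a 4-D input
# spec.  A minimal sketch of the call-time behaviour such a layer is usually
# paired with, assuming NHWC layout and reflection padding (both are
# assumptions -- the snippets do not show the call method):
def call(self, x):
    h_pad, w_pad = self.padding
    # Pad height and width only; batch and channel axes stay untouched.
    return tf.pad(x, [[0, 0], [h_pad, h_pad], [w_pad, w_pad], [0, 0]],
                  mode='REFLECT')

def compute_output_shape(self, input_shape):
    # Each spatial dimension grows by twice its padding.
    h_pad, w_pad = self.padding
    return (input_shape[0],
            input_shape[1] + 2 * h_pad,
            input_shape[2] + 2 * w_pad,
            input_shape[3])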