def call(self, inputs, mask=None):
    """Parametric-ReLU forward pass: relu(x) plus an alpha-scaled negative part.

    The theano backend needs `alpha` explicitly pattern-broadcast to the
    input rank; other backends compute the negative part directly as
    `-alpha * relu(-x)`.
    """
    positive_part = K.relu(inputs)
    if K.backend() == 'theano':
        # (x - |x|) / 2 equals min(x, 0); broadcast alpha before scaling it.
        broadcast_alpha = K.pattern_broadcast(self.alpha, self.param_broadcast)
        negative_part = broadcast_alpha * (inputs - K.abs(inputs)) * 0.5
    else:
        negative_part = -self.alpha * K.relu(-inputs)
    return positive_part + negative_part
def call(self, inputs):
    """Forward pass: leaky ReLU with negative slope `self.alpha`."""
    activated = K.relu(inputs, alpha=self.alpha)
    return activated
def relu(x, alpha=0., max_value=None):
    """Element-wise rectified linear unit, delegating to the backend.

    Arguments:
        x: A tensor or variable.
        alpha: Slope of the negative section (default 0).
        max_value: Saturation threshold, or None for no cap.

    Returns:
        A tensor.
    """
    result = K.relu(x, alpha=alpha, max_value=max_value)
    return result
def relu6(x):
    """ReLU saturated at 6, i.e. min(max(x, 0), 6)."""
    saturation = 6
    return K.relu(x, max_value=saturation)
def relu6(self, x):
    """Method form of ReLU-6: clamp activations to the range [0, 6]."""
    capped = K.relu(x, max_value=6)
    return capped
# load dataset: image iterator and label iterator from prep_data_02_vgg16_catsdogs_03_img_folder_2_iterators import val_batches # create an constant tensor tensor1 = K.constant(value=val_img_array, name='tensor1') # create a input tensor (constant) input_tensor = layers.Input(tensor=tensor1, name='input_tensor') # create a input tensor (placeholder) without knowing num of samples input_tensor = layers.Input(shape=val_img_array.shape[1:], name='input_tensor') # create a relu tensor (placeholder) with input tensor relu_tensor = K.relu(input_tensor) """ def relu(x, alpha=0., max_value=None): Rectified linear unit With default values, it returns element-wise `max(x, 0)` ' Arguments:\n', ' x: A tensor or variable.\n', ' alpha: A scalar, slope of negative section (default=`0.`).\n', ' max_value: Saturation threshold.\n', ' Returns:\n', ' A tensor.\n', """
def leaky_relu(h, alpha=1 / 3.0):
    """Leaky ReLU activation with a default negative slope of 1/3."""
    leaked = K.relu(h, alpha=alpha)
    return leaked
def relu_limited(x, alpha=0., max_value=1.):
    """ReLU with negative slope `alpha`, saturating at `max_value` (default 1)."""
    clipped = K.relu(x, alpha=alpha, max_value=max_value)
    return clipped