Example #1
0
 def call(self, inputs, mask=None):
     """PReLU forward pass: identity for positive inputs, alpha-scaled for negative.

     `mask` is accepted for API compatibility and ignored.
     """
     positive_part = K.relu(inputs)
     if K.backend() == 'theano':
         # Theano requires the learned alpha to be broadcast explicitly;
         # (inputs - |inputs|) / 2 isolates the negative part of the input.
         negative_part = (K.pattern_broadcast(self.alpha, self.param_broadcast)
                          * (inputs - K.abs(inputs)) * 0.5)
     else:
         negative_part = -self.alpha * K.relu(-inputs)
     return positive_part + negative_part
 def call(self, inputs, mask=None):
   """Compute the PReLU activation as the sum of its positive and negative halves."""
   if K.backend() == 'theano':
     # Broadcast the trainable slope to the input's pattern, then scale
     # the negative half of the input: (x - |x|) * 0.5 is min(x, 0).
     broadcast_alpha = K.pattern_broadcast(self.alpha, self.param_broadcast)
     neg = broadcast_alpha * (inputs - K.abs(inputs)) * 0.5
   else:
     neg = -self.alpha * K.relu(-inputs)
   return K.relu(inputs) + neg
 def call(self, inputs):
   """Apply ReLU with this layer's negative-slope coefficient `self.alpha`."""
   activated = K.relu(inputs, alpha=self.alpha)
   return activated
Example #4
0
File: activations.py  Project: lengjia/RRL
def relu(x, alpha=0., max_value=None):
    """Element-wise rectified linear unit.

    Args:
        x: input tensor or variable.
        alpha: slope applied to negative inputs (default 0, i.e. plain ReLU).
        max_value: saturation ceiling, or None for unbounded output.

    Returns:
        The activated tensor.
    """
    result = K.relu(x, alpha=alpha, max_value=max_value)
    return result
Example #5
0
def relu6(x):
    """ReLU whose output saturates at 6."""
    capped = K.relu(x, max_value=6)
    return capped
 def relu6(self, x):
     """Apply a ReLU saturated at 6 to `x`; does not read instance state."""
     return K.relu(x, max_value=6)
Example #7
0
# Load the dataset: image iterator and label iterator.
from prep_data_02_vgg16_catsdogs_03_img_folder_2_iterators import val_batches


# Create a constant tensor from the validation image array.
# NOTE(review): `val_img_array` is presumably provided by the import above — confirm.
tensor1 = K.constant(value=val_img_array, name='tensor1')

# Create an input tensor backed by the constant tensor above.
input_tensor = layers.Input(tensor=tensor1, name='input_tensor')

# Rebind `input_tensor` to a placeholder input whose batch size is left
# unknown (only per-sample shape is fixed); this shadows the constant-backed
# input created on the previous statement.
input_tensor = layers.Input(shape=val_img_array.shape[1:], name='input_tensor')

# Build a ReLU tensor (still a placeholder graph node) from the input tensor.
relu_tensor = K.relu(input_tensor)

"""
def relu(x, alpha=0., max_value=None):

  Rectified linear unit
  With default values, it returns element-wise `max(x, 0)`

  '  Arguments:\n',
  '      x: A tensor or variable.\n',
  '      alpha: A scalar, slope of negative section (default=`0.`).\n',
  '      max_value: Saturation threshold.\n',

  '  Returns:\n',
  '      A tensor.\n',
"""
Example #8
0
File: model.py  Project: curme/TinyZoo
def leaky_relu(h, alpha=1 / 3.0):
    """Leaky ReLU: identity for positive `h`, slope `alpha` (default 1/3) otherwise."""
    activated = K.relu(h, alpha=alpha)
    return activated
Example #9
0
 def call(self, inputs):
     """Forward pass: ReLU with this layer's fixed negative slope `self.alpha`."""
     output = K.relu(inputs, alpha=self.alpha)
     return output
Example #10
0
def relu6(x):
  """Return `x` passed through a ReLU saturated at 6."""
  saturated = K.relu(x, max_value=6)
  return saturated
Example #11
0
def relu_limited(x, alpha=0., max_value=1.):
    """ReLU variant saturating at `max_value` (default 1) with negative slope `alpha`.

    Args:
        x: input tensor or variable.
        alpha: slope for negative inputs (default 0).
        max_value: saturation ceiling (default 1).
    """
    clipped = K.relu(x, alpha=alpha, max_value=max_value)
    return clipped
Example #12
0
def relu(x, alpha=0., max_value=None):
  """Rectified linear unit with optional negative slope and saturation threshold."""
  out = K.relu(x, alpha=alpha, max_value=max_value)
  return out