Example 1
def get_activation_by_name(activation_name, activation_id=None):
    """ Convert to a bigdl activation layer
        given the name of the activation as a string  """
    import bigdl.nn.layer as BLayer
    activation = None
    activation_name = activation_name.lower()
    if activation_name == "tanh":
        activation = BLayer.Tanh()
    elif activation_name == "sigmoid":
        activation = BLayer.Sigmoid()
    elif activation_name == "hard_sigmoid":
        activation = BLayer.HardSigmoid()
    elif activation_name == "relu":
        activation = BLayer.ReLU()
    elif activation_name == "softmax":
        activation = BLayer.SoftMax()
    elif activation_name == "softplus":
        activation = BLayer.SoftPlus(beta=1.0)
    elif activation_name == "softsign":
        activation = BLayer.SoftSign()
    elif activation_name == "linear":
        activation = BLayer.Identity()
    else:
        raise Exception("Unsupported activation type: %s" % activation_name)
    if activation_id:
        activation.set_name(activation_id)
    return activation
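
A minimal usage sketch (hypothetical: it assumes BigDL is installed and its engine has been initialized, e.g. a SparkContext plus bigdl.util.common.init_engine(); the layer name "my_relu" is illustrative):

# Hypothetical usage sketch; assumes BigDL's engine is initialized
# (SparkContext + bigdl.util.common.init_engine()) before layers are created.
relu = get_activation_by_name("ReLU", activation_id="my_relu")  # lookup is case-insensitive
linear = get_activation_by_name("linear")                       # Identity layer, no name set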
Example 2
def create_parametricsoftplus(self, klayer, kclayer):
    alpha = float(klayer.alpha_init)
    beta = float(klayer.beta_init)
    # BigDL's SoftPlus fixes alpha at 1/beta, so the Keras layer can only
    # be converted when alpha_init * beta_init == 1 (within tolerance).
    if round(alpha * beta, 4) == 1.0:
        return BLayer.SoftPlus(beta=beta, bigdl_type="float")
    else:
        raise Exception(
            "Only alpha_init = 1/beta_init is supported for now")