Example #1
from functions.Function import Function
import numpy as np

### --- Define sigmoid --- ###


def sigmoid_(z):

    return 1 / (1 + np.exp(-z))


def sigmoid_derivative(z):

    return sigmoid_(z) * (1 - sigmoid_(z))


sigmoid = Function(sigmoid_, sigmoid_derivative)
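
The Function wrapper pairs a function with its derivative, and the cross-entropy example further down accesses them through the f and f_prime attributes. A minimal usage sketch under that assumption, building on the definitions above (not part of the original snippet):

# Usage sketch; assumes Function exposes the wrapped callables as .f and .f_prime
import numpy as np

z = np.array([-2.0, 0.0, 2.0])
activations = sigmoid.f(z)        # forward values in (0, 1)
gradients = sigmoid.f_prime(z)    # element-wise derivative, peaking at 0.25 for z = 0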
Example #2
from functions.Function import Function
from functions.sigmoid import sigmoid_  # assumed module path, mirroring the other functions.* imports
import numpy as np

### --- Define softplus --- ###


def softplus_(z):

    # log(1 + exp(z)); logaddexp avoids overflow for large positive z
    return np.logaddexp(0, z)


def softplus_derivative(z):

    # d/dz log(1 + exp(z)) = exp(z) / (1 + exp(z)) = sigmoid(z)
    return sigmoid_(z)


softplus = Function(softplus_, softplus_derivative)
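
A quick centered finite-difference check of the identity d/dz log(1 + exp(z)) = sigmoid(z), using the definitions above (not part of the original snippet):

import numpy as np

z = np.array([-3.0, 0.0, 3.0])
eps = 1e-6
numerical = (softplus_(z + eps) - softplus_(z - eps)) / (2 * eps)
assert np.allclose(numerical, sigmoid_(z), atol=1e-6)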
Example #3
from functions.Function import Function
from functions.sigmoid import sigmoid  # assumed module path; the snippet uses sigmoid.f and sigmoid.f_prime
import numpy as np

### --- Define sigmoid cross entropy --- ###


def sigmoid_cross_entropy_(z, y):

    p = sigmoid.f(z)

    return -np.mean(y * np.log(p) + (1 - y) * np.log(1 - p))


def sigmoid_cross_entropy_derivative(z, y):

    p = sigmoid.f(z)

    # Chain rule through the sigmoid; algebraically this reduces to p - y
    return (-y / p + (1 - y) / (1 - p)) * sigmoid.f_prime(z)


sigmoid_cross_entropy = Function(sigmoid_cross_entropy_,
                                 sigmoid_cross_entropy_derivative)
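
The derivative above is the chain rule written out explicitly; since sigmoid'(z) = p * (1 - p), the expression collapses to p - y. A small numerical confirmation of that simplification, using the definitions above (not part of the original snippet):

import numpy as np

z = np.array([-1.5, 0.3, 2.0])
y = np.array([0.0, 1.0, 1.0])
p = sigmoid.f(z)

chain_rule = (-y / p + (1 - y) / (1 - p)) * sigmoid.f_prime(z)
assert np.allclose(chain_rule, p - y)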
Example #4
from functions.Function import Function
from functions.softmax import softmax_
import numpy as np

### --- Define softmax cross-entropy --- ###


def softmax_cross_entropy_(z, y, epsilon=0.0001):

    p = softmax_(z)
    p = np.maximum(p, epsilon)  # Clip probabilities away from zero so log(p) stays finite

    return -y.dot(np.log(p))


def softmax_cross_entropy_derivative(z, y):

    # Gradient of -y . log(softmax(z)) with respect to the logits (y sums to 1)
    return softmax_(z) - y


softmax_cross_entropy = Function(softmax_cross_entropy_,
                                 softmax_cross_entropy_derivative)
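
When y sums to 1 (for example a one-hot label), the gradient of -y . log(softmax(z)) with respect to the logits is softmax(z) - y. A centered finite-difference check of the pair above, at a point whose probabilities are well away from the epsilon clip (not part of the original snippet):

import numpy as np

z = np.array([0.2, -0.7, 1.5])
y = np.array([0.0, 1.0, 0.0])
eps = 1e-6

numerical = np.array([
    (softmax_cross_entropy_(z + eps * np.eye(3)[i], y)
     - softmax_cross_entropy_(z - eps * np.eye(3)[i], y)) / (2 * eps)
    for i in range(3)
])
assert np.allclose(numerical, softmax_cross_entropy_derivative(z, y), atol=1e-5)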
Example #5
from functions.Function import Function
import numpy as np

### --- Define softmax --- ###


def softmax_(z):

    # Subtract the max for numerical stability; the result is unchanged
    z = z - np.amax(z)

    return np.exp(z) / np.sum(np.exp(z))


def softmax_derivative(z):

    # Jacobian of the softmax: J_ij = s_i * (delta_ij - s_j)
    s = softmax_(z)

    return np.diag(s) - np.outer(s, s)


softmax = Function(softmax_, softmax_derivative)
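
A quick check that column j of the Jacobian matches a centered finite difference of softmax_ in direction j, using the definitions above (not part of the original snippet):

import numpy as np

z = np.array([0.5, -1.0, 2.0])
eps = 1e-6
s = softmax_(z)

numerical = np.column_stack([
    (softmax_(z + eps * np.eye(3)[j]) - softmax_(z - eps * np.eye(3)[j])) / (2 * eps)
    for j in range(3)
])
assert np.allclose(numerical, np.diag(s) - np.outer(s, s), atol=1e-6)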
Example #6
from functions.Function import Function
import numpy as np

### --- Define Mean-Squared Error --- ###


def mean_squared_error_(z, y):

    return 0.5 * np.square(z - y).mean()


def mean_squared_error_derivative(z, y):

    # The 0.5 factor cancels the 2 from the square, leaving z - y per element
    return z - y


mean_squared_error = Function(mean_squared_error_,
                              mean_squared_error_derivative)
Example #7
from functions.Function import Function
import numpy as np

### --- Define tanh --- ###


def tanh_(z):

    return np.tanh(z)


def tanh_derivative(z):

    return 1 - np.tanh(z)**2


tanh = Function(tanh_, tanh_derivative)
Example #8
from functions.Function import Function
import numpy as np

### --- Define Identity --- ###


def identity_(z):

    return z


def identity_derivative(z):

    return np.ones_like(z)


identity = Function(identity_, identity_derivative)
Example #9
from functions.Function import Function
import numpy as np

### --- Define ReLU --- ###

right_slope = 1  # slope for positive inputs
left_slope = 0   # slope for negative inputs (0 gives the standard ReLU)


def relu_(h):

    return np.maximum(0, right_slope * h) - np.maximum(0, left_slope * (-h))


def relu_derivative(h):

    # right_slope where h > 0, left_slope elsewhere
    return (h > 0) * (right_slope - left_slope) + left_slope


relu = Function(relu_, relu_derivative)
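
The two slope constants make this a parametrized rectifier: right_slope = 1 and left_slope = 0 give the standard ReLU, while a small positive left_slope turns the same pair of functions into a leaky ReLU. A brief sketch with an illustrative leak of 0.01 (not part of the original snippet):

import numpy as np

right_slope = 1
left_slope = 0.01  # illustrative value; the snippet above uses 0

h = np.array([-2.0, -0.5, 0.0, 1.5])
leaky = np.maximum(0, right_slope * h) - np.maximum(0, left_slope * (-h))
assert np.allclose(leaky, np.where(h > 0, h, left_slope * h))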