Example #1
 def __init__(self,
              input_width,
              output_width,
              hidden_width=None,
              depth=3,
              learning_rate=.1,
              activation=Sigmoid):
     self.network = []
     self.learning_rate = learning_rate
     self.activation = activation()  # instantiate the activation passed in
     last_width = input_width
     for layer_idx in range(depth - 2):
         if isinstance(hidden_width,
                       (collections.abc.Sequence, tuple, np.ndarray)):
             width = int(hidden_width[layer_idx])
         elif isinstance(hidden_width, (int, float)):
             width = int(hidden_width)
         else:
             width = int(np.abs(input_width - output_width) / (depth - 1))
         scale = last_width**-.5
         layer = np.random.normal(scale=scale, size=(width, last_width))
         self.network.append(layer)
         last_width = width
     scale = last_width**-.5
     self.network.append(
         np.random.normal(scale=scale, size=(output_width, last_width)))
Example #2
	def testSteepness(self):
		sigmoid = Sigmoid()
		
		inp = random.random()
		greaterInp = inp * 1.1
		
		value = sigmoid.evaluate(inp)
		greaterValue = sigmoid.evaluate(greaterInp)
		self.assertGreater(greaterValue, value)
Example #3
 def test_derivative(self):
     a = Sigmoid()
     self.assertEqual(a.derivative(1),
                      1 / (1 + np.exp(-1)) * (1 - 1 / (1 + np.exp(-1))))
     self.assertEqual(
         a.derivative(10000),
         1 / (1 + np.exp(-10000)) * (1 - 1 / (1 + np.exp(-10000))))
     self.assertEqual(
         a.derivative(-10000),
         1 / (1 + np.exp(10000)) * (1 - 1 / (1 + np.exp(10000))))
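The test above (and test_apply in Example #23) implies a Sigmoid class exposing apply and derivative. A minimal sketch consistent with the asserted formulas, assuming nothing beyond what the test itself states:

import numpy as np

class Sigmoid:
    # Hypothetical minimal implementation; the tested project's own class
    # may differ in details.
    def apply(self, x):
        # Logistic function: 1 / (1 + e^(-x)).
        return 1 / (1 + np.exp(-x))

    def derivative(self, x):
        # sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x)).
        s = self.apply(x)
        return s * (1 - s)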
Example #4
	def testOffset(self):
		offset = random.randint(0,10)
		steepness = random.randint(1,10)
		sigmoid = Sigmoid(steepness, offset)
		
		offsetValue = sigmoid.evaluate(-offset)
		offsetTarget = 1/2
		targetRatio = offsetValue / offsetTarget
		
		self.assertLess(targetRatio, 1.1)
		self.assertGreater(targetRatio, 0.9)
Example #5
class LSTM:
    def __init__(self, Wx, Wh, b):
        self.params = [Wx, Wh, b]
        self.grads = [np.zeros_like(Wx), np.zeros_like(Wh), np.zeros_like(b)]
        self.cache = None
        self.sigmoid = Sigmoid()

    def _slice(self, A, H):
        # Split the packed pre-activations into the four gate blocks.
        f = A[:, :H]
        g = A[:, H:2 * H]
        i = A[:, 2 * H:3 * H]
        o = A[:, 3 * H:]
        return f, g, i, o

    def forward(self, x, h_prev, c_prev):
        Wx, Wh, b = self.params
        N, H = h_prev.shape
        # One affine transform produces all four gate pre-activations.
        A = np.dot(x, Wx) + np.dot(h_prev, Wh) + b
        f, g, i, o = self._slice(A, H)
        f = self.sigmoid.forward(f)  # forget gate
        g = np.tanh(g)               # candidate cell state
        i = self.sigmoid.forward(i)  # input gate
        o = self.sigmoid.forward(o)  # output gate
        c_next = f * c_prev + g * i
        h_next = o * np.tanh(c_next)
        self.cache = (x, h_prev, c_prev, (i, f, g, o), c_next)
        return c_next, h_next

    def backward(self, dh_next, dc_next):
        Wx, Wh, b = self.params
        x, h_prev, c_prev, gates, c_next = self.cache
        i, f, g, o = gates
        tanh_c_next = np.tanh(c_next)
        # Gradient into the cell state, from both the c_next and h_next paths.
        ds = dc_next + (dh_next * o) * (1 - tanh_c_next**2)
        dc_prev = ds * f
        di = ds * g
        df = ds * c_prev
        do = dh_next * tanh_c_next
        dg = ds * i
        # Backprop through the gate nonlinearities:
        # sigmoid' = y * (1 - y), tanh' = 1 - y**2.
        di *= i * (1 - i)
        df *= f * (1 - f)
        do *= o * (1 - o)
        dg *= (1 - g**2)
        # Repack the gate gradients in the same f, g, i, o order as _slice.
        dA = np.hstack((df, dg, di, do))
        dWh = np.dot(h_prev.T, dA)
        dWx = np.dot(x.T, dA)
        db = dA.sum(axis=0)
        self.grads[0][...] = dWx
        self.grads[1][...] = dWh
        self.grads[2][...] = db
        dx = np.dot(dA, Wx.T)
        dh_prev = np.dot(dA, Wh.T)
        return dx, dh_prev, dc_prev
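The LSTM's backward never calls sigmoid.backward (the gate derivatives are computed inline as i * (1 - i) and so on), so the shared Sigmoid helper only needs a forward pass. A minimal sketch of the assumed class, matching the forward/backward interface used in Examples #7 and #11 (an assumption, not this project's source):

import numpy as np

class Sigmoid:
    def __init__(self):
        self.out = None

    def forward(self, x):
        # Cache the output so backward can reuse it.
        self.out = 1 / (1 + np.exp(-x))
        return self.out

    def backward(self, dout):
        # dL/dx = dL/dy * y * (1 - y), using the cached forward output.
        return dout * self.out * (1 - self.out)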
Example #6
def acc(w1, b1, w2, b2, x, t):
    accuracy_cnt = 0
    affine1 = Affine(w1, b1)
    affine2 = Affine(w2, b2)
    sigmoid = Sigmoid()
    x1 = affine1.forward(x)
    y1 = sigmoid.forward(x1)
    x2 = affine2.forward(y1)
    for i in range(len(t)):
        if abs(x2[i][0] - t[i][0]) < 0.5:
            accuracy_cnt += 1
        #print(x2[i][0],t[i][0])
    print(accuracy_cnt / len(t))  # accuracy as a fraction of all samples
Example #7
class TestSigmoid(unittest.TestCase):
    def setUp(self):
        self.sigmoid = Sigmoid()
        self.x = np.random.randn(10, 4)

    def test_forward(self):
        out = self.sigmoid.forward(self.x)
        self.assertEqual((10, 4), out.shape)

    def test_backward(self):
        self.sigmoid.forward(self.x)
        dout = np.random.randn(10, 4)
        dx = self.sigmoid.backward(dout)
        self.assertEqual((10, 4), dx.shape)
Example #8
def main():

    train_data = np.array([[0, 0], [0, 1], [0, 2], [0, 3], [0, 4], [0, 5],
                           [0, 6], [0, 7], [1, 0], [1, 1], [1, 2], [1, 3],
                           [1, 4], [1, 5], [1, 6], [1, 7], [2, 0], [2, 1],
                           [2, 2], [2, 3], [2, 4], [2, 5], [2, 6], [2, 7],
                           [3, 0], [3, 1], [3, 2], [3, 3], [3, 4], [3, 5],
                           [3, 6], [3, 7], [4, 0], [4, 1], [4, 2], [4, 3],
                           [4, 4], [4, 5], [4, 6], [4, 7], [5, 0], [5, 1],
                           [5, 2], [5, 3], [5, 4], [5, 5], [5, 6], [5, 7],
                           [6, 0], [6, 1], [6, 2], [6, 3], [6, 4], [6, 5],
                           [6, 6], [6, 7], [7, 0], [7, 1], [7, 2], [7, 3],
                           [7, 4], [7, 5], [7, 6], [7, 7]])
    label_data = np.array(
        [[0], [1], [2], [3], [4], [5], [6], [7], [1], [0], [3], [2], [5], [4],
         [7], [6], [2], [3], [0], [1], [6], [7], [4], [5], [3], [2], [1], [0],
         [7], [6], [5], [4], [4], [5], [6], [7], [0], [1], [2], [3], [5], [4],
         [7], [6], [1], [0], [3], [2], [6], [7], [4], [5], [2], [3], [0], [1],
         [7], [6], [5], [4], [3], [2], [1], [0]])
    learn_rate = 0.15
    epoch = 600000
    loss_list = []
    w1, b1, w2, b2 = init(input_size=2, hidden_size=16, output_size=1)
    #print(w1, w2, b1, b2)

    for i in range(epoch):
        affine1 = Affine(w1, b1)
        affine2 = Affine(w2, b2)
        sigmoid = Sigmoid()
        loss = MSE()
        x1 = affine1.forward(train_data)
        y1 = sigmoid.forward(x1)
        x2 = affine2.forward(y1)
        ls = loss.mean_square_error(x2, label_data)
        print(ls)
        loss_list.append(ls)
        dout = loss.backward(x2, label_data)
        dx = affine2.backward(dout)
        w2 = w2 - learn_rate * affine2.dw
        b2 = b2 - learn_rate * affine2.db
        dy1 = sigmoid.backward(dx)
        dx = affine1.backward(dy1)
        b1 = b1 - learn_rate * affine1.db
        w1 = w1 - learn_rate * affine1.dw
    #print(w1,w2,b1,b2)

    plt.plot(loss_list)
    plt.show()
    acc(w1, b1, w2, b2, train_data, label_data)
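This training loop relies on Affine and MSE helpers that keep their gradients in dw/db and return the upstream gradient from backward. A plausible sketch under those assumptions (the project's actual classes may differ):

import numpy as np

class Affine:
    def __init__(self, w, b):
        self.w, self.b = w, b
        self.x = None
        self.dw, self.db = None, None

    def forward(self, x):
        self.x = x
        return np.dot(x, self.w) + self.b

    def backward(self, dout):
        # Store parameter gradients for the SGD update in the caller.
        self.dw = np.dot(self.x.T, dout)
        self.db = dout.sum(axis=0)
        return np.dot(dout, self.w.T)

class MSE:
    def mean_square_error(self, y, t):
        return 0.5 * np.mean((y - t) ** 2)

    def backward(self, y, t):
        # Gradient of the mean squared error with respect to the predictions.
        return (y - t) / len(y)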
Example #9
class NeuralNetwork:
    def __init__(self,
                 input_width,
                 output_width,
                 hidden_width=None,
                 depth=3,
                 learning_rate=.1,
                 activation=Sigmoid):
        self.network = []
        self.learning_rate = learning_rate
        self.activation = activation()  # instantiate the activation passed in
        last_width = input_width
        for layer_idx in range(depth - 2):
            if isinstance(hidden_width,
                          (collections.abc.Sequence, tuple, np.ndarray)):
                width = int(hidden_width[layer_idx])
            elif isinstance(hidden_width, (int, float)):
                width = int(hidden_width)
            else:
                width = int(np.abs(input_width - output_width) / (depth - 1))
            scale = last_width**-.5
            layer = np.random.normal(scale=scale, size=(width, last_width))
            self.network.append(layer)
            last_width = width
        scale = last_width**-.5
        self.network.append(
            np.random.normal(scale=scale, size=(output_width, last_width)))

    def predict(self, inputs):
        for layer in self.network:
            inputs = self.activation.forward(np.dot(layer, inputs))
        return inputs

    def cost_f(self, predictions, targets):
        return targets - predictions

    def train(self, features, labels):
        outputs = [np.array(features, ndmin=2).T]
        for layer in self.network:
            outputs.append(self.activation.forward(np.dot(layer, outputs[-1])))

        labels = np.array(labels, ndmin=2).T
        errors = self.cost_f(outputs[-1], labels)
        for l_idx, layer in enumerate(self.network[::-1]):
            p_deltas = errors * self.activation.backward(outputs[-l_idx - 1])
            errors = np.dot(layer.T, errors)
            layer += self.learning_rate * np.dot(p_deltas,
                                                 outputs[-l_idx - 2].T)
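A hypothetical usage sketch for the class above (not from the source); it assumes the Sigmoid helper exposes forward(x) and a backward(y) returning y * (1 - y), which is what train applies to the cached layer outputs:

import numpy as np

net = NeuralNetwork(input_width=2, output_width=1, hidden_width=4)
xor_inputs = [[0, 0], [0, 1], [1, 0], [1, 1]]
xor_labels = [[0], [1], [1], [0]]
for _ in range(5000):
    for x, y in zip(xor_inputs, xor_labels):
        net.train(x, y)
# predict expects column vectors, matching the transposes used in train.
print(net.predict(np.array([1, 0], ndmin=2).T))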
Example #10
File: nn.py Project: jhd/nntest
    def randomLayersInit(self, layers, startInputs):
        # layers format: [number of input neurons, then the number of
        # sigmoid neurons in each subsequent layer]
        if len(layers) <= 2:
            return

        self.network = []

        inputs = []
        for idx in range(0, layers[0]):
            inputs.append(InputNeuron(startInputs[idx]))
        self.network.append(Layer(inputs))

        for layerLength in layers[1:]:
            neurons = []
            for _ in range(0, layerLength):
                prev_size = len(self.network[-1].neurons)
                weights = [numpy.random.uniform(0, 1) for _ in range(prev_size)]
                neurons.append(Sigmoid(weights, numpy.random.uniform(0, 1)))
            self.network.append(Layer(neurons))
Example #11
class TestSigmoid(unittest.TestCase):
    def setUp(self):
        self.sigmoid = Sigmoid()

    def test_forward(self):
        x = np.array([[1.0, -0.5], [-2.0, 3.0]])
        assert_almost_equal(
            ([[0.73105858, 0.37754067], [0.11920292, 0.95257413]]),
            self.sigmoid.forward(x))

    def test_backward(self):
        x = np.array([[1.0, -0.5], [-2.0, 3.0]])
        self.sigmoid.forward(x)
        dout = 1
        # backward is applied twice below, so the expected values are
        # (y * (1 - y)) ** 2 for each forward output y.
        assert_almost_equal(
            np.array([[0.0386563, 0.0552267], [0.0110237, 0.0020409]]),
            self.sigmoid.backward(self.sigmoid.backward(dout)))
Example #12
    def __init__(self, input_dim, sizes):
        self.layers = []
        sizes = [input_dim] + sizes  # avoid mutating the caller's list

        for i in range(1, len(sizes) - 1):
            layer = Dense(sizes[i], sizes[i - 1], Relu())
            self.layers.append(layer)

        last = len(sizes)
        layer = Dense(sizes[last - 1], sizes[last - 2], Sigmoid())
        self.layers.append(layer)
Example #13
class Neuron:
	
	def __init__(self, previousRow):
		self.weightedNeurons = getWeightedNeurons(previousRow)
		self.sigmoid = Sigmoid()

	def evaluate(self):
		total = 0
		for weightedNeuron in self.weightedNeurons:
			total += weightedNeuron.evaluate()
		if len(self.weightedNeurons) > 0:
			total /= len(self.weightedNeurons)
		return self.sigmoid.evaluate(total)
Example #14
    def __init__(self, input_size, hidden_size, output_size):
        I, H, O = input_size, hidden_size, output_size

        W1 = np.random.randn(I, H)
        b1 = np.random.randn(H)

        W2 = np.random.randn(H, O)
        b2 = np.random.randn(O)

        self.layers = [Affine(W1, b1), Sigmoid(), Affine(W2, b2)]

        # Collect every layer's parameters and gradients into flat lists.
        self.params, self.grads = [], []
        for l in self.layers:
            self.params += l.params
            self.grads += l.grads
Example #15
def get_shape_by_name(shape_name, priors):
    import re
    if shape_name == 'sigmoid':
        from sigmoid import Sigmoid
        return Sigmoid(priors)
    elif shape_name == 'sigslope':
        from sigslope import Sigslope
        return Sigslope(priors)
    elif shape_name.startswith('poly'):
        m = re.match(r'poly(\d)', shape_name)
        assert m, 'Illegal polynomial shape name'
        degree = int(m.group(1))
        from poly import Poly
        return Poly(degree, priors)
    elif shape_name == 'spline':
        from spline import Spline
        return Spline()
    else:
        raise AssertionError('Unknown shape: {}'.format(shape_name))
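The factory imports each shape class lazily, so unused backends never load. Hypothetical calls, where priors stands for whatever prior object the shape classes expect:

shape = get_shape_by_name('poly3', priors)    # imports Poly, returns Poly(3, priors)
spline = get_shape_by_name('spline', priors)  # note: priors is ignored for splines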
Example #16
 def __init__(self,
              n_weights: int,
              weights=None,
              bias=None,
              ac_function=Sigmoid(),
              lrate=0.1):
     # Validate the constructor's input parameters
     if type(n_weights) != int:
         raise ValueError(
             "Input n_weights must be a positive integer")
     elif n_weights < 1:
         raise ValueError(
             "Input n_weights must be a positive integer")
     elif weights is not None and type(weights) not in [list, np.ndarray]:
         raise ValueError("weights must be a list of floats")
     elif weights is not None and len(weights) != n_weights:
         raise ValueError(
             "n_weights must match the number of weights provided")
     elif type(ac_function) != Step and type(
             ac_function) != Sigmoid and type(ac_function) != Tanh:
         raise ValueError(
             "The activation function must be Step(), Sigmoid() or Tanh()"
         )
     elif type(lrate) != float:
         raise ValueError("The learning rate lrate must be a float")
     elif bias is not None and type(bias) != float:
         raise ValueError("bias must be a float")
     else:
         if weights is None:  # No weights provided; draw them at random
             self.__weights = 4 * np.random.rand(n_weights) - 2
         else:
             for i in range(len(weights)):
                 if type(weights[i]) not in [float, np.float64]:
                     raise ValueError("every weight must be a float")
             self.__weights = np.array(weights)
         if bias is None:  # No bias provided; draw one at random
             self.__bias = 4 * np.random.rand() - 2
         else:
             self.__bias = bias
         self.__length = n_weights
         self.__acfunction = ac_function
         self.__lrate = lrate
Example #17
 def __init__(self, input_size, hidden_size, output_size):
     I  = input_size
     H  = hidden_size
     O  = output_size
     # Initialise weights and biases
     W1 = 0.01 * np.random.randn(I, H)
     b1 = np.zeros(H)
     W2 = 0.01 * np.random.randn(H, O)
     b2 = np.zeros(O)
     # Build the layer stack
     self.layers = [
         Affine(W1, b1),
         Sigmoid(),
         Affine(W2, b2)
     ]
     self.loss_layer = SoftMaxWithLoss()
     # Collect all weights and gradients into flat lists
     self.params = []
     self.grads = []
     for layer in self.layers:
         self.params += layer.params
         self.grads  += layer.grads
Example #18
def linearSigmoidProcessor():
    """
    Processor node for linear and Sigmoid operation
    """
    X, W, b = Input(), Input(), Input()

    f = LinearMatrix(X, W, b)
    g = Sigmoid(f)

    X_ = np.array([[-1., -2.], [-1, -2]])
    W_ = np.array([[2., -3], [2., -3]])
    b_ = np.array([-3., -5])

    feed_dict = {X: X_, W: W_, b: b_}

    graph = topological_sort(feed_dict)
    output = forward_pass(g, graph)

    """
    Output should be:
    [[  1.23394576e-04   9.82013790e-01]
    [  1.23394576e-04   9.82013790e-01]]
    """
    print(output, "(according to miniflow - LinearSigmoid)")
 def get_output_layer(self, h):
     sigmoid = Sigmoid()
     a = sigmoid.forward(h)
     out = np.dot(a, self.W) + self.b
     return out
Example #20
 def setUp(self):
     self.sigmoid = Sigmoid()
Example #21
import numpy as np

from perceptron import Perceptron
from sigmoid import Sigmoid

print('Solving logic gates using a Perceptron!!')
logical_and_data = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
logical_and_solv = np.array([0, 0, 0, 1]).T

perceptron_and = Perceptron(0.01, logical_and_data, logical_and_solv, 'step')
perceptron_and.run(101, 10)
#
print('Using a sigmoid neuron')
sig = Sigmoid(0.01, logical_and_data, logical_and_solv, 'sigmoid')
sig.run(1001, 100)
Example #22
features = data[0]
labels = data[1]

plt.scatter(features[:, 0], features[:, 1], c=labels, cmap='coolwarm')

x = np.linspace(0, 11, 10)
y = -x + 5

plt.plot(x, y)

g = Graph()
graphObject = g.set_as_default()

# Initialize function wx - b | [1,1] * x - 5
x = Placeholder()
graphObject.placeholders.append(x)  # append placeholder x
w = Variables([1, 1])
graphObject.variables.append(w)  # append variable w
b = Variables(-5)
graphObject.variables.append(b)  # append variable b

z = Addition(MatrixMultiplication(w, x, graphObject), b, graphObject)

# Apply activation function
a = Sigmoid(z, graphObject)

# Execute neural network
sess = Session()
print(sess.run(a, {x: [0, -10]}))
plt.show()
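Working the feed through by hand, and assuming MatrixMultiplication computes the dot product w · x: z = 1*0 + 1*(-10) - 5 = -15, so the session should print sigmoid(-15) = 1 / (1 + e^15) ≈ 3.06e-07, i.e. a point far on the negative side of the decision line.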
Example #23
 def test_apply(self):
     a = Sigmoid()
     self.assertEqual(a.apply(1), 1 / (1 + np.exp(-1)))
     self.assertEqual(a.apply(10000), 1 / (1 + np.exp(-10000)))
     self.assertEqual(a.apply(-10000), 1 / (1 + np.exp(10000)))
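At x = ±10000, np.exp overflows to inf (with a RuntimeWarning), so the test implicitly relies on IEEE arithmetic collapsing 1 / (1 + inf) to 0.0. A numerically stable variant that avoids the warning entirely, offered as an alternative sketch rather than the tested implementation:

import numpy as np

def stable_sigmoid(x):
    x = np.asarray(x, dtype=float)
    # Clamp each branch's argument so np.exp never sees a large positive
    # value; np.where then selects the branch that was evaluated safely.
    return np.where(x >= 0,
                    1 / (1 + np.exp(-np.maximum(x, 0))),
                    np.exp(np.minimum(x, 0)) / (1 + np.exp(np.minimum(x, 0))))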
Example #24
 def __init__(self,
              neurons: int,
              n_weights: int,
              weights=None,
              bias=None,
              lrate=0.1,
              ac_function=Sigmoid()):
     if type(neurons) != int:
         raise ValueError(
             "The number of neurons must be a positive integer"
         )
     elif neurons < 1:
         raise ValueError(
             "The number of neurons must be a positive integer"
         )
     else:
         self.__length = neurons
         self.__input_length = n_weights
         self.__acfunction = ac_function
         self.__lrate = lrate
         self.__neurons = {}
         # Initialise the layer's neurons according to the inputs provided
         if weights is None and bias is None:
             for i in range(neurons):
                 self.__neurons[i] = Neuron(n_weights=n_weights,
                                            ac_function=ac_function,
                                            lrate=lrate)
         elif bias is None:
             if type(weights) is not dict:
                 raise ValueError(
                     "Input weights must be a dictionary with each neuron's list of weights"
                 )
             elif len(list(weights.keys())) != neurons:
                 raise ValueError(
                     "The number of entries in the weights dictionary must equal the number of neurons"
                 )
             else:
                 for i in range(neurons):
                     self.__neurons[i] = Neuron(n_weights=n_weights,
                                                weights=weights[i],
                                                ac_function=ac_function,
                                                lrate=lrate)
         elif weights is None:
             if type(bias) is not dict:
                 raise ValueError(
                     "Input bias must be a dictionary with each neuron's bias"
                 )
             elif len(list(bias.keys())) != neurons:
                 raise ValueError(
                     "The number of entries in the bias dictionary must equal the number of neurons"
                 )
             else:
                 for i in range(neurons):
                     self.__neurons[i] = Neuron(n_weights=n_weights,
                                                bias=bias[i],
                                                ac_function=ac_function,
                                                lrate=lrate)
         else:
             if type(weights) is not dict or type(bias) is not dict:
                 raise ValueError(
                     "Inputs weights and bias must be dictionaries with each neuron's "
                     + "list of weights and bias respectively")
             elif len(list(weights.keys())) != neurons or len(
                     list(bias.keys())) != neurons:
                 raise ValueError(
                     "The number of entries in the weights and bias dictionaries "
                     + "must equal the number of neurons")
             else:
                 for i in range(neurons):
                     self.__neurons[i] = Neuron(n_weights=n_weights,
                                                weights=weights[i],
                                                bias=bias[i],
                                                ac_function=ac_function,
                                                lrate=lrate)
Example #25
 def __init__(self, neurons: int, activation: Operation = Sigmoid()):
     '''
     Requires an activation function upon initialization
     '''
     super().__init__(neurons)
     self.activation = activation
Example #26
	def __init__(self, previousRow):
		self.weightedNeurons = getWeightedNeurons(previousRow)
		self.sigmoid = Sigmoid()
Example #27
 def setUp(self):
     self.sigmoid = Sigmoid()
     self.x = np.random.randn(10, 4)
Example #28
 def __init__(self):
     self.sigmoid = Sigmoid()
Example #29
import numpy as np
import sys
sys.path.append("./core/")
from dense import Dense
from sigmoid import Sigmoid
from softmax import Softmax
from data_loader import get_images_and_labels, get_test_images_and_labels

if __name__ == '__main__':

    dense = Dense(10, 784)
    dense.load_model("./model/w.npy", "./model/b.npy")
    dense1 = Dense(10, 100)
    sigmoid = Sigmoid()
    loss = Softmax()

    img, labels = get_images_and_labels()
    test_imgs, test_label = get_test_images_and_labels()

    train_label = np.zeros([10, 1])
    train_label[labels[0]] = 1
    inputx = (img[0] - 128) / 256.0

    count = 0
    for i in range(10000):
        inputx = (test_imgs[i] - 128) / 256.0
        inputx = inputx.reshape((784, 1))
        dense.forward(inputx)
        sigmoid.forward(dense.end)
        loss.forward(sigmoid.end)
Example #30
 def __init__(self,
              hlayers: int,
              neurons_per_layer: list,
              entrada: int,
              salida: int,
              weights=None,
              bias=None,
              ac_functions=None,
              lrate=0.1):
     if type(hlayers) != int:
         raise ValueError(
             "The number of hidden layers must be a positive integer"
         )
     elif hlayers < 1:
         raise ValueError(
             "The number of hidden layers must be a positive integer"
         )
     elif type(neurons_per_layer) != list:
         raise ValueError(
             "neurons_per_layer must be a list of positive integers"
         )
     elif len(neurons_per_layer) != hlayers:
         raise ValueError(
             "The length of neurons_per_layer must match the number of hidden layers"
         )
     elif type(entrada) != int:
         raise ValueError(
             "The number of inputs must be a positive integer")
     elif entrada < 1:
         raise ValueError(
             "The number of inputs must be a positive integer")
     elif type(salida) != int:
         raise ValueError(
             "The number of neurons in the output layer must be a positive integer"
         )
     elif salida < 1:
         raise ValueError(
             "The number of neurons in the output layer must be a positive integer"
         )
     else:
         self.__lrate = lrate
         self.__hlayers = hlayers
         if ac_functions is None:
             ac_functions = {}
             for j in range(hlayers + 1):
                 ac_functions[j] = Sigmoid()
             self.__acfunctions = ac_functions
         elif type(ac_functions) != dict:
             raise ValueError(
                 "Input ac_functions must be a dictionary with the activation "
                 + "functions of each layer")
         elif len(list(ac_functions.keys())) != hlayers + 1:
             raise ValueError(
                 "The number of entries in the ac_functions dictionary must equal the number of layers"
             )
         else:
             self.__acfunctions = ac_functions
         self.__layers = {}
         if weights is None and bias is None:
             for i in range(hlayers + 1):
                 if i == 0:
                     self.__layers[i] = NeuronLayer(
                         neurons=neurons_per_layer[i],
                         n_weights=entrada,
                         ac_function=ac_functions[i],
                         lrate=lrate)
                 elif i == hlayers:
                     self.__layers[i] = NeuronLayer(
                         neurons=salida,
                         n_weights=neurons_per_layer[i - 1],
                         ac_function=ac_functions[i],
                         lrate=lrate)
                 else:
                     self.__layers[i] = NeuronLayer(
                         neurons=neurons_per_layer[i],
                         n_weights=neurons_per_layer[i - 1],
                         ac_function=ac_functions[i],
                         lrate=lrate)
         elif bias is None:
             if type(weights) is not dict:
                 raise ValueError(
                     "Input weights must be a dictionary of dictionaries with the "
                     + "lists of weights of each layer")
             elif len(list(weights.keys())) != hlayers + 1:
                 raise ValueError(
                     "The number of entries in the weights dictionary must equal the number of layers"
                 )
             else:
                 for i in range(hlayers + 1):
                     if i == 0:
                         self.__layers[i] = NeuronLayer(
                             neurons=neurons_per_layer[i],
                             n_weights=entrada,
                             weights=weights[i],
                             ac_function=ac_functions[i],
                             lrate=lrate)
                     elif i == hlayers:
                         self.__layers[i] = NeuronLayer(
                             neurons=salida,
                             n_weights=neurons_per_layer[i - 1],
                             weights=weights[i],
                             ac_function=ac_functions[i],
                             lrate=lrate)
                     else:
                         self.__layers[i] = NeuronLayer(
                             neurons=neurons_per_layer[i],
                             n_weights=neurons_per_layer[i - 1],
                             weights=weights[i],
                             ac_function=ac_functions[i],
                             lrate=lrate)
         elif weights is None:
             if type(bias) is not dict:
                 raise ValueError(
                     "Input bias must be a dictionary of dictionaries with the "
                     + "biases of each layer")
             elif len(list(bias.keys())) != hlayers + 1:
                 raise ValueError(
                     "The number of entries in the bias dictionary must equal the number of layers"
                 )
             else:
                 for i in range(hlayers + 1):
                     if i == 0:
                         self.__layers[i] = NeuronLayer(
                             neurons=neurons_per_layer[i],
                             n_weights=entrada,
                             bias=bias[i],
                             ac_function=ac_functions[i],
                             lrate=lrate)
                     elif i == hlayers:
                         self.__layers[i] = NeuronLayer(
                             neurons=salida,
                             n_weights=neurons_per_layer[i - 1],
                             bias=bias[i],
                             ac_function=ac_functions[i],
                             lrate=lrate)
                     else:
                         self.__layers[i] = NeuronLayer(
                             neurons=neurons_per_layer[i],
                             n_weights=neurons_per_layer[i - 1],
                             bias=bias[i],
                             ac_function=ac_functions[i],
                             lrate=lrate)
         else:
             if type(weights) is not dict or type(bias) is not dict:
                 raise ValueError(
                     "Inputs weights and bias must be dictionaries of dictionaries "
                     +
                     "with the lists of weights and biases of each layer respectively"
                 )
             elif len(list(weights.keys())) != hlayers + 1 or len(
                     list(bias.keys())) != hlayers + 1:
                 raise ValueError(
                     "The number of entries in the weights and bias dictionaries must "
                     + "equal the number of layers of the network")
             else:
                 for i in range(hlayers + 1):
                     if i == 0:
                         self.__layers[i] = NeuronLayer(
                             neurons=neurons_per_layer[i],
                             n_weights=entrada,
                             weights=weights[i],
                             bias=bias[i],
                             ac_function=ac_functions[i],
                             lrate=lrate)
                     elif i == hlayers:
                         self.__layers[i] = NeuronLayer(
                             neurons=salida,
                             n_weights=neurons_per_layer[i - 1],
                             weights=weights[i],
                             bias=bias[i],
                             ac_function=ac_functions[i],
                             lrate=lrate)
                     else:
                         self.__layers[i] = NeuronLayer(
                             neurons=neurons_per_layer[i],
                             n_weights=neurons_per_layer[i - 1],
                             weights=weights[i],
                             bias=bias[i],
                             ac_function=ac_functions[i],
                             lrate=lrate)
Example #31
 def __init__(self, Wx, Wh, b):
     self.params = [Wx, Wh, b]
     self.grads = [np.zeros_like(Wx), np.zeros_like(Wh), np.zeros_like(b)]
     self.cache = None
     self.sigmoid = Sigmoid()
Example #32
import numpy as np
import sys

sys.path.append("./core/")
from dense import Dense
from sigmoid import Sigmoid
from softmax import Softmax
from data_loader import get_images_and_labels, get_test_images_and_labels
import random

if __name__ == '__main__':

    dense = Dense(10, 784)
    dense.load_model("./model/w.npy", "./model/b.npy")
    sigmoid = Sigmoid()
    loss = Softmax()

    img, labels = get_images_and_labels()
    test_imgs, test_label = get_test_images_and_labels()

    train_label = np.zeros([10, 1])
    train_label[labels[0]] = 1
    inputx = (img[0] - 128) / 256.0

    batch_size = 1
    stop_accuracy_rate = 0.9

    image_number = 60000
    for k in range(3000):
        index_list = list(range(image_number))
        random.shuffle(index_list)
Example #33
    if iden_train == "i":
        itbool = True
        break
    elif iden_train == "t":
        itbool = False
        break
    else:
        print("illegal input from keyboard.")
####

####
while True:
    sig_relu = input("sigmoid or relu? s/r > ")
    if sig_relu == "s":
        npyfile = 'wb_learn_s.npy'
        srclass = Sigmoid()
        srclass_c = Sigmoid()
        break
    elif sig_relu == "r":
        npyfile = 'wb_learn_r.npy'
        srclass = Relu()
        srclass_c = Relu()
        break
    else:
        print("illegal input from keyboard.")
####
sys.stdout.write("Now loading...")
sys.stdout.flush()

dlist = inclass.inputer(itbool)[0]
#print(dlist)
Example #34
feed_dict = {inputs: x, weights: w, bias: b}

graph = topological_sort_layer(feed_dict)
output = forward_pass_layer(f, graph)
logging.info(output)
print(output)

# Sigmoid
from sigmoid import Sigmoid

logging.info("Sigmoid")
print("Sigmoid")

inputs, weights, bias = InputLayer(), InputLayer(), InputLayer()
f = LinearLayer(inputs, weights, bias)
g = Sigmoid(f)

x = np.array([[-1., -2.], [-1, -2]])
w = np.array([[2., -3], [2., -3]])
b = np.array([-3., -5])

feed_dict = {inputs: x, weights: w, bias: b}

graph = topological_sort_layer(feed_dict)
output = forward_pass_layer(g, graph)

logging.info(output)
print(output)

# Cost
from mse import MSE