def test_saves_inputs_outputs_and_weights_to_network(self):
    in_size = randint(5, 10)
    out_size = randint(5, 10)
    weights = np.asarray([[randint(0, 2) for j in range(in_size)]
                          for i in range(out_size)])
    n = Net(Sigmoid)
    linear = Linear(n, in_size, out_size, weights)
    # forward() takes a length-`in_size` vector (see
    # test_linear_layer_output_correct_shape), not the size itself.
    linear.forward([randint(0, 5) for i in range(in_size)])
    assert len(linear.network.saved_inputs) > 0
    assert len(linear.network.saved_outputs) > 0
    assert len(linear.network.saved_weights) > 0
class SimpleTest(Net):
    # 1 -> 5 -> 1 fixture: pass-through (Passive) activations, MSE loss.
    def __init__(self):
        super().__init__(MSE, learning_rate=0.02)
        self.a1 = Passive(self)
        self.l1 = Linear(self, 1, 5, np.asarray(weights1), np.asarray(my_biases1))
        self.l3 = Linear(self, 5, 1, np.asarray(weights2), np.asarray(my_biases2))

    def forward_pass(self, input):
        x = self.a1.call(self.l1.forward(input))
        x = self.a1.call(self.l3.forward(x))
        return x
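# Reference sketch of what SimpleTest computes (an assumption for
# illustration: Passive is the identity activation and Linear.forward is
# weights @ x + biases, with the (out, in) weight layout the tests above
# use, so the whole network is one affine composition):
def _simple_test_reference(x):
    h = np.asarray(weights1) @ np.asarray(x) + np.asarray(my_biases1)
    return np.asarray(weights2) @ h + np.asarray(my_biases2)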
class WithSoftmax(Net):
    # 2 -> 5 -> 2 XOR fixture: pass-through hidden activation, SoftMax
    # output, cross-entropy loss.
    def __init__(self):
        super().__init__(Cross, learning_rate=0.02)
        self.a1 = Passive(self)
        self.a2 = SoftMax(self)
        self.l1 = Linear(self, 2, 5, np.asarray(xor_weights1), np.asarray(xor_biases1))
        self.l3 = Linear(self, 5, 2, np.asarray(xor_weights2), np.asarray(xor_biases2))

    def forward_pass(self, input):
        x = self.a1.call(self.l1.forward(input))
        x = self.a2.call(self.l3.forward(x))
        return x
class WithActivation(Net):
    # 1 -> 5 -> 1 fixture: ReLu hidden activation, Sigmoid output, MSE loss.
    def __init__(self):
        super().__init__(MSE, learning_rate=0.02)
        self.a1 = ReLu(self)
        self.a2 = Sigmoid(self)
        self.l1 = Linear(self, 1, 5, np.asarray(weights1), np.asarray(my_biases1))
        self.l3 = Linear(self, 5, 1, np.asarray(weights2), np.asarray(my_biases2))

    def forward_pass(self, input):
        x = self.a1.call(self.l1.forward(input))
        x = self.a2.call(self.l3.forward(x))
        return x
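# Smoke-check sketch for the three fixtures above (assumptions: forward_pass
# accepts a plain Python list and returns an array-like, and SoftMax
# normalizes its input to a probability vector):
def _fixture_smoke_check():
    assert len(SimpleTest().forward_pass([0.5])) == 1      # 1 -> 5 -> 1
    probs = WithSoftmax().forward_pass([0.0, 1.0])         # 2 -> 5 -> 2
    assert abs(float(np.sum(probs)) - 1.0) < 1e-6          # softmax sums to 1
    out = WithActivation().forward_pass([0.5])             # 1 -> 5 -> 1
    assert 0.0 <= float(out[0]) <= 1.0                     # sigmoid range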
def test_old_weights_are_updated_on_second_pass(self):
    in_size = randint(5, 10)
    out_size = randint(5, 10)
    weights = np.asarray([[randint(0, 2) for j in range(in_size)]
                          for i in range(out_size)])
    n = Net(Sigmoid)
    linear = Linear(n, in_size, out_size, weights)
    x = [randint(0, 5) for i in range(in_size)]
    linear.forward(x)
    sh = np.asarray(linear.network.saved_weights).shape
    # A second pass must overwrite the saved weights rather than append a
    # second copy, so the saved shape stays the same.
    linear.forward(x)
    assert sh == np.asarray(linear.network.saved_weights).shape
def test_biases_are_initialized_correctly(self):
    in_size = randint(5, 10)
    out_size = randint(5, 10)
    biases = np.asarray([randint(1, 5) for i in range(out_size)])
    # np.ndarray(0) is an empty placeholder standing in for "no weights".
    linear = Linear(Net(Sigmoid), in_size, out_size, np.ndarray(0), biases)
    self.assertTrue(np.array_equal(linear.biases, biases))
def test_weights_are_initialized_correctly(self):
    in_size = randint(5, 10)
    out_size = randint(5, 10)
    weights = np.asarray([[randint(0, 2) for j in range(in_size)]
                          for i in range(out_size)])
    linear = Linear(Net(Sigmoid), in_size, out_size, weights)
    self.assertTrue((linear.weights == weights).all())
def test_linear_layer_output_correct_shape(self):
    in_size = randint(5, 10)
    out_size = randint(5, 10)
    a = [randint(0, 5) for i in range(in_size)]
    linear = Linear(Net(Sigmoid), in_size, out_size)
    self.assertEqual(len(linear.forward(a)), out_size)
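# The weight layout assumed throughout these tests: weights are (out, in),
# so a length-`in` vector maps to a length-`out` vector.  A tiny numpy
# demonstration of that convention (a sketch, independent of Linear itself):
def _shape_convention_demo():
    W = np.zeros((3, 4))          # 3 outputs, 4 inputs
    x = np.zeros(4)
    assert (W @ x).shape == (3,)  # (out, in) @ (in,) -> (out,)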