def test_get_differentials(self):
    """Check analytical gradients against central-difference numerical estimates.

    For every weight of every unit in the network, perturb the weight by
    +/- dw, re-run the forward pass, and compare the finite-difference
    slope of the log-likelihood with the analytical derivative returned
    by ``get_derivatives`` to 5 decimal places.
    """
    derivatives = self.network.get_derivatives()
    self.network.update_network(self.X)
    # Central-difference step size for the numerical derivative.
    dw = 1E-6
    for test_i, layer in enumerate(self.network.data):
        for test_j in range(len(layer)):
            analytical = derivatives[test_i][test_j](self.Y)
            unit = self.network.data[test_i][test_j]
            for k in range(unit.w.shape[0]):
                # isinstance instead of type() == (idiomatic, subclass-safe).
                if isinstance(unit, Softmax):
                    # Softmax units carry a 2-D weight matrix W
                    # (presumably inputs x classes — confirm against Softmax).
                    for l in range(unit.number_classes):
                        # BUG FIX: perturb the unit under test, not the
                        # hard-coded self.network.data[0][0].
                        unit.W[k][l] -= dw
                        output1 = self.network.update_network(self.X)[0]
                        unit.W[k][l] += 2 * dw
                        output2 = self.network.update_network(self.X)[0]
                        # BUG FIX: restore the original weight so later
                        # iterations compare against an unperturbed network
                        # (the original left a +dw offset behind).
                        unit.W[k][l] -= dw
                        numerical = (log_likelihood(self.Y, output2)
                                     - log_likelihood(self.Y, output1)) / (2 * dw)
                        self.assertAlmostEqual(
                            cp.asnumpy(numerical), cp.asnumpy(analytical[k][l]), 5)
                else:
                    unit.w[k] -= dw
                    output1 = self.network.update_network(self.X)[0]
                    unit.w[k] += 2 * dw
                    output2 = self.network.update_network(self.X)[0]
                    # BUG FIX: restore the original weight (see above).
                    unit.w[k] -= dw
                    numerical = (log_likelihood(self.Y, output2)
                                 - log_likelihood(self.Y, output1)) / (2 * dw)
                    self.assertAlmostEqual(
                        cp.asnumpy(numerical), cp.asnumpy(analytical[k]), 5)
], [ SingleNeuron(list(range(n_components))), SingleNeuron(list(range(n_components))), SingleNeuron(list(range(n_components))), SingleNeuron(list(range(n_components))), SingleNeuron(list(range(n_components))), SingleNeuron(list(range(n_components))), SingleNeuron(list(range(n_components))), SingleNeuron(list(range(n_components))), SingleNeuron(list(range(n_components))), SingleNeuron(list(range(n_components))) ]]) predictions, x = network.update_network(X2_train) ll_train = log_likelihood(Y_train, predictions) predictions, x = network.update_network(X2_test) ll_test = log_likelihood(Y_test, predictions) print(f'Initial ll_train = {ll_train/n_train}, ll_test = {ll_test/(n_test)}') # To load previously trained weights # w_data = np.load('cifar-10/w_data.npy', allow_pickle=True) # network.set_weights(w_data) network.train(X2_train, Y_train, 50) predictions, x = network.update_network(X2_train) ll_train = log_likelihood(Y_train, predictions) predictions, x = network.update_network(X2_test) ll_test = log_likelihood(Y_test, predictions) print(f'Final ll_train = {ll_train/n_train}, ll_test = {ll_test/(n_test)}\n')
def test_log_likelihood(self):
    """The log-likelihood of the network's predictions must be a 0-d scalar."""
    predictions = self.network.update_network(self.X)[0]
    ll = log_likelihood(self.Y, predictions)
    self.assertEqual(ll.shape, ())