def test_simple(self):
    input = Container("input", Memory((3, )))
    output = Activation("output", "input", ActivationFunctions.NONE)
    network = Network([output, input])
    net_out = network.execute()
    self.assertEqual(len(net_out), 1)
    self.assertIsNotNone(net_out["output"])
def test_simple_RELU(self):
    input = Container("input", Memory((3,)))
    output = Activation("output", "input", ActivationFunctions.RELU)
    input.fill([-5, 10, -0.5])
    network = Network([output, input])
    net_out = network.execute()
    self.assertEqual(len(net_out), 1)
    self.assertIsNotNone(net_out["output"])
    # Compare element-wise: assertEqual on a NumPy array raises
    # "truth value is ambiguous" instead of comparing contents.
    self.assertTrue(np.array_equal(net_out["output"], np.array([0, 10, 0])))
def __create_model(self, weights_folder, dump_graph):
    layers = []

    #input
    self.input_layer = Container("input", Memory((1, 57)))
    self.input_hidden_layer = Container("in_hidden", Memory((1, 128)))
    layers.extend([self.input_layer, self.input_hidden_layer])

    concat = Concatenation("concat", ["input", "in_hidden"], 1)
    layers.append(concat)

    # Append a separator only if the path does not already end with one.
    # (The previous or-of-inequalities check was always true, since a
    # two-character slice can never equal a single backslash.)
    if not weights_folder.endswith(("/", "\\")):
        weights_folder += "/"

    weights_fc_1 = Container("weights_fc_1", Memory((128, 185)))
    biases_fc_1 = Container("biases_fc_1", Memory((128,)))
    weights_fc_1.fill(
        np.loadtxt(weights_folder + "i2h.weight").reshape((128, 185)))
    biases_fc_1.fill(np.loadtxt(weights_folder + "i2h.bias").reshape((128,)))
    out_hidden = Linear("i2h", "concat", "weights_fc_1", "biases_fc_1")
    layers.extend([out_hidden, weights_fc_1, biases_fc_1])

    weights_fc_2 = Container("weights_fc_2", Memory((18, 185)))
    biases_fc_2 = Container("biases_fc_2", Memory((18,)))
    weights_fc_2.fill(
        np.loadtxt(weights_folder + "i2o.weight").reshape((18, 185)))
    biases_fc_2.fill(np.loadtxt(weights_folder + "i2o.bias").reshape((18,)))
    i2o = Linear("i2o", "concat", "weights_fc_2", "biases_fc_2")
    layers.extend([i2o, weights_fc_2, biases_fc_2])

    pre_out_reshape = Reshape("pre_out_reshape", "i2o", (1, 1, 1, 18))
    output = Softmax("output", "pre_out_reshape", do_log=True)
    layers.extend([pre_out_reshape, output])

    return Network(layers, dump_graph=dump_graph)
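# A minimal single-step inference sketch for the char-RNN built above
# (hypothetical caller; assumes execute() returns leaf outputs keyed by layer
# name, so both "i2h" and "output" are returned, since "i2h" feeds no layer):
#
#   hidden = np.zeros((1, 128))
#   self.input_layer.fill(one_hot_letter)       # (1, 57) one-hot character
#   self.input_hidden_layer.fill(hidden)        # previous hidden state
#   net_out = network.execute()
#   hidden = net_out["i2h"]                     # fed back on the next step
#   log_probs = net_out["output"]               # (1, 1, 1, 18) log-softmax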
def test_simple_batch_1(self):
    input = Container("input", Memory((1, 1, 4, 4)))
    output = Softmax("output", "input")
    inp_array = np.array(
        [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]).reshape(
            (1, 1, 4, 4))
    input.fill(inp_array)
    network = Network([output, input])
    net_out = network.execute()
    self.assertEqual(len(net_out), 1)
    self.assertEqual(len(net_out["output"].shape), 4)
    self.assertAlmostEqual(1.0, np.sum(net_out["output"]))
def test_simple_batch_8(self):
    input = Container("input", Memory((8, 2, 4, 4)))
    output = Softmax("output", "input")
    inp_array = np.arange(8 * 2 * 4 * 4).reshape((8, 2, 4, 4))
    input.fill(inp_array)
    network = Network([output, input])
    net_out = network.execute()
    self.assertEqual(len(net_out), 1)
    self.assertEqual(len(net_out["output"].shape), 4)
    out = net_out["output"]
    # Softmax normalizes per sample, so 8 samples sum to 8.0 in total.
    out_sum = np.sum(out)  # renamed to avoid shadowing the built-in sum()
    self.assertAlmostEqual(8.0, out_sum)
def test_simple_reshape(self):
    input = Container("input", Memory((1, 1, 5, 5)))
    inp_array = np.array([
        1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 5, 5,
        5, 5, 5
    ]).reshape((1, 1, 5, 5))
    input.fill(inp_array)
    reshape = Reshape("reshape", "input", (25, ))
    net2 = Network([input, reshape])
    outputs = net2.execute()
    self.assertEqual(len(outputs), 1)
    self.assertEqual(outputs["reshape"].shape, (25, ))
def test_concat_feature(self):
    input0 = Container("input0", Memory((2, 4, 1, 2)))
    inp0_arr = np.array([0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7,
                         7]).reshape((2, 4, 1, 2))
    input0.fill(inp0_arr)
    input1 = Container("input1", Memory((2, 1, 1, 2)))
    inp1_arr = np.array([13, 13, 24, 24]).reshape((2, 1, 1, 2))
    input1.fill(inp1_arr)
    concat = Concatenation("concat", ["input0", "input1"], axis=1)
    layers = [input0, input1, concat]
    network = Network(layers)
    net_out = network.execute()
    real_output = net_out["concat"]
    self.assertEqual(len(net_out), 1)
    self.assertIsNotNone(real_output)
    ref_output = np.concatenate((inp0_arr, inp1_arr), axis=1)
    self.assertEqual(real_output.shape, (2, 5, 1, 2))
    self.assertTrue(np.array_equal(ref_output, real_output))
def test_simple_MAX_0(self):
    input = Container("input", Memory((1, 1, 4, 4)))
    output = Pooling("output", "input", (2, 2), PoolingType.MAX)
    inp_array = np.array(
        [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]).reshape(
            (1, 1, 4, 4))
    input.fill(inp_array)
    network = Network([output, input])
    net_out = network.execute()
    self.assertEqual(len(net_out), 1)
    self.assertEqual(len(net_out["output"].shape), 4)
    out_reference = np.array([6.0, 8.0, 14.0, 16.0]).reshape((1, 1, 2, 2))
    self.assertTrue(np.array_equal(out_reference, net_out["output"]))
def test_simple_MAX_1(self):
    input = Container("input", Memory((2, 2, 4, 4)))
    output = Pooling("output", "input", (2, 2), PoolingType.MAX)
    inp_array = np.arange(2 * 2 * 4 * 4).reshape((2, 2, 4, 4))
    input.fill(inp_array)
    network = Network([output, input])
    net_out = network.execute()
    self.assertEqual(len(net_out), 1)
    self.assertEqual(len(net_out["output"].shape), 4)
    out_reference = np.array(
        [5, 7, 13, 15, 21, 23, 29, 31, 37, 39, 45, 47, 53, 55, 61,
         63]).reshape((2, 2, 2, 2))
    self.assertTrue(np.array_equal(out_reference, net_out["output"]))
def test_generic_many_shapes(self):
    input_shape = (8, 3, 32, 32)
    input = Container("input", Memory(input_shape))
    input_values = np.random.uniform(-2, 2, input_shape)
    input.fill(input_values)
    # Every target shape preserves the element count (8 * 3 * 32 * 32 = 24576).
    shapes = [(8, 3, 1, 1024), (8, 3, 1024, 1), (24, 1, 32, 32),
              (1, 1, 1, 24576)]
    for shape in shapes:
        output = Reshape("output", "input", shape)
        network = Network([output, input])
        net_out = network.execute()
        self.assertEqual(len(net_out), 1)
        self.assertIsNotNone(net_out["output"])
        self.assertEqual(net_out["output"].shape, shape)
def test_multiple_outputs(self):
    input = Container("A", Memory((3, )))
    b = Activation("B", "A", ActivationFunctions.RELU)
    d = Activation("D", "B", ActivationFunctions.RELU)
    f = Activation("F", "B", ActivationFunctions.RELU)
    c = Activation("C", "A", ActivationFunctions.RELU)
    g = Activation("G", "C", ActivationFunctions.RELU)
    e = Activation("E", "A", ActivationFunctions.RELU)
    network = Network([b, d, f, c, g, e, input])
    net_out = network.execute()
    # D, F, G and E feed no further layers, so they are the four outputs.
    self.assertEqual(len(net_out), 4)
    self.assertIsNotNone(net_out["D"])
    self.assertIsNotNone(net_out["F"])
    self.assertIsNotNone(net_out["G"])
    self.assertIsNotNone(net_out["E"])
def test_stride(self):
    input = Container("input", Memory((1, 1, 4, 4)))
    output = Pooling("output", "input", (2, 2), PoolingType.MAX,
                     stride=(3, 3))
    inp_array = np.arange(4 * 4).reshape((1, 1, 4, 4))
    input.fill(inp_array)
    network = Network([output, input])
    net_out = network.execute()
    self.assertEqual(len(net_out), 1)
    self.assertEqual(len(net_out["output"].shape), 4)
    # With stride (3, 3) only a single 2x2 window fits in a 4x4 input,
    # covering values {0, 1, 4, 5}, whose maximum is 5.
    out_reference = np.array([5]).reshape((1, 1, 1, 1))
    self.assertTrue(np.array_equal(out_reference, net_out["output"]))
def test_generic_3d(self):
    shapes = [(3, 32, 32), (10, 1, 1), (1, 2, 3), (5, 224, 224)]
    for activ_func in ActivationFunctions:
        for input_shape in shapes:
            input = Container("input", Memory(input_shape))
            output = Activation("output", "input", activ_func)
            input_values = np.random.uniform(-2, 2, input_shape)
            input.fill(input_values)
            network = Network([output, input])
            net_out = network.execute()
            self.assertEqual(len(net_out), 1)
            self.assertIsNotNone(net_out["output"])
            reference_output = self.get_reference(activ_func, input_values)
            real_output = net_out["output"]
            self.assertTrue(np.array_equal(reference_output, real_output))
from api.network import Network
from api.linear import Linear
from api.container import Container
from api.memory import Memory
import numpy as np

# ---- TEST CONTAINER --------
input = Container("input", Memory((1, 10)))
inp_array = np.array([1, 1, 1, 1, 1, 2, 2, 2, 2, 2]).reshape((1, 10))
input.fill(inp_array)

weights = Container("weights", Memory((2, 10)))
bias = Container("bias", Memory((2, )))
weights_array = np.arange(20).reshape((2, 10))
bias_array = np.zeros((2, ))
weights.fill(weights_array)
bias.fill(bias_array)

fc = Linear("fc", "input", "weights", "bias")
net2 = Network([input, weights, bias, fc], dump_graph=False)
outputs = net2.execute()
print(outputs)
# ---- END --------

print("end tests")
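# ---- SANITY CHECK (sketch) --------
# A hand-checkable reference, assuming Linear computes y = x @ W.T + b
# (the PyTorch weight layout implied by the (2, 10) weight shape):
#   row 0 of W is 0..9   -> 1*(0+1+2+3+4) + 2*(5+6+7+8+9)  = 10 + 70  = 80
#   row 1 of W is 10..19 -> 1*(10+...+14) + 2*(15+...+19)  = 60 + 170 = 230
expected = inp_array @ weights_array.T + bias_array  # [[ 80., 230.]]
assert np.allclose(outputs["fc"], expected), "Linear output mismatch"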
def __create_model(self, weights_folder, dump_graph):
    layers = []

    #input
    self.input_layer = Container("input", Memory((1, 1, 32, 32)))
    layers.append(self.input_layer)

    # Append a separator only if the path does not already end with one.
    # (The previous or-of-inequalities check was always true, since a
    # two-character slice can never equal a single backslash.)
    if not weights_folder.endswith(("/", "\\")):
        weights_folder += "/"

    #first convolution
    weights_1 = Container("weights_1", Memory((6, 1, 5, 5)))
    biases_1 = Container("biases_1", Memory((6, )))
    weights_1.fill(
        np.loadtxt(weights_folder + "conv1.weight").reshape((6, 1, 5, 5)))
    biases_1.fill(np.loadtxt(weights_folder + "conv1.bias").reshape((6, )))
    conv_1 = Convolution("conv_1", "input", "weights_1", "biases_1")
    relu_1 = Activation("activ_1", "conv_1", ActivationFunctions.RELU)
    pool_1 = Pooling("pool_1", "activ_1", (2, 2), PoolingType.MAX)
    layers.extend([weights_1, biases_1, conv_1, relu_1, pool_1])

    #2nd convolution
    weights_2 = Container("weights_2", Memory((16, 6, 5, 5)))
    biases_2 = Container("biases_2", Memory((16, )))
    weights_2.fill(
        np.loadtxt(weights_folder + "conv2.weight").reshape((16, 6, 5, 5)))
    biases_2.fill(
        np.loadtxt(weights_folder + "conv2.bias").reshape((16, )))
    conv_2 = Convolution("conv_2", "pool_1", "weights_2", "biases_2")
    relu_2 = Activation("activ_2", "conv_2", ActivationFunctions.RELU)
    pool_2 = Pooling("pool_2", "activ_2", (2, 2), PoolingType.MAX)
    layers.extend([weights_2, biases_2, conv_2, relu_2, pool_2])

    #reshape
    reshape = Reshape("reshape", "pool_2", (1, 400))
    layers.append(reshape)

    #first fc
    weights_fc_1 = Container("weights_fc_1", Memory((120, 400)))
    biases_fc_1 = Container("biases_fc_1", Memory((120, )))
    weights_fc_1.fill(
        np.loadtxt(weights_folder + "fc1.weight").reshape((120, 400)))
    biases_fc_1.fill(
        np.loadtxt(weights_folder + "fc1.bias").reshape((120, )))
    fc_1 = Linear("fc_1", "reshape", "weights_fc_1", "biases_fc_1")
    relu_fc_1 = Activation("relu_fc_1", "fc_1", ActivationFunctions.RELU)
    layers.extend([weights_fc_1, biases_fc_1, fc_1, relu_fc_1])

    #second fc
    weights_fc_2 = Container("weights_fc_2", Memory((84, 120)))
    biases_fc_2 = Container("biases_fc_2", Memory((84, )))
    weights_fc_2.fill(
        np.loadtxt(weights_folder + "fc2.weight").reshape((84, 120)))
    biases_fc_2.fill(
        np.loadtxt(weights_folder + "fc2.bias").reshape((84, )))
    fc_2 = Linear("fc_2", "relu_fc_1", "weights_fc_2", "biases_fc_2")
    relu_fc_2 = Activation("relu_fc_2", "fc_2", ActivationFunctions.RELU)
    layers.extend([weights_fc_2, biases_fc_2, fc_2, relu_fc_2])

    #third fc
    weights_fc_3 = Container("weights_fc_3", Memory((10, 84)))
    biases_fc_3 = Container("biases_fc_3", Memory((10, )))
    weights_fc_3.fill(
        np.loadtxt(weights_folder + "fc3.weight").reshape((10, 84)))
    biases_fc_3.fill(
        np.loadtxt(weights_folder + "fc3.bias").reshape((10, )))
    fc_3 = Linear("output", "relu_fc_2", "weights_fc_3", "biases_fc_3")
    layers.extend([weights_fc_3, biases_fc_3, fc_3])

    return Network(layers, dump_graph=dump_graph)
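# A minimal inference sketch for the LeNet-style model above (hypothetical
# caller; assumes execute() returns a dict keyed by layer name, as the tests
# in this repo do):
#
#   network = self.__create_model(weights_folder, dump_graph=False)
#   self.input_layer.fill(image)                     # (1, 1, 32, 32) grayscale
#   net_out = network.execute()
#   prediction = int(np.argmax(net_out["output"]))   # index of the top class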