def create_container_test():
    net = Network(name='test', driver='bridge')
    container = Container.create_container(network=net,
                                           name='ttttt',
                                           url='114.212.87.52:2376',
                                           image='ubuntu',
                                           command='/bin/sleep 30',
                                           version='1.21')
    print(container.ip)
    print(container.cmd)
    print(container.ports)
def test_simple(self):
    input = Container("input", Memory((3, )))
    output = Activation("output", "input", ActivationFunctions.NONE)
    network = Network([output, input])
    net_out = network.execute()
    self.assertEqual(len(net_out), 1)
    self.assertIsNotNone(net_out["output"])
def test_search_text():
    note1 = Note(
        "Joseph Conrad",
        "Modernist author.",
        attrs={
            "birth": "1857",
            "death": "1924",
            "nationality": "British-Polish",
            "novels": ["Nostromo", "The Secret Agent"]
        },
    )
    note2 = Note(
        "Charles Dickens",
        "Famous Victorian author.",
        attrs={
            "birth": "1812",
            "death": "1870",
            "nationality": "British",
            "novels": ["Great Expectations"]
        },
    )
    container = Container(notes=[note1, note2])
    condition = NumberConditional(target="1850", condition="le")
    query = container.search_child_note_attrs(condition, attrs=["birth"])
    assert len(query) == 1
def test_simple_RELU(self): input = Container("input", Memory((3,))) output = Activation("output", "input", ActivationFunctions.RELU) input.fill([-5, 10, -0.5]) network = Network([output, input]) net_out = network.execute() self.assertEqual(len(net_out), 1) self.assertIsNotNone(net_out["output"]) self.assertEqual(net_out["output"], [0, 10, 0])
def create_noimage():
    url = '114.212.87.52:2376'
    version = '1.21'
    volume = None
    network = None
    dic = {}
    dic['container_name'] = 'test'
    con = Container(url, version, dic, volume, network)
def CreateContainer(self, request, context):
    notes = [
        document.children[note_id] for note_id in request.child_note_ids
    ]
    container = Container(attrs=request.attrs, notes=notes)
    document.children[container.id] = container
    return tasks_pb2.ContainerReply(id=container.id,
                                    attrs=container.attrs,
                                    child_note_ids=request.child_note_ids)
def __create_model(self, weights_folder, dump_graph): layers = [] #input self.input_layer = Container("input", Memory((1, 57))) self.input_hidden_layer = Container("in_hidden", Memory((1, 128))) layers.extend([self.input_layer, self.input_hidden_layer]) concat = Concatenation("concat", ["input", "in_hidden"], 1) layers.append(concat) if weights_folder[-1] != "/" or \ weights_folder[-2:] != "\\": weights_folder += "/" weights_fc_1 = Container("weights_fc_1", Memory((128, 185))) biases_fc_1 = Container("biases_fc_1", Memory((128,))) weights_fc_1.fill(np.loadtxt(weights_folder + "i2h.weight").reshape((128, 185))) biases_fc_1.fill(np.loadtxt(weights_folder + "i2h.bias").reshape((128,))) out_hidden = Linear("i2h", "concat", "weights_fc_1", "biases_fc_1") layers.extend([out_hidden, weights_fc_1, biases_fc_1]) weights_fc_2 = Container("weights_fc_2", Memory((18, 185))) biases_fc_2 = Container("biases_fc_2", Memory((18,))) weights_fc_2.fill(np.loadtxt(weights_folder + "i2o.weight").reshape((18, 185))) biases_fc_2.fill(np.loadtxt(weights_folder + "i2o.bias").reshape((18,))) i2o = Linear("i2o", "concat", "weights_fc_2", "biases_fc_2") layers.extend([i2o, weights_fc_2, biases_fc_2]) pre_out_reshape = Reshape("pre_out_reshape", "i2o", (1, 1, 1, 18)) output = Softmax("output", "pre_out_reshape", do_log=True) layers.extend([pre_out_reshape, output]) return Network(layers, dump_graph=dump_graph)
def test_simple_batch_8(self):
    input = Container("input", Memory((8, 2, 4, 4)))
    output = Softmax("output", "input")
    inp_array = np.arange(8 * 2 * 4 * 4).reshape((8, 2, 4, 4))
    input.fill(inp_array)
    network = Network([output, input])
    net_out = network.execute()
    self.assertEqual(len(net_out), 1)
    self.assertEqual(len(net_out["output"].shape), 4)
    out = net_out["output"]
    sum = np.sum(out)
    self.assertAlmostEqual(8.0, sum)
def test_simple_batch_1(self):
    input = Container("input", Memory((1, 1, 4, 4)))
    output = Softmax("output", "input")
    inp_array = np.array(
        [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]).reshape(
            (1, 1, 4, 4))
    input.fill(inp_array)
    network = Network([output, input])
    net_out = network.execute()
    self.assertEqual(len(net_out), 1)
    self.assertEqual(len(net_out["output"].shape), 4)
    self.assertAlmostEqual(1.0, np.sum(net_out["output"]))
def test_concat_feature(self):
    input0 = Container("input0", Memory((2, 4, 1, 2)))
    inp0_arr = np.array([0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7,
                         7]).reshape((2, 4, 1, 2))
    input0.fill(inp0_arr)
    input1 = Container("input1", Memory((2, 1, 1, 2)))
    inp1_arr = np.array([13, 13, 24, 24]).reshape((2, 1, 1, 2))
    input1.fill(inp1_arr)
    concat = Concatenation("concat", ["input0", "input1"], axis=1)
    layers = [input0, input1, concat]
    network = Network(layers)
    net_out = network.execute()
    real_output = net_out["concat"]
    self.assertEqual(len(net_out), 1)
    self.assertIsNotNone(real_output)
    ref_output = np.concatenate((inp0_arr, inp1_arr), axis=1)
    self.assertEqual(real_output.shape, (2, 5, 1, 2))
    self.assertTrue(np.array_equal(ref_output, real_output))
def test_simple_MAX_1(self):
    input = Container("input", Memory((2, 2, 4, 4)))
    output = Pooling("output", "input", (2, 2), PoolingType.MAX)
    inp_array = np.arange(2 * 2 * 4 * 4).reshape((2, 2, 4, 4))
    input.fill(inp_array)
    network = Network([output, input])
    net_out = network.execute()
    self.assertEqual(len(net_out), 1)
    self.assertEqual(len(net_out["output"].shape), 4)
    out_reference = np.array(
        [5, 7, 13, 15, 21, 23, 29, 31, 37, 39, 45, 47, 53, 55, 61,
         63]).reshape((2, 2, 2, 2))
    self.assertTrue(np.array_equal(out_reference, net_out["output"]))
def test_simple_RELU(self): input = Container("input", Memory((1, 1, 5, 5))) inp_array = np.array([ 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5 ]).reshape((1, 1, 5, 5)) input.fill(inp_array) reshape = Reshape("reshape", "input", (25, )) net2 = Network([input, reshape]) outputs = net2.execute() self.assertEqual(len(outputs), 1) self.assertEqual(outputs["reshape"].shape, (25, ))
def test_simple_MAX_0(self):
    input = Container("input", Memory((1, 1, 4, 4)))
    output = Pooling("output", "input", (2, 2), PoolingType.MAX)
    inp_array = np.array(
        [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]).reshape(
            (1, 1, 4, 4))
    input.fill(inp_array)
    network = Network([output, input])
    net_out = network.execute()
    self.assertEqual(len(net_out), 1)
    self.assertEqual(len(net_out["output"].shape), 4)
    out_reference = np.array([6.0, 8.0, 14.0, 16.0]).reshape((1, 1, 2, 2))
    self.assertTrue(np.array_equal(out_reference, net_out["output"]))
def test_stride(self):
    input = Container("input", Memory((1, 1, 4, 4)))
    output = Pooling("output", "input", (2, 2), PoolingType.MAX,
                     stride=(3, 3))
    inp_array = np.arange(4 * 4).reshape((1, 1, 4, 4))
    input.fill(inp_array)
    network = Network([output, input])
    net_out = network.execute()
    self.assertEqual(len(net_out), 1)
    self.assertEqual(len(net_out["output"].shape), 4)
    out_reference = np.array([5]).reshape((1, 1, 1, 1))
    self.assertTrue(np.array_equal(out_reference, net_out["output"]))
def test_generic_many_shapes(self):
    input_shape = (8, 3, 32, 32)
    input = Container("input", Memory(input_shape))
    input_values = np.random.uniform(-2, 2, input_shape)
    input.fill(input_values)
    shapes = [(8, 3, 1, 1024), (8, 3, 1024, 1), (24, 1, 32, 32),
              (1, 1, 1, 24576)]
    for shape in shapes:
        output = Reshape("output", "input", shape)
        network = Network([output, input])
        net_out = network.execute()
        self.assertEqual(len(net_out), 1)
        self.assertIsNotNone(net_out["output"])
        self.assertEqual(net_out["output"].shape, shape)
def test_multiple_outputs(self):
    input = Container("A", Memory((3, )))
    b = Activation("B", "A", ActivationFunctions.RELU)
    d = Activation("D", "B", ActivationFunctions.RELU)
    f = Activation("F", "B", ActivationFunctions.RELU)
    c = Activation("C", "A", ActivationFunctions.RELU)
    g = Activation("G", "C", ActivationFunctions.RELU)
    e = Activation("E", "A", ActivationFunctions.RELU)
    network = Network([b, d, f, c, g, e, input])
    net_out = network.execute()
    self.assertEqual(len(net_out), 4)
    self.assertIsNotNone(net_out["D"])
    self.assertIsNotNone(net_out["F"])
    self.assertIsNotNone(net_out["G"])
    self.assertIsNotNone(net_out["E"])
def test_generic_3d(self):
    shapes = [(3, 32, 32), (10, 1, 1), (1, 2, 3), (5, 224, 224)]
    for activ_func in ActivationFunctions:
        for input_shape in shapes:
            input = Container("input", Memory(input_shape))
            output = Activation("output", "input", activ_func)
            input_values = np.random.uniform(-2, 2, input_shape)
            input.fill(input_values)
            network = Network([output, input])
            net_out = network.execute()
            self.assertEqual(len(net_out), 1)
            self.assertIsNotNone(net_out["output"])
            reference_output = self.get_reference(activ_func, input_values)
            real_output = net_out["output"]
            self.assertTrue(np.array_equal(reference_output, real_output))
def load_document(filename):
    with open(filename, 'r') as infile:
        data = json.load(infile)
    document = Document()
    for obj in data:
        if obj["type"] == "note":
            document.append(Note.from_dict(obj))
        elif obj["type"] == "container":
            document.append(Container.from_dict(obj))
        elif obj["type"] == "connection":
            document.append(Connection.from_dict(obj))
        elif obj["type"] == "string_conditional":
            document.append(StringConditional.from_dict(obj))
        elif obj["type"] == "number_conditional":
            document.append(NumberConditional.from_dict(obj))
    return document
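# Hedged counterpart sketch for load_document above: writing a Document back
# out as the JSON layout it expects, i.e. a list of objects each carrying a
# "type" discriminator. The to_dict() method and iterating document.children
# as a dict of elements are assumptions for illustration, not confirmed API.
import json

def save_document(document, filename):
    data = [obj.to_dict() for obj in document.children.values()]
    with open(filename, 'w') as outfile:
        json.dump(data, outfile, indent=2)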
def test_rule():
    note = Note(
        "Joseph Conrad",
        "Modernist author.",
        attrs={
            "birth": "1857",
            "death": "1924",
            "nationality": "British-Polish",
            "novels": ["Nostromo", "The Secret Agent"]
        },
    )
    container = Container(notes=[note])
    rule = Rule(target="title", add_text="Author - ",
                effect_location="prepend")
    container.add_rule(rule)
    assert note.attrs["title"] == "Author - Joseph Conrad"
def create_dict_test():
    url = '114.212.87.52:2376'
    version = '1.21'
    volume = None
    network = None
    dic = {}
    dic['image'] = 'training/webapp'
    dic['container_name'] = 'test'
    # dic['command'] = '/bin/sleep 30'
    dic['hostname'] = 'testhostname'
    dic['mem_limit'] = '24m'
    dic['ports'] = [80, 8000]
    dic['cpu_shares'] = 3
    volume = Volume(['/home/monkey/fuli:/fuli:rw', '/home/monkey/fire:/fire'])
    network = Network('test', 'bridge')
    dic['privileged'] = True
    con = Container(url, version, dic, volume, network)
    con.create()
    con.start()
from api.container import Container

container = Container.create_container(url='114.212.87.52:2376',
                                       image='ubuntu',
                                       command='/bin/sleep 30',
                                       version='1.21')
print(container.ip)
print(container.cmd)
print(container.ports)
from api.network import Network
from api.linear import Linear
from api.container import Container
from api.memory import Memory
import numpy as np

# ---- TEST CONTAINER --------
input = Container("input", Memory((1, 10)))
inp_array = np.array([1, 1, 1, 1, 1, 2, 2, 2, 2, 2]).reshape((1, 10))
input.fill(inp_array)

weights = Container("weights", Memory((2, 10)))
bias = Container("bias", Memory((2, )))
weights_array = np.arange(20).reshape((2, 10))
bias_array = np.zeros((2, ))
weights.fill(weights_array)
bias.fill(bias_array)

fc = Linear("fc", "input", "weights", "bias")

net2 = Network([input, weights, bias, fc], dump_graph=False)
outputs = net2.execute()
print(outputs)
# ---- END --------

print("end tests")
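# Hedged sanity check for the fc result printed above: assuming Linear
# computes input @ weights.T + bias (consistent with the (1, 10) input and
# (2, 10) weights shapes) and that execute() keys results by layer name,
# the expected output is [[80., 230.]].
expected = inp_array @ weights_array.T + bias_array
np.testing.assert_allclose(outputs["fc"], expected)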
def get_container(url, version, name):
    con = Container.get_container(url, version, name)
    print(con.ip)
    print(con.cmd)
    print(con.network.name)
def __create_model(self, weights_folder, dump_graph): layers = [] #input self.input_layer = Container("input", Memory((1, 1, 32, 32))) layers.append(self.input_layer) if weights_folder[-1] != "/" or \ weights_folder[-2:] != "\\": weights_folder += "/" #first convolution weights_1 = Container("weights_1", Memory((6, 1, 5, 5))) biases_1 = Container("biases_1", Memory((6, ))) weights_1.fill( np.loadtxt(weights_folder + "conv1.weight").reshape((6, 1, 5, 5))) biases_1.fill(np.loadtxt(weights_folder + "conv1.bias").reshape((6, ))) conv_1 = Convolution("conv_1", "input", "weights_1", "biases_1") relu_1 = Activation("activ_1", "conv_1", ActivationFunctions.RELU) pool_1 = Pooling("pool_1", "activ_1", (2, 2), PoolingType.MAX) layers.extend([weights_1, biases_1, conv_1, relu_1, pool_1]) #2nd convolution weights_2 = Container("weights_2", Memory((16, 6, 5, 5))) biases_2 = Container("biases_2", Memory((16, ))) weights_2.fill( np.loadtxt(weights_folder + "conv2.weight").reshape((16, 6, 5, 5))) biases_2.fill( np.loadtxt(weights_folder + "conv2.bias").reshape((16, ))) conv_2 = Convolution("conv_2", "pool_1", "weights_2", "biases_2") relu_2 = Activation("activ_2", "conv_2", ActivationFunctions.RELU) pool_2 = Pooling("pool_2", "activ_2", (2, 2), PoolingType.MAX) layers.extend([weights_2, biases_2, conv_2, relu_2, pool_2]) #reshape reshape = Reshape("reshape", "pool_2", (1, 400)) layers.append(reshape) #first fc weights_fc_1 = Container("weights_fc_1", Memory((120, 400))) biases_fc_1 = Container("biases_fc_1", Memory((120, ))) weights_fc_1.fill( np.loadtxt(weights_folder + "fc1.weight").reshape((120, 400))) biases_fc_1.fill( np.loadtxt(weights_folder + "fc1.bias").reshape((120, ))) fc_1 = Linear("fc_1", "reshape", "weights_fc_1", "biases_fc_1") relu_fc_1 = Activation("relu_fc_1", "fc_1", ActivationFunctions.RELU) layers.extend([weights_fc_1, biases_fc_1, fc_1, relu_fc_1]) #second fc weights_fc_2 = Container("weights_fc_2", Memory((84, 120))) biases_fc_2 = Container("biases_fc_2", Memory((84, ))) weights_fc_2.fill( np.loadtxt(weights_folder + "fc2.weight").reshape((84, 120))) biases_fc_2.fill( np.loadtxt(weights_folder + "fc2.bias").reshape((84, ))) fc_2 = Linear("fc_2", "relu_fc_1", "weights_fc_2", "biases_fc_2") relu_fc_2 = Activation("relu_fc_2", "fc_2", ActivationFunctions.RELU) layers.extend([weights_fc_2, biases_fc_2, fc_2, relu_fc_2]) #third fc weights_fc_3 = Container("weights_fc_3", Memory((10, 84))) biases_fc_3 = Container("biases_fc_3", Memory((10, ))) weights_fc_3.fill( np.loadtxt(weights_folder + "fc3.weight").reshape((10, 84))) biases_fc_3.fill( np.loadtxt(weights_folder + "fc3.bias").reshape((10, ))) fc_3 = Linear("output", "relu_fc_2", "weights_fc_3", "biases_fc_3") layers.extend([weights_fc_3, biases_fc_3, fc_3]) return Network(layers, dump_graph=dump_graph)
def create_container(ctx, note_ids):
    notes = [ctx[note_id] for note_id in note_ids]
    container = Container(notes=notes)
    ctx[container.id] = container