def test_Distribute():
    """Verify the master node's threaded conv layer matches a direct vecConv call."""
    # Two-layer network description: a conv layer followed by max pooling.
    cnn_layers = [
        {"l_type": "conv", "kernel": "W1", "hparams": {"stride": 1, "pad": 0}},
        {"l_type": "max", "hparams": {"stride": 1, "f": 2}},
    ]
    worker_nodes = [{"ip": "localhost", "port": 9998}]
    edge_node = {"ip": "localhost", "port": 9000}
    placeholder_image = np.array([1, 2])

    # Initialize the master node with the network and topology above.
    master_node = Master(cnn_layers, worker_nodes, edge_node, placeholder_image)

    np.random.seed(1)
    batch = np.random.randn(1, 3, 3, 1)

    # Reference result computed directly on the first (and only) sample.
    expected = vecConv(batch[0, :, :, :], kernels["W1"], {"stride": 1, "pad": 0})
    # Result produced through the master node's distributed compute path.
    actual = master_node.thread_Compute(batch, cnn_layers[0])
    np.testing.assert_array_equal(actual, expected)
def test_conv():
    """Compare the vectorized convolution against the reference conv_forward."""
    np.random.seed(1)
    activations = np.random.randn(1, 3, 3, 3)
    hparameters = {"pad": 0, "stride": 1}
    weights = np.ones((2, 2, 3, 1))
    biases = np.zeros((1, 1, 1, 1))

    # Reference implementation (batched) on the single-sample batch.
    reference = conv_forward(activations, weights, biases, hparameters)
    print(reference.shape)
    print(reference[0, :, :, :])

    # Vectorized implementation operates on one unbatched sample.
    vectorized = vecConv(activations[0, :, :, :], weights, hparameters)
    print(vectorized.shape)
    print(vectorized)

    np.testing.assert_array_equal(vectorized, reference[0, :, :, :])
def layerResult(self, layer, X, pos):
    """Apply one CNN layer to X, restricted to the channel slice `pos`.

    layer -- dict with "l_type" ("conv" or a pooling mode), "hparams",
             and for conv layers a "kernel" key into the global `kernels`
             table (numpy array of shape (f, f, n_C_prev, n_C)).
    X     -- numpy array of shape (n_H_prev, n_W_prev, n_C_prev).
    pos   -- (start, end) channel range this node is responsible for.

    For a conv layer the kernel is sliced along its output-channel axis;
    for pooling the input itself is sliced along its channel axis
    (batch size of 1).
    """
    hparam = layer["hparams"]
    if layer["l_type"] != "conv":
        # Any non-conv l_type is treated as a pooling mode by Pooling().
        return Pooling(X[:, :, pos[0]:pos[1]], hparam, layer["l_type"])
    kernel_slice = kernels[layer["kernel"]][:, :, :, pos[0]:pos[1]]
    return vecConv(X, kernel_slice, hparam)
data_variable={"data":X,"pos":a,"layer":layer}
a=(start,end)
layer={"l_type":"conv","kernel":"W1","hparams":{"stride":1,"pad":0}}
pooling layer testing
layer2={"l_type":"max","hparams":{"stride":1,"f":2}}
"""
# Worker-node request body: serve one compute request per accepted connection.
c, addr = s.accept()  # receive data from client
tic = time.process_time()
# NOTE(review): receive_array presumably deserializes a framed numpy payload
# from the socket — confirm its framing matches send() on the master side.
data_variable = receive_array(data, payload_size, c)
print('Connect with', addr, data_variable["data"].shape)
#imgout=y.conv_forward(data_variable["data"], w.W1[:,:,:,data_variable["pos"]:], w.b1[:,:,:,data_variable["pos"]:],data_variable["hpara"])
#out={"data":imgout}
X = data_variable["data"]
hparam = data_variable["layer"]["hparams"]
mode = data_variable["layer"]["l_type"]
if (mode == "conv"):
    # Conv request: slice the named kernel along its output-channel axis
    # to the (start, end) range this worker was assigned.
    pos = data_variable["pos"]
    w = kernels[data_variable["layer"]["kernel"]]
    out = vecConv(X, w[:, :, :, pos[0]:pos[1]], hparam)
else:
    # Any non-conv l_type is treated as a pooling mode.
    out = Pooling(X, hparam, mode)
dout = {"data": out}
send(c, dout)
toc = time.process_time()
print("Computation time for conv part2 = " + str(1000 * (toc - tic)) + "ms")
#send data to client
#c.send(bytes("Welcome to server",'utf-8'))
c.close()
print('Connect with', addr)
X = data_variable["data"]
index = data_variable["index"]
# FOG node computation goes here: run the remaining CNN layers, resuming
# from the layer index the client requested.
for index, layer in enumerate(CNN[index:], index):
    mode = layer["l_type"]
    hparam = layer["hparams"]
    # check offload — placeholder switch: decide whether to hand the rest
    # of the network off to the cloud instead of computing locally.
    offload = False
    if (offload):
        #offload to the cloud
        #X=
        break
    else:
        if (mode == 'conv'):
            # NOTE(review): the literal keys "kernel" and "bias" look
            # suspicious — the other handlers index the table with
            # kernels[layer["kernel"]]. Confirm the kernels table really
            # has entries named "kernel" and "bias", otherwise this
            # should probably be kernels[layer["kernel"]] (and a
            # per-layer bias lookup).
            w = kernels["kernel"]
            X = vecConv(X, w, hparam)
            X += kernels["bias"]
            X = ActivationFunc(X, layer["act"])
        else:
            X = Pooling(X, hparam, mode)
# X - output send to roof node
dout = {"data": X}
send(c, dout)
toc = time.process_time()
print("Computation time FOG server = " + str(1000 * (toc - tic)) + "ms")
c.close()
from vec import vecConv
import numpy as np
import time

# Benchmark vecConv on a fixed 64x64 spatial input against a bank of
# 256 9x9 filters, halving the input channel depth on each run.
channel = [128, 64, 32, 16, 8, 4, 2, 1]
np.random.seed(1)
for depth in channel:
    image = np.random.randn(64, 64, depth)
    filters = np.random.randn(9, 9, depth, 256)
    tic = time.process_time()
    vecConv(image, filters, {"pad": 0, "stride": 1})
    elapsed_ms = 1000 * (time.process_time() - tic)
    print("i = " + str(depth) + " Computation time for conv= " + str(elapsed_ms) + "ms")