def instantiate_comms(self):
    """Construct the Communication object from the master config.

    Each optional section ('register_map', 'serial_config') is a pair:
    element 0 is an enable flag ("1" = enabled) and element 1 is the
    section payload.  Only enabled sections are forwarded as keyword
    arguments; with none enabled, Communication() takes its defaults.
    """
    enabled_kwargs = {}
    for section in ('register_map', 'serial_config'):
        entry = self.__master_config_dict[section]
        if int(entry[0]) == 1:
            enabled_kwargs[section] = entry[1]
    self.a = Communication(**enabled_kwargs)
class AllReduceAdvancedExample:
    """Allreduce demo over a small per-rank numpy array, combining the
    same input with both the MAX and SUM operators."""

    # Shared communicator wrapper (one per process).
    comms = Communication.Communication()

    def execute(self):
        """Build a 2-element slice per rank, allreduce it with MAX and
        with SUM, and print both results from rank 0."""
        comm = self.comms.comm
        rank = comm.Get_rank()
        size = comm.Get_size()
        per_rank = 2
        lo = rank * per_rank + 1
        hi = (rank + 1) * per_rank
        contribution = np.linspace(lo, hi, per_rank, dtype='i')
        print("Advanced: Input :" + str(contribution) + " From Rank : " + str(rank))
        # Result buffers for the two reductions.
        reduced_max = np.empty(per_rank, 'i')
        reduced_sum = np.empty(per_rank, 'i')
        mpi = self.comms.mpi
        self.comms.allreduce(input=contribution, output=reduced_max,
                             op=mpi.MAX, dtype=mpi.INT, root=0)
        self.comms.allreduce(input=contribution, output=reduced_sum,
                             op=mpi.SUM, dtype=mpi.INT, root=0)
        if rank == 0:
            print("Advanced: Output Max : " + str(reduced_max) + ", from Rank " + str(rank) + "\n")
            print("Advanced: Output Sum : " + str(reduced_sum) + ", from Rank " + str(rank) + "\n")
class AllReduceExample:
    """Minimal allreduce demo: every rank contributes its own rank id."""

    # Shared communicator wrapper (one per process).
    comms = Communication.Communication()

    def execute(self):
        """Allreduce the scalar rank id with MAX and SUM; rank 0 prints
        both reduced values."""
        rank = self.comms.comm.Get_rank()
        size = self.comms.comm.Get_size()
        contribution = np.array(rank, dtype='i')
        print("Simple: Input :" + str(contribution) + " From Rank : " + str(rank))
        # Scalar result buffers for the two reductions.
        reduced_max = np.array(0, 'i')
        reduced_sum = np.array(0, 'i')
        mpi = self.comms.mpi
        self.comms.allreduce(input=contribution, output=reduced_max, op=mpi.MAX, dtype=mpi.INT)
        self.comms.allreduce(input=contribution, output=reduced_sum, op=mpi.SUM, dtype=mpi.INT)
        if rank == 0:
            print("Simple: Output Max : " + str(reduced_max) + ", from Rank " + str(rank) + "\n")
            print("Simple: Output Sum : " + str(reduced_sum) + ", from Rank " + str(rank) + "\n")
def example2(self):
    """Micro-benchmark of allreduce(SUM): reduce the scalar rank id
    ``max_itr`` times and report the elapsed time from rank 0.

    Fixes over the original:
    - removed a second, never-used ``Communication.Communication()``
      instance (all calls go through the class-level ``self.comms``);
    - removed the unused ``size`` local and the dead ``k = 1``
      placeholder plus commented-out prints.
    """
    max_itr = 10000
    rank = self.comms.comm.Get_rank()
    send_value = np.array(rank, dtype='i')
    print("Simple: Input :" + str(send_value) + " From Rank : " + str(rank))
    exec_time = 0
    exec_time -= time.time()
    for i in range(0, max_itr):
        output_sum = np.array(0, 'i')
        self.comms.allreduce(input=send_value, output=output_sum,
                             op=self.comms.mpi.SUM, dtype=self.comms.mpi.INT)
    # Stop the clock once, after the whole loop.
    exec_time += time.time()
    if rank == 0:
        # NOTE(review): the original divides by 10, not max_itr;
        # preserved as-is -- confirm whether a per-iteration average
        # (exec_time / max_itr) was intended.
        exec_time = exec_time / 10
        print("Execution Time : ", exec_time)
class BcastRecvExample:
    """Broadcast a 5-element int array from rank 0 to every rank."""

    # Shared communicator wrapper (one per process).
    comms = Communication.Communication()

    def example(self):
        """Rank 0 supplies the data; every other rank receives into an
        uninitialised buffer of the same length."""
        rank = self.comms.comm.Get_rank()
        buf = (np.array([0, 1, 2, 3, 4], dtype='i')
               if rank == 0
               else np.empty(5, dtype='i'))
        self.comms.bcast(input=buf, dtype=self.comms.mpi.INT, root=0)
        print("Receiving Data : " + str(buf) + ", from Rank " + str(rank) + "\n")
class ISendIRecvExample:
    """Non-blocking point-to-point demo: rank 0 isends five ints to
    rank 1, which irecvs them."""

    # Shared communicator wrapper (one per process).
    comms = Communication.Communication()

    def example(self):
        """Rank 0 posts the isend; rank 1 posts the irecv and prints
        the received object's type and contents."""
        rank = self.comms.comm.Get_rank()
        if rank == 0:
            payload = np.array([0, 1, 2, 3, 4])
            self.comms.isend(input=[payload, self.comms.mpi.INT], dest=1, tag=11)
            print("Sending Data : " + str(payload) + ", from Rank " + str(rank) + "\n")
        elif rank == 1:
            received = self.comms.irecv(source=0, tag=11)
            print(type(received))
            print("Receiving Data : " + str(received) + ", from Rank " + str(rank) + "\n")
def svm_psgd(X, y):
    """Parallel SGD for a linear SVM (hinge loss, 1/epoch-decayed L2).

    Each rank trains on an equal contiguous partition of (X, y); the
    per-sample gradients are allreduce-summed, rank 0 applies the
    averaged update to w, and w is broadcast back to all ranks.

    Fix over the original: ``m / size`` is a float under Python 3 and
    float slice indices raise TypeError -- use integer division.

    Returns:
        (rank, w): this process's rank and the final weight vector.
    """
    comms = Communication.Communication()
    rank = comms.comm.Get_rank()
    size = comms.comm.Get_size()
    m = len(X)
    partition_size = m // size  # integer division: used as slice bounds
    eta = 1
    T = 10
    # Fewer epochs in the distributed case (original behavior).
    epochs = 5 if size > 1 else T
    print("World Size : ", size, epochs)
    start = rank * partition_size
    end = start + partition_size
    X_p = X[start:end, :]
    y_p = y[start:end]
    n_features = len(X_p[0])
    w = np.zeros(n_features, 'f')
    grad = np.zeros(n_features, 'f')
    grad_r = np.zeros(n_features, 'f')
    # NOTE(review): range(1, epochs) runs epochs-1 passes -- preserved
    # as-is; confirm whether range(1, epochs + 1) was intended.
    for epoch in range(1, epochs):
        for i, x in enumerate(X_p):
            # Hinge-loss subgradient with decaying L2 regularization.
            if (y_p[i] * np.dot(X_p[i], w)) < 1:
                grad = (X_p[i] * y_p[i]) + (-2 * (1 / epoch) * w)
            else:
                grad = -2 * (1 / epoch) * w
            # Sum this sample's gradient across all ranks.
            comms.allreduce(input=grad, output=grad_r, op=comms.mpi.SUM,
                            dtype=comms.mpi.FLOAT)
            print(rank, epoch, grad, grad_r, w)
            if rank == 0:
                # Average over total samples (distributed) or ranks.
                if size > 1:
                    g_global = grad_r / (size * partition_size)
                else:
                    g_global = grad_r / size
                w = w + eta * g_global
            # Share rank 0's updated weights with every rank.
            comms.bcast(input=w, dtype=comms.mpi.FLOAT, root=0)
    return rank, w
class ScatterExample:
    """Scatter rows of a fixed 2x8 int table from rank 0 to the ranks."""

    # Shared communicator wrapper (one per process).
    comms = Communication.Communication()

    def example(self):
        """Scatter the table and print what each rank receives; rank 0
        also prints the full table being scattered."""
        rank = self.comms.comm.Get_rank()
        size = self.comms.comm.Get_size()
        chunk_len = 8
        table = np.array([[1, 2, 3, 4, 5, 6, 7, 8],
                          [1, 3, 5, 7, 9, 11, 13, 15]], np.int32)
        recvbuf = np.empty(chunk_len, dtype='i')
        self.comms.scatter(input=table, recvbuf=recvbuf,
                           dtype=self.comms.mpi.INT, root=0)
        if rank == 0:
            print("Scattering Data : " + str(table) + ", from Rank " + str(rank) + "\n")
        print("Receiving Data : " + str(recvbuf) + ", from Rank " + str(rank) + "\n")
class SendRecvExample:
    """Blocking point-to-point demo: rank 0 sends five ints to rank 1."""

    # Shared communicator wrapper (one per process).
    comms = Communication.Communication()

    def example(self):
        """Rank 0 sends a fixed 5-int array; rank 1 receives and prints."""
        rank = self.comms.comm.Get_rank()
        if rank == 0:
            payload = np.array([0, 1, 2, 3, 4], dtype='i')
            self.comms.send(input=payload, dtype=self.comms.mpi.INT, dest=1, tag=11)
            print("Sending Data : " + str(payload) + ", from Rank " + str(rank) + "\n")
        elif rank == 1:
            received = self.comms.recv(source=0, dtype=self.comms.mpi.INT, tag=11, size=5)
            print("Receiving Data : " + str(received) + ", from Rank " + str(rank) + "\n")
class GatherExample:
    """Gather two ints from every rank into one buffer on rank 0."""

    # Shared communicator wrapper (one per process).
    comms = Communication.Communication()

    def example(self):
        """Each rank builds its 2-element slice; rank 0 gathers and
        prints the combined buffer."""
        rank = self.comms.comm.Get_rank()
        size = self.comms.comm.Get_size()
        per_rank = 2
        first = rank * per_rank + 1
        last = (rank + 1) * per_rank
        sendbuf = np.linspace(first, last, per_rank, dtype='i')
        print("Sendbuf :" + str(sendbuf) + " From Rank : " + str(rank))
        # Only the root needs a receive buffer.
        recvbuf = np.empty(per_rank * size, dtype='i') if rank == 0 else None
        self.comms.gather(sendbuf=sendbuf, recvbuf=recvbuf,
                          dtype=self.comms.mpi.INT, root=0)
        if rank == 0:
            print("Receiving Data : " + str(recvbuf) + ", from Rank " + str(rank) + "\n")
exp_name + " Parallel SGD SVM Accuracy : " + str(acc) + "%" + ", " + str(time) + ", Epochs : " + str(epochs) + ", " + str(beta1) + ", " + str(beta2)) fp = open( "logs/psgd/adam/" + socket.gethostname() + "_" + exp_name + "_batch_size_" + str(batch_size) + "_cores_" + str(world_size) + "_psgd_adam_pcd_results.txt", "a") # fp.write("alpha : " + str(self.alpha) + ", epochs : " + str(self.epochs) + ", accuracy : " + str(self.acc) + "%" + ", time : " + str(self.training_time) + " s\n") fp.write( str(epochs) + ", " + str(batch_size) + ", " + str(beta1) + ", " + str(beta2) + ", " + str(acc) + ", " + str(time) + "\n") fp.close() comms = Communication.Communication() rank = comms.comm.Get_rank() world_size = comms.comm.Get_size() T = 200 M = world_size DATA_SET = "cod-rna" DATA_SOURCE = Constant.Constant.SOURCE_COD_RNA FEATURES = Constant.Constant.COD_RNA_F SAMPLES = Constant.Constant.COD_RNA_S SPLIT = Constant.Constant.SPLIT_COD_RNA TRAINING_FILE = Constant.Constant.TRAINING_FILE_COD_RNA TESTING_FILE = Constant.Constant.TESTING_FILE_COD_RNA TRAINING_SAMPLES = Constant.Constant.TRAINING_SAMPLES_COD_RNA TESTING_SAMPLES = Constant.Constant.TESTING_SAMPLES_COD_RNA REPITITIONS = 1
class SendRecvExample:
    """Ring-communication benchmark over the Communication wrapper.

    In ``example`` every rank repeatedly sends a fixed 2-int payload to
    the next rank and receives from the previous one; ``example1`` is a
    single-pass variant and ``example2`` times a scalar allreduce(SUM).

    NOTE(review): get_data only covers ranks 0-3 and implicitly returns
    None for larger ranks -- presumably intended for world sizes <= 4;
    confirm against the launch configuration.
    """

    # Shared communicator wrapper (one per process).
    comms = Communication.Communication()

    def get_data(self, rank):
        # Fixed 2-element payload per rank (None for rank > 3).
        if (rank == 0):
            return np.array([1, 1], dtype='i')
        if (rank == 1):
            return np.array([2, 1], dtype='i')
        if (rank == 2):
            return np.array([3, 1], dtype='i')
        if (rank == 3):
            return np.array([4, 2], dtype='i')

    def example(self):
        """Timed ring exchange: repeat the send/recv ring max_itr times
        and print the elapsed time (divided by 10) from rank 0."""
        world_rank = self.comms.comm.Get_rank()
        world_size = self.comms.comm.Get_size()
        dsize = 2
        source = 0
        # NOTE(review): input, master_input (reassigned before use),
        # count, partner_rank and p are never consumed below.
        input = np.array([0, 0], dtype='i')
        master_input = np.array([0, 0], dtype='i')
        count = 0
        partner_rank = 1
        dest = -1
        p = np.random.randint(4)
        exec_time = 0
        exec_time -= time.time()
        max_itr = 10000
        for i in range(0, max_itr):
            if (world_rank == 0):
                # Root: send first (to the next rank, or itself when
                # running alone), then receive from the last rank.
                if (world_size == 1):
                    dest = world_rank
                else:
                    dest = world_rank + 1
                master_input = self.get_data(world_rank)
                # np.asarray(master_input, dtype='i')
                self.comms.send(input=master_input, dtype=self.comms.mpi.INT, dest=dest, tag=0)
                if (world_size == 1):
                    source = world_rank
                else:
                    source = world_size - 1
                data = self.comms.recv(source=source, dtype=self.comms.mpi.INT, tag=0, size=dsize)
                # print("I am Master " + str(world_rank) + ", I received from " + str(source) + " : ", data)
            else:
                # Non-root: receive from the previous rank, then pass
                # this rank's own payload on around the ring.
                source = world_rank - 1
                data = self.comms.recv(source=source, dtype=self.comms.mpi.INT, tag=0, size=dsize)
                # print("I am slave " + str(world_rank) + ", I received from " + str(source) + " : ", data)
                data = self.get_data(world_rank)
                # print("I am slave " + str(world_rank) + ", I sent ", data)
                dest = (world_rank + 1) % world_size
                # np.asarray(data, dtype='i')
                self.comms.send(input=data, dtype=self.comms.mpi.INT, dest=dest, tag=0)
        # Stop the clock after the whole loop; the guard below is then
        # trivially true (i holds its final loop value).
        exec_time += time.time()
        if (i == (max_itr - 1)):
            if (world_rank == 0):
                # NOTE(review): divides by 10, not max_itr -- confirm
                # whether a per-iteration average was intended.
                exec_time = exec_time / 10
                print("Execution Time : ", exec_time)

    def example1(self):
        """Single-pass ring: rank 0 seeds the data, each following rank
        scales what it received by its rank and forwards it."""
        world_rank = self.comms.comm.Get_rank()
        world_size = self.comms.comm.Get_size()
        dsize = 2
        source = 0
        # NOTE(review): input, count and partner_rank are never used.
        input = np.array([0, 1, 2, 3, 4], dtype='i')
        master_input = np.array([0, 1, 2, 3, 4], dtype='i')
        count = 0
        partner_rank = 1
        dest = 0
        if (world_rank == 0):
            if (source == 0):
                print('Starting the programme ...', master_input)
            if (world_size == 1):
                dest = world_rank
            else:
                dest = world_rank + 1
            master_input = master_input * (world_rank + 1)
            # np.asarray(master_input, dtype='i')
            self.comms.send(input=master_input, dtype=self.comms.mpi.INT, dest=dest, tag=0)
            if (world_size == 1):
                source = world_rank
            else:
                source = world_size - 1
            data = self.comms.recv(source=source, dtype=self.comms.mpi.INT, tag=0, size=dsize)
            print("I am Master " + str(world_rank) + ", I received : ", data)
        else:
            source = world_rank - 1
            # NOTE(review): receives size=5 here vs dsize=2 elsewhere --
            # confirm the expected message length.
            data = self.comms.recv(source=source, dtype=self.comms.mpi.INT, tag=0, size=5)
            print("I am slave " + str(world_rank) + ", I received : ", data)
            data = data * world_rank
            print("I am slave " + str(world_rank) + ", I sent ", data)
            dest = (world_rank + 1) % world_size
            # np.asarray(data, dtype='i')
            self.comms.send(input=data, dtype=self.comms.mpi.INT, dest=dest, tag=0)

    def example2(self):
        """Timed scalar allreduce(SUM) repeated max_itr times; rank 0
        prints the elapsed time (divided by 10)."""
        # NOTE(review): this local Communication instance is never used;
        # all calls below go through the class-level self.comms.
        comms = Communication.Communication()
        max_itr = 10000
        rank = self.comms.comm.Get_rank()
        size = self.comms.comm.Get_size()
        input = np.array(rank, dtype='i')
        print("Simple: Input :" + str(input) + " From Rank : " + str(rank))
        # initialize the numpy arrays that store the results from reduce operation
        # output_max = np.array(0, 'i')
        exec_time = 0
        exec_time -= time.time()
        for i in range(0, max_itr):
            output_sum = np.array(0, 'i')
            # perform reduction based on sum and maximum
            # self.comms.allreduce(input=input, output=output_max, op=self.comms.mpi.MAX, dtype=self.comms.mpi.INT)
            self.comms.allreduce(input=input, output=output_sum, op=self.comms.mpi.SUM, dtype=self.comms.mpi.INT)
            if (rank == 0):
                # Placeholder body; the prints below were disabled.
                k = 1
                # print("Simple: Output Max : " + str(output_max) + ", from Rank " + str(rank) + "\n")
                # print("Simple: Output Sum : " + str(output_sum) + ", from Rank " + str(rank) + "\n")
        # Stop the clock after the whole loop; the guard below is then
        # trivially true (i holds its final loop value).
        exec_time += time.time()
        if (i == (max_itr - 1)):
            if (rank == 0):
                # NOTE(review): divides by 10, not max_itr -- confirm
                # whether a per-iteration average was intended.
                exec_time = exec_time / 10
                print("Execution Time : ", exec_time)
class SendRecvExample:
    """Send/recv demos driven by threads and a dask Client.

    Fix over the original: ``myfunc`` used Python 2 ``print`` statements
    (a SyntaxError under Python 3); converted to the ``print()`` calls
    the rest of the file already uses.
    """

    # Shared communicator wrapper (one per process).
    comms = Communication.Communication()

    def get_data(self, rank):
        """Return the fixed 2-element payload for ranks 0-3 (None for
        any other rank)."""
        if rank == 0:
            return np.array([1, 1], dtype='i')
        if rank == 1:
            return np.array([2, 1], dtype='i')
        if rank == 2:
            return np.array([3, 1], dtype='i')
        if rank == 3:
            return np.array([4, 2], dtype='i')

    def example(self):
        """Two-way exchange: rank 0 sends to rank 1 and then rank 1
        sends back to rank 0, printing each step."""
        rank = self.comms.comm.Get_rank()
        dsize = 2
        if rank == 0:
            payload = self.get_data(rank)
            self.comms.send(input=payload, dtype=self.comms.mpi.INT, dest=1, tag=11)
            print("Sending Data : " + str(payload) + ", from Rank " + str(rank) + "\n")
        elif rank == 1:
            data = self.comms.recv(source=0, dtype=self.comms.mpi.INT, tag=11, size=dsize)
            print("Receiving Data : " + str(data) + ", from Rank " + str(rank) + "\n")
        if rank == 1:
            payload = self.get_data(rank)
            self.comms.send(input=payload, dtype=self.comms.mpi.INT, dest=0, tag=11)
            print("Sending Data : " + str(payload) + ", from Rank " + str(rank) + "\n")
        elif rank == 0:
            data = self.comms.recv(source=1, dtype=self.comms.mpi.INT, tag=11, size=dsize)
            print("Receiving Data : " + str(data) + ", from Rank " + str(rank) + "\n")

    def threadExample(self):
        """Run the send and receive halves concurrently via a dask
        Client and report the elapsed wall-clock time."""
        world_rank = self.comms.comm.Get_rank()
        world_size = self.comms.comm.Get_size()
        client = Client()
        exec_time = 0
        exec_time -= time.time()
        # Submit both halves so the blocking send/recv can overlap.
        a = client.submit(self.sendToRank, 1, world_size)
        b = client.submit(self.recvFromRank, 0, world_size)
        print(a.result(), b.result())
        exec_time += time.time()
        print("Dask Time Taken : " + str(exec_time))

    def myfunc(self, i):
        """Sleep 5 seconds, logging before and after (thread demo)."""
        print("sleeping 5 sec from thread %d" % i)
        time.sleep(5)
        print("finished sleeping from thread %d" % i)

    def callmyfunc(self):
        """Launch myfunc on a daemonless background thread."""
        t = Thread(target=self.myfunc, args=(1, ))
        t.start()

    def sendToRank(self, world_rank, world_size):
        """Send this rank's payload to the rank given by world_rank."""
        send_data = self.get_data(world_rank)
        dest = world_rank
        self.comms.send(input=send_data, dtype=self.comms.mpi.INT, dest=dest, tag=11)
        print(world_rank, "Send Data ", send_data)

    def recvFromRank(self, world_rank, world_size):
        """Receive a 2-int payload from the rank given by world_rank."""
        data = self.comms.recv(source=world_rank, dtype=self.comms.mpi.INT, tag=11, size=2)
        print(world_rank, "Data ", data)

    def do_ring(self):
        """Run the send and receive halves on two plain threads."""
        world_rank = self.comms.comm.Get_rank()
        world_size = self.comms.comm.Get_size()
        t1 = Thread(target=self.sendToRank, args=(world_rank, world_size, ))
        t2 = Thread(target=self.recvFromRank, args=(world_rank, world_size, ))
        t1.start()
        t2.start()