import numpy as np


def assign_data():
    '''Assign datasets to the corresponding devices.'''
    # load the datasets for the two Pis
    X_train_1, X_train_2, y_train_1, y_train_2 = read_data()

    print('_' * 30 + ' check the consistency ' + '_' * 30)
    print('len(X_1)={}, len(y_1)={}'.format(len(X_train_1), len(y_train_1)))
    print('len(X_2)={}, len(y_2)={}'.format(len(X_train_2), len(y_train_2)))
    print('-' * 70)

    # stack features and labels: [X_train_1, y_train_1] is for pi1,
    # [X_train_2, y_train_2] is for pi2
    data_1 = np.asarray(np.hstack((X_train_1, y_train_1.reshape(-1, 1))))
    data_2 = np.asarray(np.hstack((X_train_2, y_train_2.reshape(-1, 1))))
    # send the first 100 samples of each split to its Pi
    send("172.24.6.253", "data_01", 12345, data_1[:100, :])
    send("172.24.6.253", "data_02", 12345, data_2[:100, :])
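# The send()/receive() helpers used above are not shown in this file. Below is
# a minimal, hypothetical sketch of what send() could look like, assuming the
# transport is an MQTT broker reachable at the given host/port and that
# payloads are serialized with pickle; the broker, the paho-mqtt dependency,
# and the pickle format are all assumptions, not the project's actual code.
import pickle

import paho.mqtt.publish as publish


def send(host, topic, port, payload):
    # serialize the numpy payload and publish it to the broker under `topic`
    publish.single(topic, payload=pickle.dumps(payload), hostname=host, port=port)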
def main(): # id of this device ID = "01" # define local model optimizer = tf.train.GradientDescentOptimizer(0.1) model = Model(num_classes=10, optimizer=optimizer, regu_param=1e-3) # num of communication round num_rounds = 100 # num of local iterations num_epochs = 10 # receive dataset from server topic = "data_" + ID dataset = receive("172.24.6.253", topic=topic, 12345, self_name=None, time1=None, count=1) for round in range(num_rounds): # receive model parameter from server model_params = receive("172.24.6.253", topic="global_model", 12345, self_name=None, time1=None, count=1) # check if received global model if len(model_params) == 0: print('=' * 60) print('[INFO] DID NOT RECEIVE GLOBAL MODEL !') return 0 # update local model local_model = model.solve_inner(data=dataset, num_epochs=num_epochs, batch_size=10) # upload local model to server send("172.24.6.253", "local_model", 12345, local_model)
def main():
    # number of communication rounds
    num_round = 100
    # number of devices
    num_devices = 2

    # assign datasets to the devices
    assign_data()

    # initialize the global model
    global_model_init = model_initialier()
    # do not change the model datatype here (e.g. converting the arrays to
    # nested lists); any conversion is handled inside the send() function

    # broadcast the initial model to all devices
    send("172.24.6.253", "global_model", 12345, global_model_init)
    print('sent the initial global model successfully')

    # run the training for num_round communication rounds
    for i in range(num_round):
        # receive the local models from the devices
        # (TODO: document what the self_name argument, "host", means in receive())
        local_models = receive("172.24.6.253", "local_model", 12345, "host", 0.1, num_devices - 1)
        print("received the local models")

        # aggregate the local models; receive() is assumed to return a list of them
        global_model = aggregate(local_models)

        # broadcast the aggregated global model to the devices
        send("172.24.6.253", "global_model", 12345, global_model)
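# The aggregate() helper is not defined in this file. Below is a plain,
# unweighted FedAvg-style sketch, assuming each received local model is a list
# of numpy arrays with matching shapes; the project's real aggregation rule
# (e.g. weighting by local dataset size) may differ.
import numpy as np


def aggregate(local_models):
    # average each parameter tensor elementwise across the received local models
    return [np.mean([m[layer] for m in local_models], axis=0)
            for layer in range(len(local_models[0]))]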