Example #1
    def __init__(self, experiment, device, dataset, algorithm, model,
                 batch_size, learning_rate, beta, L_k, num_glob_iters,
                 local_epochs, optimizer, num_users, times, cutoff, args):
        super().__init__(experiment, device, dataset, algorithm, model[0],
                         batch_size, learning_rate, beta, L_k, num_glob_iters,
                         local_epochs, optimizer, num_users, times, args)

        # Initialize data for all users
        self.K = 0

        # total_users = len(dataset[0][0])
        self.sub_data = cutoff
        if (self.sub_data):
            randomList = self.get_partion(self.total_users)
        for i in range(self.total_users):
            id, train, test = read_user_data(i, dataset[0], dataset[1])
            print("User ", id, ": Numb of Training data", len(train))
            if (self.sub_data):
                if (i in randomList):
                    train, test = self.get_data(train, test)
            user = UserAVG(device, id, train, test, model, batch_size,
                           learning_rate, beta, L_k, local_epochs, optimizer)
            self.users.append(user)
            self.total_train_samples += user.train_samples

        print("Fraction number of users / total users:", num_users, " / ",
              self.total_users)
        print("Finished creating FedAvg server.")
Example #2
    def __init__(self, experiment, device, dataset, algorithm, model,
                 batch_size, learning_rate, beta, L_k, num_glob_iters,
                 local_epochs, optimizer, num_users, K, personal_learning_rate,
                 times, cutoff):
        super().__init__(experiment, device, dataset, algorithm, model[0],
                         batch_size, learning_rate, beta, L_k, num_glob_iters,
                         local_epochs, optimizer, num_users, times)

        # Initialize data for all users
        self.K = K
        self.personal_learning_rate = personal_learning_rate

        total_users = len(dataset[0][0])
        self.sub_data = cutoff
        if (self.sub_data):
            randomList = self.get_partion(total_users)

        for i in range(total_users):
            id, train, test = read_user_data(i, dataset[0], dataset[1])
            if (self.sub_data):
                if (i in randomList):
                    train, test = self.get_data(train, test)

            user = UserpFedMe(device, id, train, test, model, batch_size,
                              learning_rate, beta, L_k, local_epochs,
                              optimizer, K, personal_learning_rate)
            self.users.append(user)
            self.total_train_samples += user.train_samples
        print("Number of users / total users:", num_users, " / ", total_users)
        print("Finished creating pFedMe server.")
Example #3
    def __init__(self, dataset, algorithm, model, batch_size, learning_rate, L,
                 num_glob_iters, local_epochs, users_per_round, similarity,
                 noise, times):
        super().__init__(dataset, algorithm, model[0], batch_size,
                         learning_rate, L, num_glob_iters, local_epochs,
                         users_per_round, similarity, noise, times)
        self.control_norms = []

        # Initialize data for all users
        data = read_data(dataset)
        total_users = len(data[0])
        for i in range(total_users):
            id, train, test = read_user_data(i, data, dataset)
            user = UserSCAFFOLD(id, train, test, model, batch_size,
                                learning_rate, L, local_epochs)
            self.users.append(user)
            self.total_train_samples += user.train_samples

        if self.noise:
            self.communication_thresh = rayleigh.ppf(
                1 - users_per_round / total_users)  # h_min

        print("Number of users / total users:", users_per_round, " / ",
              total_users)

        self.server_controls = [
            torch.zeros_like(p.data) for p in self.model.parameters()
            if p.requires_grad
        ]

        print("Finished creating SCAFFOLD server.")
Example #4
    def __init__(self, dataset, algorithm, model, batch_size, learning_rate,
                 beta, lamda, num_glob_iters, local_epochs, optimizer,
                 num_users, times):
        super().__init__(dataset, algorithm, model[0], batch_size,
                         learning_rate, beta, lamda, num_glob_iters,
                         local_epochs, optimizer, num_users, times)

        # Initialize data for all users
        data = read_data(dataset)
        total_users = len(data[0])
        for i in range(total_users):
            id, train, test = read_user_data(i, data, dataset)
            user = UserPerAvg(id, train, test, model, batch_size,
                              learning_rate, beta, lamda, local_epochs,
                              optimizer, total_users, num_users)
            self.users.append(user)
            self.total_train_samples += user.train_samples
        print("Number of users / total users:", num_users, " / ", total_users)
        print("Finished creating Local Per-Avg.")
Example #5
    def __init__(self, experiment, device, dataset, algorithm, model,
                 batch_size, learning_rate, beta, L_k, num_glob_iters,
                 local_epochs, optimizer, num_users, K, times, cutoff):
        super().__init__(experiment, device, dataset, algorithm, model[0],
                         batch_size, learning_rate, beta, L_k, num_glob_iters,
                         local_epochs, optimizer, num_users, times)

        # Initialize data for all users
        # subset data
        self.sub_data = cutoff
        self.data_set_name = dataset[1]
        self.K = K
        total_users = len(dataset[0][0])
        N = total_users
        b = np.random.uniform(0, 1, size=(N, N))
        b_symm = (b + b.T) / 2
        b_symm[b_symm < 0.25] = 0
        self.alk_connection = b_symm

        #np.random.seed(0)
        if (self.sub_data):
            randomList = self.get_partion(total_users)

        for i in range(total_users):
            id, train, test = read_user_data(i, dataset[0], dataset[1])
            if (self.sub_data):
                if (i in randomList):
                    train, test = self.get_data(train, test)

            user = UserFedU(device, id, train, test, model, batch_size,
                            learning_rate, beta, L_k, K, local_epochs,
                            optimizer)

            self.users.append(user)
            self.total_train_samples += user.train_samples

        print("Fraction number of users / total users:", num_users, " / ",
              total_users)
        print("Finished creating SSGD server.")
Example #6
    def __init__(self, experiment, device, dataset, algorithm, model,
                 batch_size, learning_rate, beta, L_k, num_glob_iters,
                 local_epochs, optimizer, num_users, times, cutoff):
        super().__init__(experiment, device, dataset, algorithm, model[0],
                         batch_size, learning_rate, beta, L_k, num_glob_iters,
                         local_epochs, optimizer, num_users, times)

        self.K = 0
        self.sub_data = cutoff
        total_users = len(dataset[0][0])
        #np.random.seed(0)
        if (self.sub_data):
            randomList = self.get_partion(total_users)

        # Initialize data for all users
        train_all = []
        test_all = []

        for i in range(total_users):
            id, train, test = read_user_data(i, dataset[0], dataset[1])
            if (self.sub_data):
                if (i in randomList):
                    train, test = self.get_data(train, test)

            train_all += train
            test_all += test

        id = "0001"

        user = UserGlobal(device, id, train_all, test_all, model, batch_size,
                          learning_rate, beta, L_k, local_epochs, optimizer)
        self.users.append(user)
        self.total_train_samples = len(train_all)

        print("Finished creating Global model.")
Example #7
    def __init__(self, dataset, algorithm, model, batch_size, learning_rate, L,
                 num_glob_iters, local_epochs, users_per_round, similarity,
                 noise, times):
        super().__init__(dataset, algorithm, model[0], batch_size,
                         learning_rate, L, num_glob_iters, local_epochs,
                         users_per_round, similarity, noise, times)

        # Initialize data for all users
        data = read_data(dataset)
        total_users = len(data[0])
        for i in range(total_users):
            id, train, test = read_user_data(i, data, dataset)
            user = UserAVG(id, train, test, model, batch_size, learning_rate,
                           L, local_epochs)
            self.users.append(user)
            self.total_train_samples += user.train_samples

        if self.noise:
            self.communication_thresh = rayleigh.ppf(
                1 - users_per_round / total_users)  # h_min

        print("Number of users / total users:", users_per_round, " / ",
              total_users)
        print("Finished creating FedAvg server.")
Example #8
 def __init__(self, dataset, algorithm, model, async_process, batch_size,
              learning_rate, lamda, beta, num_glob_iters, local_epochs,
              optimizer, num_users, user_labels, niid, times, data_load,
              extra):
     self.dataset = dataset
     self.model = copy.deepcopy(model)
     self.algorithm = algorithm
     self.optimizer = optimizer
     self.batch_size = batch_size
     self.learning_rate = learning_rate
     self.async_process = async_process
     self.lamda = lamda
     self.beta = beta
     self.times = 8
     self.data_load = data_load
     self.extra = extra
     self.num_users = num_users
     self.num_glob_iters = num_glob_iters
     self.local_epochs = local_epochs
     self.user_labels = user_labels
     self.niid = niid
     self.users = []
     self.local_acc = []
     self.avg_local_acc = []
     self.avg_local_train_acc = []
     self.avg_local_train_loss = []
     self.server_acc = []
     # old data split
     data = read_data(dataset, niid, num_users, user_labels)
     self.num_users = num_users
     test_data = []
     # id, train, test = read_user_data(0, data, dataset)
     # if algorithm == 'FedAvg':
     #     user = UserFedAvg(id, train, test, model, async_process, batch_size, learning_rate, lamda, beta, local_epochs, optimizer, data_load, self.times)
     # if algorithm == 'ASO':
     #     user = UserASO(id, train, test, model, async_process, batch_size, learning_rate, lamda, beta, local_epochs, optimizer, data_load, self.times)
     # if algorithm == 'LGP':
     #     user = UserLGP(id, train, test, model, async_process, batch_size, learning_rate, lamda, beta, local_epochs, optimizer, data_load, self.times)
     # if algorithm == 'PerFed':
     #     user = UserPerFed(id, train, test, model, async_process, batch_size, learning_rate, lamda, beta, local_epochs, optimizer, data_load, self.times)
     # self.users.append(user)
     # test_data.extend(test)
     for i in range(self.times):
         id, train, test = read_user_data(i, data, dataset)
         if algorithm == 'FedAvg':
             user = UserFedAvg(id, train, test, model, async_process,
                               batch_size, learning_rate, lamda, beta,
                               local_epochs, optimizer, data_load, i + 9)
         if algorithm == 'ASO':
             user = UserASO(id, train, test, model, async_process,
                            batch_size, learning_rate, lamda, beta,
                            local_epochs, optimizer, data_load, i + 9)
         if algorithm == 'LGP':
             user = UserLGP(id, train, test, model, async_process,
                            batch_size, learning_rate, lamda, beta,
                            local_epochs, optimizer, data_load, i + 9)
         if algorithm == 'PerFed':
             user = UserPerFed(id, train, test, model, async_process,
                               batch_size, learning_rate, lamda, beta,
                               local_epochs, optimizer, data_load, i + 9)
         if algorithm == 'FedAsync':
             user = UserFedAsync(id, train, test, model, async_process,
                                 batch_size, learning_rate, lamda, beta,
                                 local_epochs, optimizer, data_load, i + 9)
         self.users.append(user)
         test_data.extend(test)
     for i in range(self.times, self.num_users):
         id, train, test = read_user_data(i, data, dataset)
         if algorithm == 'PerFed':
             user = UserPerFed(id, train, test, model, async_process,
                               batch_size, learning_rate, lamda, beta,
                               local_epochs, optimizer, data_load)
         if algorithm == 'FedAvg':
             user = UserFedAvg(id, train, test, model, async_process,
                               batch_size, learning_rate, lamda, beta,
                               local_epochs, optimizer, data_load)
         if algorithm == 'ASO':
             user = UserASO(id, train, test, model, async_process,
                            batch_size, learning_rate, lamda, beta,
                            local_epochs, optimizer, data_load)
         if algorithm == 'LGP':
             user = UserLGP(id, train, test, model, async_process,
                            batch_size, learning_rate, lamda, beta,
                            local_epochs, optimizer, data_load)
         if algorithm == 'FedAsync':
             user = UserFedAsync(id, train, test, model, async_process,
                                 batch_size, learning_rate, lamda, beta,
                                 local_epochs, optimizer, data_load)
         self.users.append(user)
         test_data.extend(test)
     if algorithm == 'FedAvg':
         self.server = ServerFedAvg(algorithm, model, async_process,
                                    test_data, batch_size)
     if algorithm == 'PerFed':
         self.server = ServerPerFed(algorithm, model, async_process,
                                    test_data, batch_size)
     if algorithm == 'ASO':
         self.server = ServerASO(algorithm, model, async_process, test_data,
                                 batch_size)
     if algorithm == 'LGP':
         self.server = ServerLGP(algorithm, model, async_process, test_data,
                                 batch_size)
     if algorithm == 'FedAsync':
         self.server = serverFedAsync(algorithm, model, async_process,
                                      test_data, batch_size)
     for user in self.users:
         self.server.append_user(user.id, user.train_data_samples)
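The repeated `if algorithm == ...` chains in the two user-creation loops above can be collapsed into a single lookup table. Below is a sketch of that refactoring inside the constructor; it reuses the `User*` constructors and local variables from the snippet above, so it is not runnable on its own, and `USER_CLASSES` and `trailing` are assumed names introduced only for illustration.

USER_CLASSES = {
    'FedAvg': UserFedAvg,
    'ASO': UserASO,
    'LGP': UserLGP,
    'PerFed': UserPerFed,
    'FedAsync': UserFedAsync,
}

for i in range(self.num_users):
    id, train, test = read_user_data(i, data, dataset)
    # The first self.times users get the extra trailing argument (i + 9),
    # mirroring the two loops above.
    trailing = (i + 9,) if i < self.times else ()
    user = USER_CLASSES[algorithm](id, train, test, model, async_process,
                                   batch_size, learning_rate, lamda, beta,
                                   local_epochs, optimizer, data_load,
                                   *trailing)
    self.users.append(user)
    test_data.extend(test)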