Example #1
0
def create_model(args, model_name, output_dim):
    """Instantiate the network for the (model_name, args.dataset) pair.

    Looks the pair up in a dispatch table; logs which combination was
    selected and returns a freshly constructed model, or None when the
    combination is not recognized.
    """
    logging.info("create_model. model_name = %s, output_dim = %s" %
                 (model_name, output_dim))
    # (model_name, dataset) -> (log message, zero-arg model factory).
    # Factories are lambdas so nothing is constructed until a match is found.
    builders = {
        ("lr", "mnist"):
            ("LogisticRegression + MNIST",
             lambda: LogisticRegression(28 * 28, output_dim)),
        ("cnn", "femnist"):
            ("CNN + FederatedEMNIST",
             lambda: CNN_DropOut(False)),
        ("resnet18_gn", "fed_cifar100"):
            ("ResNet18_GN + Federated_CIFAR100",
             lambda: resnet18()),
        ("rnn", "shakespeare"):
            ("RNN + shakespeare",
             lambda: RNN_OriginalFedAvg()),
        ("rnn", "fed_shakespeare"):
            ("RNN + fed_shakespeare",
             lambda: RNN_OriginalFedAvg()),
        ("lr", "stackoverflow_lr"):
            ("lr + stackoverflow_lr",
             lambda: LogisticRegression(10004, output_dim)),
        ("cnn", "stackoverflow_nwp"):
            ("CNN + stackoverflow_nwp",
             lambda: RNN_StackOverFlow()),
    }
    entry = builders.get((model_name, args.dataset))
    if entry is None:
        return None
    message, build = entry
    logging.info(message)
    return build()
Example #2
0
def main():
    """Entry point for a decentralized online learning run.

    Reads configuration from the module-level ``args``, initializes wandb
    and logging, seeds all RNGs, loads the streaming dataset for every
    client, builds the logistic-regression models and launches
    ``FedML_decentralized_fl``.
    """
    ROOT_PATH = args.root_path
    # BUG FIX: the original expression ended with a trailing comma, which
    # made RUN_NAME a one-element *tuple*, so wandb received the run name
    # as "('...',)". Building the string with a single format fixes that.
    RUN_NAME = "%s-id%s-group_id%s-n%s-symm%s-tu%s-td%s-lr%s" % (
        args.mode, args.run_id, args.group_id, args.client_number,
        args.b_symmetric, args.topology_neighbors_num_undirected,
        args.topology_neighbors_num_directed, args.learning_rate)

    wandb.init(project="fedml", name=str(RUN_NAME), config=args)

    logging.basicConfig(
        level=logging.INFO,
        format=
        ' - %(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
        datefmt='%a, %d %b %Y %H:%M:%S')

    logging.info('Decentralized Online Learning.')

    # Fix random seeds so runs are reproducible across python/numpy/torch.
    seed = 1234
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)

    # Create the list of client ids [0, client_number).
    client_number = args.client_number
    client_id_list = list(range(client_number))
    print(client_id_list)

    # Load streaming data: every client receives iteration_number_T samples.
    iteration_number_T = args.iteration_number
    sample_num_in_total = client_number * iteration_number_T
    beta = args.beta

    data_name = args.data_name
    data_path = ""
    if data_name == "SUSY":
        data_path = ROOT_PATH + "SUSY/SUSY.csv"
        input_dim = 18
    elif data_name == "RO":
        data_path = ROOT_PATH + "room_occupancy/datatraining.txt"
        input_dim = 5
    else:
        # Unknown dataset name: keep the empty path and the default
        # dimensionality; DataLoader decides how to handle it.
        input_dim = 5
    data_loader = DataLoader(data_name, data_path, client_id_list,
                             sample_num_in_total, beta)
    streaming_data = data_loader.load_datastream()

    # Two models: the live one and a cache used by the decentralized
    # algorithm (e.g. for neighbor averaging).
    model = LogisticRegression(input_dim, args.output_dim)
    model_cache = LogisticRegression(input_dim, args.output_dim)

    # Start training.
    FedML_decentralized_fl(client_number, client_id_list, streaming_data,
                           model, model_cache, args)
Example #3
0
def main():
    """Train and evaluate logistic regression on partitioned MNIST.

    Loads the federated MNIST partition, trains on the global train
    loader with SGD + cross-entropy, then reports test accuracy.
    """
    # Hyper-parameters.
    input_size = 784
    num_classes = 10
    num_epochs = 10
    batch_size = 10
    learning_rate = 0.03

    np.random.seed(0)
    torch.manual_seed(10)

    # FIX: the original hard-coded "cuda:0" and crashed on CPU-only
    # machines; fall back to CPU when CUDA is unavailable.
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    client_num, train_data_num, test_data_num, train_data_global, test_data_global, \
    train_data_local_num_dict, train_data_local_dict, test_data_local_dict, \
    class_num = load_partition_data_mnist(batch_size)

    model = LogisticRegression(input_size, num_classes).to(device)

    # Loss and optimizer. CrossEntropyLoss applies softmax internally.
    criterion = nn.CrossEntropyLoss().to(device)
    optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate)

    # Training loop.
    model.train()
    for epoch in range(num_epochs):
        for i, (images, labels) in enumerate(train_data_global):
            images = images.to(device)
            labels = labels.to(device)

            # Forward + backward + optimize.
            optimizer.zero_grad()
            outputs = model(images)
            loss = criterion(outputs, labels)
            loss.backward()
            optimizer.step()

            if (i + 1) % 100 == 0:
                print('Epoch: [%d/%d], Step: [%d/%d], Loss: %.4f' %
                      (epoch + 1, num_epochs, i + 1, len(train_data_global),
                       loss.item()))

    # Evaluation. FIX: the original built autograd graphs during the test
    # pass and accumulated `correct` as a tensor via the deprecated
    # `.data` attribute; run under no_grad() and keep plain Python ints.
    model.eval()
    correct = 0
    total = 0
    with torch.no_grad():
        for images, labels in test_data_global:
            images = images.to(device)
            labels = labels.to(device)
            outputs = model(images)
            _, predicted = torch.max(outputs, 1)
            total += labels.size(0)
            correct += (predicted == labels).sum().item()

    # ~86% on this split.
    print('Accuracy of the model on the 7371 test images: %d %%' %
          (100 * correct // total))
Example #4
0
def create_model(args, model_name, output_dim):
    """Build the network for the requested (model_name, dataset) pair.

    For the "lr"/mnist and "rnn"/shakespeare pairs the preferred client
    optimizer is also recorded on ``args``. Returns None when the
    combination is not recognized.
    """
    if model_name == "lr" and args.dataset == "mnist":
        net = LogisticRegression(28 * 28, output_dim)
        args.client_optimizer = 'sgd'
        return net
    if model_name == "rnn" and args.dataset == "shakespeare":
        net = RNN_OriginalFedAvg(28 * 28, output_dim)
        args.client_optimizer = 'adam'
        return net
    if model_name == "resnet56":
        return resnet56(class_num=output_dim)
    if model_name == "mobilenet":
        return mobilenet(class_num=output_dim)
    return None
Example #5
0
def create_model(args, model_name, output_dim):
    """Construct the model selected by (model_name, args.dataset).

    Logs the chosen combination and returns the new model, or None when
    the pair is not recognized.
    """
    logging.info("create_model. model_name = %s, output_dim = %s" %
                 (model_name, output_dim))
    if model_name == "lr" and args.dataset == "mnist":
        logging.info("LogisticRegression + MNIST")
        return LogisticRegression(28 * 28, output_dim)
    if model_name == "rnn" and args.dataset == "shakespeare":
        logging.info("RNN + shakespeare")
        return RNN_OriginalFedAvg(28 * 28, output_dim)
    if model_name == "cnn" and args.dataset == "femnist":
        logging.info("CNN + FederatedEMNIST")
        return CNN_OriginalFedAvg(False)
    return None