def main(num_users=5,
         loc_ep=10,
         Numb_Glob_Iters=100,
         lamb=0,
         learning_rate=0.01,
         alg='fedprox',
         weight=True,
         batch_size=0,
         dataset="mnist"):
    # suppress tf warnings
    tf.logging.set_verbosity(tf.logging.WARN)

    # parse command line arguments
    options, learner, optimizer = read_options(num_users, loc_ep,
                                               Numb_Glob_Iters, lamb,
                                               learning_rate, alg, weight,
                                               batch_size, dataset)

    # read data
    train_path = os.path.join('data', options['dataset'], 'data', 'train')
    test_path = os.path.join('data', options['dataset'], 'data', 'test')
    dataset = read_data(train_path, test_path)

    # call appropriate trainer
    t = optimizer(options, learner, dataset)
    t.train()
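A minimal usage sketch for the variant above, assuming the federated splits already exist under data/&lt;dataset&gt;/data/{train,test} as the path construction implies; every keyword comes straight from the signature and nothing beyond it is introduced.

if __name__ == '__main__':
    # Hedged usage sketch: call this variant with explicit hyperparameters.
    # The data under data/<dataset>/data/{train,test} is assumed to have
    # been generated beforehand.
    main(num_users=5,
         loc_ep=10,
         Numb_Glob_Iters=100,
         lamb=0,
         learning_rate=0.01,
         alg='fedprox',
         weight=True,
         batch_size=0,
         dataset="mnist")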
Example No. 2
def main(num_users=5,
         loc_ep=10,
         Numb_Glob_Iters=100,
         lamb=0,
         learning_rate=0.01,
         hyper_learning_rate=0.01,
         alg='fedprox',
         weight=True,
         batch_size=0,
         dataset="mnist"):
    # suppress tf warnings
    tf.logging.set_verbosity(tf.logging.WARN)
    model = MODEL_TYPE + ".py"
    if DATA_SET == "cifar100":
        learning_rate = 0.001
    # parse command line arguments
    options, learner_model, trainer = read_options(
        num_users, loc_ep, Numb_Glob_Iters, lamb, learning_rate,
        hyper_learning_rate, alg, weight, batch_size, dataset, model)

    # read data
    train_path = os.path.join('data', options['dataset'], 'data', 'train')
    test_path = os.path.join('data', options['dataset'], 'data', 'test')
    dataset = read_data(train_path, test_path)

    # call appropriate trainer
    t = trainer(options, learner_model, dataset)
    t.train()
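The variant above reads the module-level constants MODEL_TYPE and DATA_SET, which are not defined in the snippet itself. The values below are illustrative placeholders only, showing how such constants could be declared.

# Hypothetical module-level constants assumed by the variant above; the
# values are illustrative and are not taken from the original module.
MODEL_TYPE = "mclr"    # resolves to the model file "mclr.py" via MODEL_TYPE + ".py"
DATA_SET = "mnist"     # learning_rate is overridden to 0.001 when this equals "cifar100"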
Example No. 3
def main():
    # suppress tf warnings
    tf.logging.set_verbosity(tf.logging.WARN)

    # parse command line arguments
    options, learner, optimizer = read_options()

    # read data
    train_path = os.path.join('data', options['dataset'], 'data', 'train')
    test_path = os.path.join('data', options['dataset'], 'data', 'test')
    dataset = read_data(train_path, test_path)

    # call appropriate trainer
    for i in range(options['times']):
        # Set seeds
        random.seed(1 + i)
        np.random.seed(12 + i)
        tf.set_random_seed(123 + i)
        print('......time for running......', i)
        t = optimizer(options, learner, dataset)
        t.train(i)

    average_data(num_users=options['clients_per_round'],
                 loc_ep1=options['num_epochs'],
                 Numb_Glob_Iters=options['num_rounds'],
                 lamb=options['lamb'],
                 learning_rate=options['learning_rate'],
                 hyper_learning_rate=options['hyper_learning_rate'],
                 algorithms=options['optimizer'],
                 batch_size=options['batch_size'],
                 dataset=options['dataset'],
                 rho=options['rho'],
                 times=options['times'])
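The keys that read_options() must supply for the variant above can be read off directly from the dict accesses in the loop and in the average_data call. The sketch below shows only that shape; the values are placeholders, not the real parser's defaults.

# Illustrative shape of the options dict consumed above; the keys are taken
# from the accesses in the code, the values are placeholders.
options_example = {
    'dataset': 'mnist',
    'optimizer': 'fedprox',
    'clients_per_round': 5,
    'num_epochs': 10,
    'num_rounds': 100,
    'lamb': 0,
    'learning_rate': 0.01,
    'hyper_learning_rate': 0.01,
    'batch_size': 0,
    'rho': 0,
    'times': 10,
}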
Example No. 4
def main():
    # suppress tf warnings
    tf.logging.set_verbosity(tf.logging.WARN)
    
    # parse command line arguments
    options, learner, optimizer = read_options()

    # read data
    train_path = os.path.join('data', options['dataset'], 'data', 'train')
    test_path = os.path.join('data', options['dataset'], 'data', 'test')
    dataset = read_data(train_path, test_path)

    # call appropriate trainer
    t = optimizer(options, learner, dataset)
    t.train()
Example No. 5
def main(num_users=5, loc_ep=10, alg='fedprox', weight=False):
    # suppress tf warnings
    tf.logging.set_verbosity(tf.logging.WARN)
    
    # parse command line arguments
    options, learner, optimizer = read_options(num_users, loc_ep, alg, weight)

    # read data
    train_path = os.path.join('data', options['dataset'], 'data', 'train')
    test_path = os.path.join('data', options['dataset'], 'data', 'test')
    dataset = read_data(train_path, test_path)

    # call appropriate trainer
    t = optimizer(options, learner, dataset)
    t.train()
Example No. 6
def main(num_users=5,
         loc_ep=10,
         Numb_Glob_Iters=100,
         lamb=0,
         learning_rate=0.01,
         hyper_learning_rate=0.01,
         alg='fedprox',
         weight=True,
         batch_size=0,
         times=10,
         rho=0,
         dataset="mnist"):
    # suppress tf warnings
    tf.logging.set_verbosity(tf.logging.WARN)

    # parse command line arguments
    options, learner, optimizer = read_options(
        num_users, loc_ep, Numb_Glob_Iters, lamb, learning_rate,
        hyper_learning_rate, alg, weight, batch_size, times, rho, dataset)

    # read data
    train_path = os.path.join('data', options['dataset'], 'data', 'train')
    test_path = os.path.join('data', options['dataset'], 'data', 'test')
    dataset = read_data(train_path, test_path)

    # call appropriate trainer
    for i in range(times):
        # Set seeds
        random.seed(1 + i)
        np.random.seed(12 + i)
        tf.set_random_seed(123 + i)
        print('......time for running......', i)
        t = optimizer(options, learner, dataset)
        t.train(i)
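    # average the recorded metrics across the repeated runs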
    average_data(num_users=num_users,
                 loc_ep1=loc_ep,
                 Numb_Glob_Iters=Numb_Glob_Iters,
                 lamb=lamb,
                 learning_rate=learning_rate,
                 hyper_learning_rate=hyper_learning_rate,
                 algorithms=alg,
                 batch_size=batch_size,
                 dataset=dataset,
                 rho=rho,
                 times=times)
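A usage sketch for the multi-run variant above; the per-run seeds (1+i, 12+i, 123+i) are the ones hard-coded in the loop, and only the call itself is added here.

if __name__ == '__main__':
    # Hedged usage sketch: repeat the experiment three times and let
    # average_data() aggregate the per-run results, as coded above.
    main(num_users=5,
         loc_ep=10,
         Numb_Glob_Iters=100,
         lamb=0,
         learning_rate=0.01,
         hyper_learning_rate=0.01,
         alg='fedprox',
         weight=True,
         batch_size=0,
         times=3,
         rho=0,
         dataset="mnist")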
Example No. 7
def main():
    # suppress tf warnings
    tf.logging.set_verbosity(tf.logging.WARN)

    # parse command line arguments
    options, learner, optimizer = read_options()

    # read data
    path = "/".join(os.path.abspath(__file__).split('/')[:-1])
    log_path = os.path.join(os.path.abspath('.'), 'out_new',
                            options['dataset'])
    if not os.path.exists(log_path):
        os.makedirs(log_path)
    train_path = os.path.join(path, 'data/train')
    test_path = os.path.join(path, 'data/test')
    dataset = read_data(train_path, test_path)

    # call trainer
    t = optimizer(options, learner, dataset)
    t.train()
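The last variant expects its data next to the script rather than under data/&lt;dataset&gt;/. The short check below only re-derives the paths that the code above builds; the directory names are taken verbatim from it.

import os

# Sanity-check sketch for the layout assumed by the variant above:
# train/test data live beside the script, logs go to ./out_new/<dataset>/.
script_dir = os.path.dirname(os.path.abspath(__file__))
for p in (os.path.join(script_dir, 'data', 'train'),
          os.path.join(script_dir, 'data', 'test')):
    print(p, 'exists' if os.path.isdir(p) else 'MISSING')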