Example #1
import os

import torch
import torch.nn as nn

# args_parser, get_data and get_model are project-local helpers whose
# imports fall outside this excerpt

if __name__ == '__main__':
    # parse args
    args = args_parser()

    # use the requested GPU when available, otherwise fall back to CPU
    args.device = torch.device('cuda:{}'.format(
        args.gpu) if torch.cuda.is_available() and args.gpu != -1 else 'cpu')

    base_dir = './save/{}/{}_single_{}/{}/'.format(args.dataset, args.model,
                                                   args.opt, args.results_save)
    algo_dir = 'blr_{}_hlr{}_bm{}_hm_{}'.format(args.body_lr, args.head_lr,
                                                args.body_m, args.head_m)

    # exist_ok=True makes a separate existence check unnecessary
    os.makedirs(os.path.join(base_dir, algo_dir), exist_ok=True)

    # set dataset
    dataset_train, dataset_test = get_data(args, env='single')
    train_loader = torch.utils.data.DataLoader(dataset_train,
                                               batch_size=128,
                                               shuffle=True,
                                               num_workers=4)
    test_loader = torch.utils.data.DataLoader(dataset_test,
                                              batch_size=128,
                                              num_workers=4)
    dataloaders = {'train': train_loader, 'test': test_loader}

    # build a model
    net_glob = get_model(args)

    # Basically, He uniform (Xavier uniform scaled by the ReLU gain)
    if args.results_save == 'xavier_uniform':
        # the excerpt cuts off mid-call; gain=calculate_gain('relu') is a
        # plausible completion consistent with the comment above
        nn.init.xavier_uniform_(net_glob.linear.weight,
                                gain=nn.init.calculate_gain('relu'))
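A side note on the "Basically, He uniform" comment: with gain = calculate_gain('relu') = sqrt(2), Xavier uniform's bound sqrt(12 / (fan_in + fan_out)) equals He uniform's bound sqrt(6 / fan_in) exactly when fan_in == fan_out. A standalone check with a hypothetical square layer, not taken from the example above:

import torch.nn as nn

# square layer: fan_in == fan_out, so the two bounds coincide
layer = nn.Linear(512, 512)

# Xavier uniform, bound = sqrt(2) * sqrt(6 / (512 + 512)) = sqrt(6 / 512)
nn.init.xavier_uniform_(layer.weight, gain=nn.init.calculate_gain('relu'))

# He uniform, bound = sqrt(2) * sqrt(3 / 512) = sqrt(6 / 512) as well
nn.init.kaiming_uniform_(layer.weight, nonlinearity='relu')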
Example #2
import copy
import os
import pickle

import torch

# args_parser, get_data and get_model are project-local helpers whose
# imports fall outside this excerpt

if __name__ == '__main__':
    # parse args
    args = args_parser()
    args.device = torch.device('cuda:{}'.format(
        args.gpu) if torch.cuda.is_available() and args.gpu != -1 else 'cpu')

    base_dir = './save/{}/{}_iid{}_num{}_C{}_le{}/shard{}/{}/'.format(
        args.dataset, args.model, args.iid, args.num_users, args.frac,
        args.local_ep, args.shard_per_user, args.results_save)
    if not os.path.exists(os.path.join(base_dir, 'local')):
        os.makedirs(os.path.join(base_dir, 'local'), exist_ok=True)

    dataset_train, dataset_test, dict_users_train, dict_users_test = get_data(
        args)
    # the freshly generated user splits are overwritten by a previously
    # saved split, presumably so every run reuses the same partition
    dict_save_path = os.path.join(base_dir, 'dict_users.pkl')
    with open(dict_save_path, 'rb') as handle:
        dict_users_train, dict_users_test = pickle.load(handle)

    # build model
    net_glob = get_model(args)
    net_glob.train()

    # every user starts from an identical copy of the global model
    net_local_list = []
    for user_ix in range(args.num_users):
        net_local_list.append(copy.deepcopy(net_glob))

    # training
    results_save_path = os.path.join(base_dir, 'local/results.csv')
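The excerpt stops where training would begin. Below is a minimal sketch of how a purely local baseline could continue, with each user fitting only its own model copy on its own data shard; args.lr and the use of torch.utils.data.Subset are assumptions, not shown in the source:

import torch.nn.functional as F

for user_ix in range(args.num_users):
    net_local = net_local_list[user_ix]
    net_local.to(args.device)
    net_local.train()
    # args.lr is assumed; the excerpt never shows the optimizer settings
    optimizer = torch.optim.SGD(net_local.parameters(), lr=args.lr)

    # restrict the shared training set to this user's sample indices
    user_loader = torch.utils.data.DataLoader(
        torch.utils.data.Subset(dataset_train,
                                list(dict_users_train[user_ix])),
        batch_size=128, shuffle=True)

    for epoch in range(args.local_ep):
        for images, labels in user_loader:
            images, labels = images.to(args.device), labels.to(args.device)
            optimizer.zero_grad()
            loss = F.cross_entropy(net_local(images), labels)
            loss.backward()
            optimizer.step()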
Example #3
import math
import pickle

import numpy as np
import tensorflow as tf  # TF1-style graph/session API (tf.compat.v1 under TF2)
from sklearn.metrics import accuracy_score
from sklearn.svm import SVC

# get_model_tensors, get_data, train_test_split, get_face_detection_models,
# detect_and_align and get_true_embeds are project-local helpers; note that
# train_test_split here is not sklearn's version -- it takes only the paths


def _main(args):
    with tf.Graph().as_default():
        with tf.Session() as sess:

            input_placeholder, embeddings_tensor, phase_train_placeholder = get_model_tensors(
                args.model_path)

            img_paths, ix2names = get_data(args.data_dir)
            train_paths, test_paths, train_targets, test_targets = train_test_split(
                img_paths)
            print('Train and test image paths loaded')

            num_imgs = train_paths.shape[0]
            # get_shape() yields a tf.Dimension; cast so NumPy accepts it
            embed_size = int(embeddings_tensor.get_shape()[1])
            train_embeddings = np.zeros((num_imgs, embed_size))
            num_batches = math.ceil(num_imgs / args.batch_size)

            face_detector, face_aligner = get_face_detection_models(
                args.meta_dir)

            for i in range(num_batches):
                st_ix = i * args.batch_size
                end_ix = min(st_ix + args.batch_size, num_imgs)
                img_batch_paths = train_paths[st_ix:end_ix]
                batch_imgs, _ = detect_and_align(img_batch_paths,
                                                 args.img_size, face_detector,
                                                 face_aligner)
                batch_imgs = np.squeeze(batch_imgs)

                # a single-image batch gets squeezed down to 3-D; restore
                # the batch dimension so the placeholder always sees 4-D
                if len(batch_imgs.shape) == 3:
                    batch_imgs = batch_imgs[np.newaxis, ...]

                train_feed_dict = {
                    input_placeholder: batch_imgs,
                    phase_train_placeholder: False
                }
                train_embeddings[st_ix:end_ix] = sess.run(
                    embeddings_tensor, feed_dict=train_feed_dict)

            print('Embeddings Created')

            true_embeds = get_true_embeds(train_embeddings, train_targets)

            classifier = SVC(kernel='linear', probability=True)
            classifier.fit(train_embeddings, train_targets)

            train_preds = classifier.predict(train_embeddings)
            print(
                f'Train accuracy score: {accuracy_score(train_targets, train_preds)}'
            )

            with open(args.classifier_path, 'wb') as f:
                pickle.dump((classifier, ix2names, true_embeds), f)

            test_imgs, _ = detect_and_align(test_paths, args.img_size,
                                            face_detector, face_aligner)
            test_imgs = np.squeeze(test_imgs)
            test_feed_dict = {
                input_placeholder: test_imgs,
                phase_train_placeholder: False
            }
            test_embeddings = sess.run(embeddings_tensor,
                                       feed_dict=test_feed_dict)

            test_preds = classifier.predict(test_embeddings)
            print(
                f'Test accuracy score: {accuracy_score(test_targets, test_preds)}'
            )
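For completeness, a hedged sketch of consuming the pickled artifact at inference time; predict_name and its calling convention are hypothetical, and only the (classifier, ix2names, true_embeds) tuple layout comes from the code above:

import pickle

import numpy as np


def predict_name(embedding, classifier_path):
    """Hypothetical helper: map one embedding vector to the likeliest name."""
    with open(classifier_path, 'rb') as f:
        classifier, ix2names, _true_embeds = pickle.load(f)
    probs = classifier.predict_proba(embedding.reshape(1, -1))[0]
    best = int(np.argmax(probs))
    # predict_proba columns follow classifier.classes_, which holds the
    # original integer targets that ix2names maps back to names
    return ix2names[classifier.classes_[best]], float(probs[best])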