Example #1
    # Clone model
    net = meta_net.clone()
    optimizer = get_optimizer(net, state)
    # restore the base-optimizer state saved on the previous meta-iteration

    # Sample base task from Meta-Train
    train = meta_train.get_random_task(args.classes, args.train_shots
                                       or args.shots)
    train_iter = make_infinite(DataLoader(train, args.batch, shuffle=True))

    # Update fast net
    loss = do_learning(net, optimizer, train_iter, args.iterations)
    state = optimizer.state_dict()  # save optimizer state

    # Update slow net
    meta_net.point_grad_to(net)
    meta_optimizer.step()

    # Meta-Evaluation
    if meta_iteration % args.validate_every == 0:
        print('\n\nMeta-iteration', meta_iteration)
        print('(started at {})'.format(args.start_meta_iteration))
        print('Meta LR', meta_lr)

        for (meta_dataset, mode) in [(meta_train, 'train'),
                                     (meta_test, 'val')]:

            train, test = meta_dataset.get_random_task_split(
                args.classes, train_K=args.shots, test_K=5)  # test_K is hard-coded to 5 shots
            train_iter = make_infinite(
                DataLoader(train, args.batch, shuffle=True))
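The excerpt above relies on helpers defined elsewhere in the script (make_infinite, do_learning, point_grad_to, get_optimizer). A minimal sketch of plausible definitions for the first three follows; these are assumptions for illustration, not the repository's actual code:

    import torch
    import torch.nn as nn
    import torch.nn.functional as F

    def make_infinite(dataloader):
        # Cycle through the DataLoader forever so the inner loop can
        # draw an arbitrary number of batches.
        while True:
            for batch in dataloader:
                yield batch

    def do_learning(net, optimizer, train_iter, iterations):
        # Plain inner-loop adaptation on the sampled task; returns the
        # last batch loss.
        net.train()
        for _ in range(iterations):
            x, y = next(train_iter)
            loss = F.cross_entropy(net(x), y)
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        return loss.item()

    class MetaModel(nn.Module):
        # Hypothetical base class for meta_net; only the Reptile update
        # is sketched here.
        def point_grad_to(self, target):
            # Set each slow-net gradient to (slow weights - fast weights),
            # so that meta_optimizer.step() moves the slow net a fraction
            # of the way toward the task-adapted fast net.
            for p, target_p in zip(self.parameters(), target.parameters()):
                if p.grad is None:
                    p.grad = torch.zeros_like(p)
                p.grad.data.zero_()
                p.grad.data.add_(p.data - target_p.data)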
Example #2
        train_iter = make_infinite(
            DataLoader(train, args.train_batch, shuffle=True))

        # Update fast net
        loss = do_learning(net, optimizer, train_iter, args.train_iters)
        state = optimizer.state_dict()  # save optimizer state
        batch_nets.append(net)

    batch_params = [list(net.parameters()) for net in batch_nets]
    batch_params = zip(*batch_params)
    for mean_param, batch_param in zip(mean_net.parameters(), batch_params):
        np_mean_param = np.mean([param.data.numpy() for param in batch_param],
                                axis=0)
        mean_param.data = torch.FloatTensor(np_mean_param)

    # Update slow net
    meta_net.point_grad_to(mean_net)
    meta_optimizer.step()

    # Meta-Evaluation
    if meta_iteration % args.validate_every == 0:
        print('\n\nMeta-iteration', meta_iteration)
        print('(started at {})'.format(args.start_meta_iteration))
        print('Meta LR', meta_lr)

        for (meta_dataset, mode) in [(meta_train, 'train'),
                                     (meta_test, 'val')]:

            net = meta_net.clone()
            optimizer = get_optimizer(net, state)  # restore saved state; evaluation does not save it back
            train, test = meta_dataset.get_random_task_split(
                args.classes, train_K=args.shots, test_K=5)
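The parameter-averaging loop in this example round-trips through numpy. A minimal pure-PyTorch sketch of the same averaging, assuming the mean_net and batch_nets names from the listing, would be:

    import torch

    def average_parameters(mean_net, batch_nets):
        # For each parameter, stack the corresponding tensor from every
        # adapted fast net and average over the batch dimension; this is
        # equivalent to the numpy version above but stays in PyTorch.
        with torch.no_grad():
            fast_params = zip(*(net.parameters() for net in batch_nets))
            for mean_p, fast_p in zip(mean_net.parameters(), fast_params):
                mean_p.copy_(torch.stack([p.detach() for p in fast_p]).mean(dim=0))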