struct_param_pre = [
#         [60, "Simple_Layer", {}],
        [60, "Simple_Layer", {}],
        [pre_pooling_neurons, "Simple_Layer", {"activation": "linear"}],
    ]
struct_param_post = None
struct_param_gen_base = [
        [60, "Simple_Layer", {}],
#         [60, "Simple_Layer", {}],
        [60, "Simple_Layer", {}],
]
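# Each layer spec is [num_neurons, layer_type, layer_settings]: struct_param_pre
# defines the per-example encoder applied before pooling, and
# struct_param_gen_base the base architecture of the generative network.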
isParallel = False
inspect_interval = 50
save_interval = 100
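# Encode the experiment id and every hyperparameter into the output directory
# name, so each saved model is self-describing: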
filename = variational_model_PATH + "/trained_models/{0}/Net_{1}_{2}_input_{3}_({4},{5})_stat_{6}_pre_{7}_pool_{8}_context_{9}_hid_{10}_batch_{11}_back_{12}_VAE_{13}_{14}_uncer_{15}_lr_{16}_reg_{17}_actgen_{18}_actmodel_{19}_struct_{20}_{21}_core_{22}_{23}_".format(
    exp_id, exp_mode, task_id_list, input_size, num_train_tasks, num_test_tasks, statistics_output_neurons, pre_pooling_neurons, statistics_pooling, num_context_neurons, main_hidden_neurons, batch_size_task, num_backwards, is_VAE, VAE_beta, is_uncertainty_net, lr, reg_amp, activation_gen, activation_model, get_struct_str(struct_param_gen_base), optim_mode, loss_core, exp_id)
make_dir(filename)
print(filename)

# Obtain tasks:
assert len(task_id_list) == 1
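# Load the pickled few-shot episodes for this task family and convert them to
# torch tensors (placed on GPU when is_cuda is set):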
dataset_filename = dataset_PATH + task_id_list[0] + "_{0}-shot.p".format(num_shots)
with open(dataset_filename, "rb") as f:
    tasks = pickle.load(f)
tasks_train = get_torch_tasks(tasks["tasks_train"], task_id_list[0], is_cuda = is_cuda)
tasks_test = get_torch_tasks(tasks["tasks_test"], task_id_list[0], num_tasks = num_test_tasks, is_cuda = is_cuda)

# Obtain nets:
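# Presumably, statistics_Net pools each task's examples into a fixed-size
# statistics vector, generative_Net maps that vector to the task-specific
# predictor, and generative_Net_logstd (when used) models predictive uncertainty.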
statistics_Net, generative_Net, generative_Net_logstd = get_nets(input_size = input_size, output_size = output_size, main_hidden_neurons = main_hidden_neurons,
                                          pre_pooling_neurons = pre_pooling_neurons, statistics_output_neurons = statistics_output_neurons, num_context_neurons = num_context_neurons,
                                          struct_param_pre = struct_param_pre,
                                          struct_param_gen_base = struct_param_gen_base,
                                          activation_statistics = activation_gen,
                                          )
Example #2
# ## Train:

# In[ ]:

optim_mode = "indi"
dataset = 'omniglot'
num_inst = 6
meta_batch_size = 20
num_updates = 15000
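# lr is the inner-loop (per-task adaptation) step size; meta_lr is the
# outer-loop step size used by the Adam meta-optimizer below.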
lr = 1e-1
meta_lr = 1e-3
loss_fn = nn.CrossEntropyLoss()
reg_amp = 1e-9
exp = 'maml-omniglot-{0}way-{1}shot-TEST'.format(num_classes, num_inst)
make_dir("output/{0}/".format(exp))

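# Fix the Python and NumPy random seeds so task sampling is reproducible: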
random.seed(1337)
np.random.seed(1337)

tr_loss, tr_acc, val_loss, val_acc = [], [], [], []
mtr_loss, mtr_acc, mval_loss, mval_acc = [], [], [], []
reg_list = []

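# Meta-optimizer: Adam over the shared parameters of master_model, stepped once
# per meta-update.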
optimizer = torch.optim.Adam(master_model.parameters(), lr=meta_lr)
for i in range(num_updates):
    # Evaluate on test tasks
    #     mt_loss, mt_acc, mv_loss, mv_acc = test()
    #     mtr_loss.append(mt_loss)
    #     mtr_acc.append(mt_acc)
    #     mval_loss.append(mv_loss)
    #     mval_acc.append(mv_acc)
    pass  # remainder of the loop body is truncated in this excerpt; see the sketch below
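
# The loop body above is truncated; each iteration would normally perform one
# meta-update. Below is a minimal first-order MAML-style sketch of such a step.
# The helper name maml_meta_step, its signature, and the (support, query)
# task-batch format are illustrative assumptions, not the author's actual code.
import copy

import torch


def maml_meta_step(master_model, task_batch, loss_fn, optimizer, inner_lr, num_inner_steps=1):
    """One meta-update: adapt a per-task clone on its support set, then
    accumulate the clone's query-set gradients into the shared parameters."""
    optimizer.zero_grad()
    for x_support, y_support, x_query, y_query in task_batch:
        learner = copy.deepcopy(master_model)  # task-specific fast weights
        inner_opt = torch.optim.SGD(learner.parameters(), lr=inner_lr)
        for _ in range(num_inner_steps):  # inner-loop adaptation on the support set
            inner_opt.zero_grad()
            loss_fn(learner(x_support), y_support).backward()
            inner_opt.step()
        loss_fn(learner(x_query), y_query).backward()  # query loss on adapted weights
        # First-order approximation: reuse the adapted model's gradients as the
        # meta-gradient for the shared parameters.
        for p, lp in zip(master_model.parameters(), learner.parameters()):
            if lp.grad is not None:
                p.grad = lp.grad.clone() if p.grad is None else p.grad + lp.grad
    optimizer.step()  # one Adam step on master_model per meta-batch

# Usage inside the loop above might look like (sample_task_batch is assumed):
#     maml_meta_step(master_model, sample_task_batch(meta_batch_size),
#                    loss_fn, optimizer, lr)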
        [60, "Simple_Layer", {}],
        [60, "Simple_Layer", {}],
        [pre_pooling_neurons, "Simple_Layer", {"activation": "linear"}],
    ]
struct_param_post = None
struct_param_gen_base = [
        [60, "Simple_Layer", {}],
        [60, "Simple_Layer", {}],
        [60, "Simple_Layer", {}],
]
isParallel = False
inspect_interval = 5
save_interval = 100
filename = variational_model_PATH + "/trained_models/{0}/Net_{1}_{2}_input_{3}_({4},{5})_stat_{6}_pre_{7}_pool_{8}_context_{9}_hid_{10}_batch_{11}_back_{12}_VAE_{13}_{14}_uncer_{15}_lr_{16}_reg_{17}_actgen_{18}_actmodel_{19}_struct_{20}_{21}_core_{22}_{23}_".format(
    exp_id, exp_mode, task_id_list, input_size, num_train_tasks, num_test_tasks, statistics_output_neurons, pre_pooling_neurons, statistics_pooling, num_context_neurons, main_hidden_neurons, batch_size_task, num_backwards, is_VAE, VAE_beta, is_uncertainty_net, lr, reg_amp, activation_gen, activation_model, get_struct_str(struct_param_gen_base), optim_mode, loss_core, exp_id)
make_dir(filename)
print(filename)

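# Reuse tasks already present in the session; otherwise try the cached pickle,
# falling back to generating fresh train/test tasks: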
if "tasks_train" not in locals():
    # Obtain tasks:
    if is_load_data:
        try:
            with open(filename + "data.p", "rb") as f:
                dataset = pickle.load(f)
            tasks_train = dataset["tasks_train"]
            tasks_test = dataset["tasks_test"]
            print("dataset loaded.")
        except Exception:
            print("Dataset does not exist; creating one.")
            tasks_train, tasks_test = get_tasks(task_id_list, num_train_tasks, num_test_tasks, task_settings = task_settings, is_cuda = is_cuda, forward_steps = list(range(1, max_forward_steps + 1)))
            dataset = {"tasks_train": tasks_train, "tasks_test": tasks_test}
    #         pickle.dump(dataset, open(filename + "data.p", "wb"))