def fine_tune_model():
    print('\n\nfine-tuning model...')
    data_manager = DataManager()

    plotter = Plotter()

    train_manager = get_train_manager(data_manager)
    n_filters_list = []
    train_losses, test_losses = [], []
    for i in range(1, 11):
        n_filters = 4 * i
        net = Net(conv1_out_channels=n_filters)
        optimizer = optim.SGD(net.parameters(), lr=0.001, momentum=0.9)

        train_manager.init_model(net, optimizer)
        train_and_save_model(train_manager)
        (train_loss, train_accuracy), (test_loss, test_accuracy) = \
            train_manager.get_losses()
        n_filters_list.append(n_filters)
        train_losses.append(train_loss)
        test_losses.append(test_loss)
    plotter.plot_filters_losses(n_filters_list, train_losses, test_losses)

    return train_losses, test_losses, n_filters_list
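

# Plotter.plot_filters_losses is project-specific and not shown here; as an
# illustration only, a minimal sketch of what it could do with matplotlib
# (assumed dependency, hypothetical function name):
import matplotlib.pyplot as plt


def plot_filters_losses_sketch(n_filters_list, train_losses, test_losses):
    # One curve per split: final loss as a function of the number of conv1 filters.
    plt.plot(n_filters_list, train_losses, label='train loss')
    plt.plot(n_filters_list, test_losses, label='test loss')
    plt.xlabel('conv1 output channels')
    plt.ylabel('loss')
    plt.legend()
    plt.show()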


def test_no_spatial_structure():
    print('testing no spatial structure...')
    fixed_shuffle_data_manager = DataManager(shuffle_type='fixed')
    fresh_shuffle_data_manager = DataManager(shuffle_type='fresh')

    for name, data_manager in [('fixed_shuffle', fixed_shuffle_data_manager),
                               ('fresh_shuffle', fresh_shuffle_data_manager)]:
        print(f'\ntesting {name}...')
        train_manager = get_train_manager(data_manager)
        net = Net()
        optimizer = optim.SGD(net.parameters(), lr=0.001, momentum=0.9)
        train_manager.init_model(net, optimizer)
        train_manager.train()
        print('\ntesting no spatial...')
        train_res, test_res = train_manager.get_losses()
        print(f'train loss: {train_res[0]}, train accuracy: {train_res[1]}\n'
              f'test loss: {test_res[0]}, test accuracy: {test_res[1]}')


def test_locality_of_receptive_field():
    print('testing locality of receptive field...')

    noshuffle_data_manager = DataManager(shuffle_type='none')
    shuffled_data_manager = DataManager(shuffle_type='fixed')

    for name, data_manager in [('no_shuffle', noshuffle_data_manager),
                               ('shuffle', shuffled_data_manager)]:
        print(f'\ntesting {name}...')
        train_manager = get_train_manager(data_manager)
        net = Net()
        optimizer = optim.SGD(net.parameters(), lr=0.001, momentum=0.9)
        train_manager.init_model(net, optimizer)
        train_manager.train()
        print('\ntesting local field...')
        train_res, test_res = train_manager.get_losses()
        print(f'train loss: {train_res[0]}, train accuracy: {train_res[1]}\n'
              f'test loss: {test_res[0]}, test accuracy: {test_res[1]}')
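

# DataManager and its shuffle_type argument come from the surrounding project. As an
# illustration only, a hedged guess at what 'fixed' vs 'fresh' pixel shuffling could
# mean, sketched as a stand-alone helper (hypothetical, not part of the original code):
import torch


def shuffle_pixels_sketch(images, shuffle_type='fixed'):
    # images: tensor of shape (N, C, H, W); permute the flattened spatial positions.
    n, c, h, w = images.shape
    flat = images.reshape(n, c, h * w).clone()
    if shuffle_type == 'fixed':
        perm = torch.randperm(h * w)  # one permutation shared by every image
        flat = flat[:, :, perm]
    elif shuffle_type == 'fresh':
        for i in range(n):  # a new permutation drawn per image
            flat[i] = flat[i][:, torch.randperm(h * w)]
    return flat.reshape(n, c, h, w)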
Example #4
# Optional inspection of the "sensitive" field of each sample (commented out):
# print("Trainset:")
# for i in range(len(trainset)):
#     # print(trainset[i])
#     print(trainset[i]["sensitive"])

# print("Testset:")
# for i in range(len(testset)):
#     print("testset " + str(i))
#     print(testset[i]["sensitive"])

writer = SummaryWriter("runs/noConv1D_ascad_desync_50_3")
# TODO: change the model to the one from the paper
net = Net()

# TODO: use NLLLoss (categorical cross-entropy), the Adam optimizer and a cyclic learning rate
criterion = nn.NLLLoss()
optimizer = optim.Adam(net.parameters(), lr=float(config.train.lr))

scheduler = optim.lr_scheduler.OneCycleLR(optimizer,
                                          max_lr=float(config.train.lr),
                                          epochs=config.train.epochs,
                                          steps_per_epoch=len(trainloader))
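# Note: OneCycleLR steps once per optimiser step and sizes its cycle from
# epochs * steps_per_epoch, so epochs above should match the training loop below.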
# TODO: plot in TensorBoard the loss and accuracy curves for train and val (see the sketch after this loop)
for epoch in range(
        config.train.epochs):  # loop over the dataset multiple times
    print('Epoch {}/{}'.format(epoch + 1, config.train.epochs))
    print('-' * 10)
    for phase in ["train", "val"]:
        if phase == "train":
            net.train()
        else:
            net.eval()
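        # Sketch for the TODO above: once per-phase running metrics are computed in
        # this loop (epoch_loss and epoch_acc are assumed names; the original snippet
        # is truncated here), they could be logged to TensorBoard like this:
        # writer.add_scalar('Loss/' + phase, epoch_loss, epoch)
        # writer.add_scalar('Accuracy/' + phase, epoch_acc, epoch)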
Example #5
                      noise_type=noise_type,
                      noise_level=noise_level,
                      init_val=init_val).to(device, dtype)

            save_model(net,
                       experiment_name,
                       0,
                       noise_type,
                       noise_level,
                       model_dir=model_dir)
        else:
            print("starting from epoch {}".format(start_epoch))
            net = recreate_model(model_to_load, dataset=dataset, act=act)

        # build the optimiser from the chosen hyper-parameters
        optimiser = get_optimiser(net.parameters(), op, learning_rate,
                                  momentum)

        # training criterion
        criterion = torch.nn.CrossEntropyLoss()

        # train network
        train(net,
              train_loader,
              test_loader,
              criterion,
              optimiser,
              epochs,
              noise_type,
              noise_level,
              save=True,