Example 1
0
def evaluate_model_retrieval(path='',
                             net=None,
                             result_path='',
                             dataset_name='cifar10',
                             dataset_loader=cifar10_loader):
    """
    Evaluate a model on the retrieval task and pickle the results.

    :param path: optional checkpoint path; if non-empty the model is moved to
        GPU and the checkpoint is loaded into *net* before evaluation
    :param net: the model to evaluate (must not be None)
    :param result_path: file path where the pickled results dict is written
    :param dataset_name: key under which the results are stored in the dict
    :param dataset_loader: callable returning three data loaders for a given
        batch_size
    :return: None (results are written to result_path)
    """
    # If a checkpoint path is supplied, load the model onto the GPU
    if path:
        net.cuda()
        load_model(net, path)

    # NOTE(review): the first returned loader is discarded here, while other
    # call sites unpack (train, test, train_raw) — confirm the ordering is
    # intentional (the "train_loader" below may be the raw/unaugmented one).
    _, test_loader, train_loader = dataset_loader(batch_size=128)
    results = retrieval_evaluation(net, train_loader, test_loader)

    # Store under the dataset name and persist with the fastest protocol
    results = {dataset_name: results}
    with open(result_path, 'wb') as f:
        pickle.dump(results, f, protocol=pickle.HIGHEST_PROTOCOL)
Example 2
0
def run_transfer(learning_rates=(0.001, 0.0001), iters=(3, 0), method='mds'):
    """
    Transfer knowledge from a pre-trained ResNet18 teacher into a pre-trained
    Cifar_Tiny student on CIFAR-10, save the student, and evaluate it on the
    retrieval task.

    :param learning_rates: learning rate for each training phase
    :param iters: number of epochs for each phase (zipped with learning_rates;
        a 0 entry effectively skips that phase)
    :param method: transfer method; only 'pkt' is currently implemented
    :raises ValueError: if *method* is not a supported transfer method
    """
    torch.manual_seed(12345)
    student_layers, teacher_layers, weights, loss_params, T = (3,), (3,), (1,), {}, 2
    print(method)
    transfer_name = method

    # Output paths
    output_path = 'models/aux_' + transfer_name + '.model'
    results_path = 'results/aux_' + transfer_name

    # Student network, warm-started from a pre-trained checkpoint
    student_net = Cifar_Tiny(10)
    load_model(student_net, 'models/tiny_cifar10.model')

    # Load the teacher model
    teacher_net = ResNet18(num_classes=10)
    load_model(teacher_net, 'models/resnet18_cifar10.model')

    train_loader, test_loader, train_loader_raw = cifar10_loader(batch_size=128)

    # Move the models onto the GPU
    student_net.cuda()
    teacher_net.cuda()

    # Perform the transfer.
    # Fix: the original loop variable reused the name `iters`, shadowing the
    # parameter it was iterating over; renamed to n_epochs.
    for lr, n_epochs in zip(learning_rates, iters):
        if method == 'pkt':
            kernel_parameters = {'student': 'combined', 'teacher': 'combined', 'loss': 'combined'}
            prob_transfer(student_net, teacher_net, train_loader, epochs=n_epochs, lr=lr,
                          teacher_layers=teacher_layers, student_layers=student_layers,
                          layer_weights=weights, kernel_parameters=kernel_parameters,
                          loss_params=loss_params)
        else:
            # Fix: `assert False` is stripped under -O; raise explicitly instead
            raise ValueError('Unsupported transfer method: ' + method)

    save_model(student_net, output_path)
    print("Model saved at ", output_path)

    # Perform the evaluation (default metric and L2 metric)
    evaluate_model_retrieval(net=Cifar_Tiny(num_classes=10), path=output_path,
                             result_path=results_path + '_retrieval.pickle', layer=3)
    evaluate_model_retrieval(net=Cifar_Tiny(num_classes=10), path=output_path,
                             result_path=results_path + '_retrieval_e.pickle', layer=3, metric='l2')
Example 3
0
def evaluate_kt_methods(net_creator,
                        donor_creator,
                        donor_path,
                        transfer_loader,
                        batch_size=128,
                        donor_name='very_small_cifar10',
                        net_name='tiny_cifar',
                        transfer_name='cifar10',
                        iters=50,
                        init_model_path=None):
    """
    Run and evaluate four knowledge-transfer methods (hint, distill, pkt,
    hint_optimized) from a donor network into a freshly created student.

    For each method a fresh student (optionally warm-started from
    *init_model_path*) and a fresh donor are created, the transfer is
    performed, the resulting model is saved, and it is evaluated on the
    retrieval task.

    :param net_creator: zero-argument callable returning a new student network
    :param donor_creator: zero-argument callable returning a new donor network
    :param donor_path: checkpoint to load into the donor network
    :param transfer_loader: callable returning three data loaders for a given
        batch_size
    :param batch_size: batch size for the transfer data loaders
    :param donor_name: donor identifier used in output file names
    :param net_name: student identifier used in output file names
    :param transfer_name: dataset identifier used in output file names
    :param iters: number of transfer iterations (single phase)
    :param init_model_path: optional checkpoint to warm-start the student
    """
    # (transfer_method, filename_token) pairs. 'pkt' historically saved its
    # files under the 'kt' token; that is preserved for backward-compatible
    # file names. Fix: the original built the 'hint' and 'hint_optimized'
    # results paths with an accidental doubled underscore
    # ('hint_' + '_' + transfer_name), inconsistent with the other methods.
    methods = (('hint', 'hint'),
               ('distill', 'distill'),
               ('pkt', 'kt'),
               ('hint_optimized', 'hint_optimized'))

    for transfer_method, token in methods:
        # Fresh student, optionally warm-started
        net = net_creator()
        if init_model_path is not None:
            load_model(net, init_model_path)

        # Fresh donor loaded from its checkpoint
        donor_net = donor_creator()
        load_model(donor_net, donor_path)

        train_loader, test_loader, train_loader_raw = transfer_loader(
            batch_size=batch_size)

        base = net_name + '_' + donor_name + '_' + token + '_' + transfer_name
        output_path = 'models/' + base + '.model'
        results_path = 'results/' + base + '.pickle'

        perform_transfer_knowledge(net,
                                   donor_net,
                                   transfer_loader=train_loader,
                                   transfer_method=transfer_method,
                                   output_path=output_path,
                                   iters=[iters],
                                   learning_rates=[0.0001])
        evaluate_model_retrieval(net=Cifar_Tiny(num_classes=10),
                                 path=output_path,
                                 result_path=results_path)
Example 4
0
def run_transfer(
        learning_rates=(0.001, ), epochs=(10, ), decay=0.7, init_weight=100):
    """
    Transfer knowledge from a Cifar_Tiny teacher into a Cifar_Very_Tiny
    student, annealing the auxiliary-layer loss weight after every epoch,
    then save the student and evaluate it on the retrieval task.

    :param learning_rates: learning rate for each training phase
    :param epochs: number of epochs for each phase (zipped with learning_rates)
    :param decay: multiplicative decay applied to the auxiliary weight per epoch
    :param init_weight: initial weight for the auxiliary layers
    """
    torch.manual_seed(12345)
    print(init_weight, decay)

    # Transfer is performed over all four layers of both networks
    student_layers = (3, 2, 1, 0)
    teacher_layers = (3, 2, 1, 0)
    loss_params = {}
    T = 2

    # Output paths
    output_path = 'models/proposed.model'
    results_path = 'results/proposed'

    # Student network to be trained from scratch
    student_net = Cifar_Very_Tiny(10)

    # Teacher network, restored from the auxiliary PKT checkpoint
    teacher_net = Cifar_Tiny(num_classes=10)
    load_model(teacher_net, 'models/aux_pkt.model')

    kernel_parameters = {
        'student': 'combined',
        'teacher': 'combined',
        'loss': 'combined'
    }
    train_loader, test_loader, train_loader_raw = cifar10_loader(
        batch_size=128)

    # Move both models onto the GPU
    student_net.cuda()
    teacher_net.cuda()

    np.random.seed(1)

    cur_weight = init_weight
    for cur_epoch, cur_lr in zip(epochs, learning_rates):
        print("Running for ", cur_epoch, " epochs with lr = ", cur_lr)
        for _ in range(cur_epoch):
            print(cur_weight)
            # First layer keeps unit weight; auxiliary layers use the
            # current (decaying) weight
            layer_weights = (1, cur_weight, cur_weight, cur_weight)
            prob_transfer(student_net,
                          teacher_net,
                          train_loader,
                          epochs=1,
                          lr=cur_lr,
                          teacher_layers=teacher_layers,
                          student_layers=student_layers,
                          layer_weights=layer_weights,
                          kernel_parameters=kernel_parameters,
                          loss_params=loss_params)
            cur_weight *= decay

    save_model(student_net, output_path)
    print("Model saved at ", output_path)

    # Evaluate the trained student on retrieval (default and L2 metrics)
    evaluate_model_retrieval(net=student_net,
                             path='',
                             result_path=results_path + '_retrieval.pickle',
                             layer=3)
    evaluate_model_retrieval(net=student_net,
                             path='',
                             result_path=results_path + '_retrieval_e.pickle',
                             layer=3,
                             metric='l2')