Ejemplo n.º 1
0
def proto_test(icarl, protoset, iter_group, mixing, loader):
    """Rebuild the exemplar protoset and score the model on the test split.

    Uses the module-level ``param`` and ``path`` dicts for the batch size
    and working directory.  Group index 0 and split mode 2 (test) are
    hard-coded for the MyDataset lookup.

    Returns a ``(result, protoset)`` pair, where ``protoset`` is the newly
    constructed exemplar set.
    """
    print('Constructing protoset')
    protoset = icarl.construct_proto(iter_group, mixing, loader, protoset)
    print('Complete protoset')
    print('Testing')
    test_data = utils_data.MyDataset(path['work_path'], 0, 2)
    test_loader = DataLoader(test_data,
                             batch_size=param['batch_size'],
                             shuffle=False)
    features, labels = icarl.feature_extract(test_loader)
    outcome = icarl.classify(protoset, features, labels, iter_group)
    print('Complete test')
    return outcome, protoset
Ejemplo n.º 2
0
Archivo: main.py Proyecto: banziFD/il
def test(param, path, n):
    """Evaluate each of the *n* incrementally trained models on its test split.

    For every group index, loads the pickled protoset and the saved model
    from ``path['work_path']``, runs feature extraction and protoset-based
    classification, and collects the result.

    Returns a list of length *n* whose i-th element is a single-item list
    wrapping group i's classification result (shape preserved for
    backward compatibility with callers).
    """
    result_mem = list()
    for iter_group in range(n):
        protoset_name = path['work_path'] + '/protoset_{}'.format(iter_group)
        model_name = path['work_path'] + '/model_{}'.format(iter_group)
        # NOTE(review): pickle.load / torch.load can execute arbitrary code;
        # only safe because these files are produced by this project itself.
        with open(protoset_name, 'rb') as f:
            # fix: removed redundant f.close() -- `with` already closes f
            protoset = pickle.load(f)
        icarl = torch.load(model_name)
        if icarl.gpu:
            icarl = icarl.cuda()
        print('Testing_{}'.format(iter_group))
        # Mode 2 selects the test split for this group.
        testset = utils_data.MyDataset(path['work_path'], iter_group, 2)
        testloader = DataLoader(testset,
                                batch_size=param['batch_size'],
                                shuffle=False)
        feature_mem, label_mem = icarl.feature_extract(testloader)
        result = icarl.classify(protoset, feature_mem, label_mem, iter_group)
        current_result = [result]
        print('Complete test')
        result_mem.append(current_result)
    return result_mem
Ejemplo n.º 3
0
Archivo: main.py Proyecto: banziFD/il
def train_model(param, path, mixing, label_dict, n):
    """Incrementally train an iCaRL model over *n* class groups.

    For each group: builds a fresh Adam optimizer, loads the previous
    group's protoset and model snapshot (group 0 starts empty), trains for
    ``param['epochs']`` epochs with validation, then saves a model snapshot
    and the newly constructed protoset to ``path['work_path']``.

    Fixes vs. original: the log file handle is now closed via try/finally
    (it previously leaked), and redundant ``f.close()`` calls inside
    ``with`` blocks were removed.
    """
    ### Start of the main algorithm ###
    print('apply training algorithm...')
    # Model initialization
    icarl = utils_icarl.iCaRL(param, label_dict)
    # NOTE(review): size_average is deprecated in newer torch (use
    # reduction='sum'); kept as-is for the torch version this file targets.
    loss_fn = torch.nn.BCELoss(size_average=False)

    # Record the training process in an unbuffered, append-mode binary log.
    log = open(path['work_path'] + '/log.txt', 'ab', 0)
    try:
        log.write('epoch time training_loss validation_loss \n'.encode())

        # Training algorithm
        for iter_group in range(n):  # nb_group
            # Fresh optimizer per group.
            optimizer = torch.optim.Adam(icarl.parameters(),
                                         lr=param['lr'],
                                         weight_decay=param['wght_decay'])
            # scheduler = MultiStepLR(optimizer, milestones = lr_milestones, gamma = lr_factor)
            # Load the protoset / previous model saved by the prior group.
            if iter_group == 0:
                protoset = dict()
                icarl_pre = None
            else:
                protoset_name = path['work_path'] + '/protoset_{}'.format(
                    iter_group - 1)
                icarl_pre_name = path['work_path'] + '/model_{}'.format(
                    iter_group - 1)
                with open(protoset_name, 'rb') as f:
                    protoset = pickle.load(f)
                icarl_pre = torch.load(icarl_pre_name)
            # Training data for this group (mode 0), exemplars mixed in.
            data = utils_data.MyDataset(path['work_path'], iter_group, 0,
                                        protoset)
            loader = DataLoader(data, batch_size=param['batch_size'],
                                shuffle=True)
            # Validation data for this group (mode 1).
            data_val = utils_data.MyDataset(path['work_path'], iter_group, 1)
            loader_val = DataLoader(data_val,
                                    batch_size=param['batch_size'],
                                    shuffle=True)
            for epoch in range(param['epochs']):
                start = time.time()
                # Train, then validate, against the previous model (icarl_pre)
                # for the distillation term.
                error_train = utils_icarl.train(icarl, icarl_pre, optimizer,
                                                loss_fn, loader)
                error_val = utils_icarl.val(icarl, icarl_pre, loss_fn,
                                            loader_val)
                # Monitor line: epoch, wall time, normalized losses.
                # NOTE(review): 600 looks like a hard-coded training-set
                # size -- confirm and consider moving it into `param`.
                current_line = [
                    epoch,
                    time.time() - start, error_train / 600,
                    error_val / param['nb_val']
                ]
                print(current_line)
                log.write((str(current_line)[1:-1] + '\n').encode())
                print('complete {}% on group {}'.format(
                    (epoch + 1) * 100 / param['epochs'], iter_group))
            # Snapshot the trained weights before protoset construction so
            # the saved model matches what was just trained.
            icarl_copy = copy.deepcopy(icarl)
            if icarl.gpu:
                icarl = icarl.cuda()
            protoset = icarl.construct_proto(iter_group, mixing, loader,
                                             protoset)
            torch.save(icarl_copy,
                       path['work_path'] + '/model_{}'.format(iter_group))
            protoset_name = path['work_path'] + '/protoset_{}'.format(
                iter_group)
            with open(protoset_name, 'wb') as f:
                pickle.dump(protoset, f)
    finally:
        # Fix: the original never closed the log handle.
        log.close()
Ejemplo n.º 4
0
# Append to an unbuffered binary log file; header row names the columns.
log = open(work_path + '/log.txt', 'ab', 0)
log.write('epoch time training_loss validation_loss \n'.encode())

# NOTE(review): this fragment is truncated -- the inner training loop body
# continues beyond what is visible here.
for iter_group in range(2):  #nb_group
    # Loading protoset
    if (iter_group == 0):
        protoset = dict()
    else:
        # Protoset written by the previous group's iteration.
        protoset_name = work_path + '/protoset{}'.format(iter_group - 1)
        with open(protoset_name, 'rb') as f:
            protoset = pickle.load(f)
            f.close()  # redundant: `with` already closes f

    # Loading training data by group (exemplars supplied via `protoset`)
    data = utils_data.MyDataset(work_path,
                                iter_group,
                                val=False,
                                protoset=protoset)
    loader = DataLoader(data, batch_size=batch_size, shuffle=True)
    # loading validation data by group
    data_val = utils_data.MyDataset(work_path, iter_group, True)
    loader_val = DataLoader(data_val, batch_size=batch_size, shuffle=True)
    # known_mask -- presumably masks classes seen so far; detached copies
    # so gradients do not flow into them (verify against iCaRL definition)
    known = Variable(icarl.known.clone(), requires_grad=False)
    # unknown_mask -- presumably masks not-yet-seen classes
    unknown = Variable(icarl.unknown.clone(), requires_grad=False)
    for epoch in range(param['epochs']):
        scheduler.step()
        start = time.time()
        error_train, error_val = 0, 0
        for step, (x, y, x_orig) in enumerate(loader):
            x = Variable(x)
Ejemplo n.º 5
0
    'nb_proto': nb_proto,
    'epochs': epochs,
    'lr': lr,
    'lr_milestones': lr_milestones,
    'lr_factor': lr_factor,
    'gpu': gpu,
    'wght_decay': wght_decay
}
########################################

######### Paths  ##########
# Working space
# dataset_path = "/mnt/e/dataset/cifar-10-python"
# work_path = '/mnt/e/ilex'
# test_path = '/mnt/e/ilte'

dataset_path = "/home/spyisflying/dataset/cifar/cifar-10-batches-py"
work_path = '/home/spyisflying/ilex'
test_path = '/home/spyisflying/ilte'
###########################
# Class-label pairs grouped together per incremental step.
mixing = [(4, 7), (8, 5), (6, 2), (1, 3), (9, 0)]
#utils_data.prepare_files_sample(dataset_path, work_path, mixing, nb_group, nb_cl, nb_val)

# Babysitting loop: classify with each per-epoch snapshot of group 0.
for epoch in range(20):
    # Fix: the original passed an anonymous open() to pickle.load and
    # leaked the file handle; a context manager closes it deterministically.
    with open(test_path + '/protoset_0_{}'.format(epoch), 'rb') as f:
        protoset = pickle.load(f)
    testset = utils_data.MyDataset(test_path, 0, 2, protoset)
    model = torch.load(test_path + '/model0_{}'.format(epoch))
    #loader = DataLoader(testset, batch_size = 16, shuffle = False)
    #model.feature_extract(loader, test_path)
    model.classify(protoset, test_path, 0, epoch)
Ejemplo n.º 6
0
                            batch_size=param['batch_size'],
                            shuffle=False)
    feature_mem, label_mem = icarl.feature_extract(testloader)
    result = icarl.classify(protoset, feature_mem, label_mem, iter_group)
    print('Complete test')
    return result, protoset


if __name__ == '__main__':
    # Build configuration, paths, and the label grouping for the run.
    param = set_param()
    path = set_path()
    label_dict, mixing = set_data(param, path)
    #     train_model(param, path, mixing)
    # Repeatedly rebuild the protoset from one trained snapshot and collect
    # the classification results -- presumably to measure the variance of
    # exemplar selection (verify against construct_proto).
    for iter_group in range(1):
        protoset = dict()
        data = utils_data.MyDataset(path['work_path'], iter_group, 0, protoset)
        loader = DataLoader(data, batch_size=param['batch_size'], shuffle=True)
        result_mem = []
        for epoch in [59]:  #param['epochs']
            start = time.time()
            # Load the per-epoch snapshot saved during training.
            icarl = torch.load(path['work_path'] +
                               '/model_{}_{}'.format(iter_group, epoch))
            if (icarl.gpu):
                icarl = icarl.cuda()
            current_result = list()
            # NOTE(review): 70 repetitions of proto_test on the same model;
            # the fragment appears truncated after the commented block below.
            for i in range(70):
                result, protoset_ = proto_test(icarl, protoset, iter_group,
                                               mixing, loader)
                current_result.append(result)
#                 if((i == 0 or i == 5 or i == 9) and (epoch % 10 == 0)):
#                     with open(path['work_path'] + '/protoset_{}_{}'.format(epoch, i), 'wb') as f: