Example #1
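Cross-subject transfer: within one recording session, the last subject is held out as the target domain, the remaining subjects are stacked into a single source domain, both are optionally normalized, and a DAN model wrapped in `DANNet` is trained on the pair. The import lines at the top are inferred from usage; `utils`, `models`, and `DANNet` are project-local.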
import copy

import numpy as np
import torch

import models  # project-local module defining models.DAN
import utils   # project-local helpers: data loading, normalization, CustomDataset
# DANNet and the normalization flag `bn` are defined elsewhere in this module;
# `bn` is set in the driver loop shown in Example #3.


def cross_subject(data, label, session_id, category_number, batch_size, iteration, lr, momentum, log_interval):
    # cross-subject, for 3 sessions: subjects 1-14 as sources, subject 15 as target
    one_session_data, one_session_label = copy.deepcopy(data[session_id]), copy.deepcopy(label[session_id])
    # the last subject of the chosen session becomes the target domain
    target_data, target_label = one_session_data.pop(), one_session_label.pop()
    # the remaining subjects are the sources (the lists are already deep copies)
    source_data, source_label = one_session_data, one_session_label
    # stack all source subjects into single arrays
    source_data_comb = np.vstack(source_data)
    source_label_comb = np.vstack(source_label)
    # `bn` selects the normalization scheme (set in the driver loop, Example #3)
    if bn == 'ele':
        source_data_comb = utils.norminy(source_data_comb)
        target_data = utils.norminy(target_data)
    elif bn == 'sample':
        source_data_comb = utils.norminx(source_data_comb)
        target_data = utils.norminx(target_data)
    elif bn == 'global':
        source_data_comb = utils.normalization(source_data_comb)
        target_data = utils.normalization(target_data)
    # 'none' (or any other value): leave the data unnormalized
    source_loader = torch.utils.data.DataLoader(dataset=utils.CustomDataset(source_data_comb, source_label_comb),
                                                            batch_size=batch_size,
                                                            shuffle=True,
                                                            drop_last=True)
    target_loader = torch.utils.data.DataLoader(dataset=utils.CustomDataset(target_data, target_label),
                                                            batch_size=batch_size, 
                                                            shuffle=True, 
                                                            drop_last=True)
    model = DANNet(model=models.DAN(pretrained=False, number_of_category=category_number),
                source_loader=source_loader,
                target_loader=target_loader,
                batch_size=batch_size,
                iteration=iteration,
                lr=lr,
                momentum=momentum,
                log_interval=log_interval)
    acc = model.train()
    return acc
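A minimal usage sketch (hypothetical; every value below is an illustrative assumption rather than a setting from the source, and `data`/`label` are the nested per-session, per-subject arrays returned by `utils.load_data`):

    # assumed values, shown only to illustrate the call
    acc = cross_subject(data, label, session_id=0, category_number=3,
                        batch_size=32, iteration=1000, lr=0.01,
                        momentum=0.9, log_interval=10)
    print('accuracy on the held-out target subject:', acc)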
Example #2
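Cross-session transfer: for a single subject, sessions 0 and 1 are combined into the source domain and session 2 is the target. The setup otherwise mirrors Example #1 and relies on the same imports.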
def cross_session(data, label, subject_id, category_number, batch_size, iteration, lr, momentum, log_interval):
    # cross-session: for one subject, sessions 0 and 1 are the sources,
    # session 2 is the target
    target_data, target_label = copy.deepcopy(data[2][subject_id]), copy.deepcopy(label[2][subject_id])
    source_data = [copy.deepcopy(data[0][subject_id]), copy.deepcopy(data[1][subject_id])]
    source_label = [copy.deepcopy(label[0][subject_id]), copy.deepcopy(label[1][subject_id])]
    # stack the two source sessions into single arrays
    source_data_comb = np.vstack((source_data[0], source_data[1]))
    source_label_comb = np.vstack((source_label[0], source_label[1]))
    # `bn` selects the normalization scheme (set in the driver loop, Example #3)
    if bn == 'ele':
        source_data_comb = utils.norminy(source_data_comb)
        target_data = utils.norminy(target_data)
    elif bn == 'sample':
        source_data_comb = utils.norminx(source_data_comb)
        target_data = utils.norminx(target_data)
    elif bn == 'global':
        source_data_comb = utils.normalization(source_data_comb)
        target_data = utils.normalization(target_data)
    # 'none' (or any other value): leave the data unnormalized

    source_loader = torch.utils.data.DataLoader(dataset=utils.CustomDataset(source_data_comb, source_label_comb),
                                                            batch_size=batch_size,
                                                            shuffle=True,
                                                            drop_last=True)
    target_loader = torch.utils.data.DataLoader(dataset=utils.CustomDataset(target_data, target_label),
                                                            batch_size=batch_size, 
                                                            shuffle=True, 
                                                            drop_last=True)
    model = DANNet(model=models.DAN(pretrained=False, number_of_category=category_number),
                source_loader=source_loader,
                target_loader=target_loader,
                batch_size=batch_size,
                iteration=iteration,
                lr=lr,
                momentum=momentum,
                log_interval=log_interval)
    acc = model.train()
    return acc
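Apart from swapping `session_id` for `subject_id`, a call to `cross_session` looks exactly like the `cross_subject` sketch above; the sources are fixed to sessions 0 and 1 and the target to session 2.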
Example #3
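A fragment of the driver loop: for every dataset and every normalization type `bn`, it normalizes a fresh copy of each session/subject array and prepares the training settings. This enclosing scope is where the global `bn` used by the two functions above is defined.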
    for dataset_name in dataset_name_all:
        print('Dataset name: ', dataset_name)
        data, label = utils.load_data(dataset_name)
        for bn in bn_all:
            print('Normalization type: ', bn)
            # pick the normalization function for this `bn` setting
            if bn == 'ele':
                norm_fn = utils.norminy
            elif bn == 'sample':
                norm_fn = utils.norminx
            elif bn == 'global':
                norm_fn = utils.normalization
            else:  # 'none' or any unrecognized value: no normalization
                norm_fn = None
            # normalize a fresh copy of every session/subject array
            data_tmp = copy.deepcopy(data)
            if norm_fn is not None:
                for i in range(len(data_tmp)):
                    for j in range(len(data_tmp[0])):
                        data_tmp[i][j] = norm_fn(data_tmp[i][j])

            trial_total, category_number, _ = utils.get_number_of_label_n_trial(
                dataset_name)

            # training settings
            batch_size = 32
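The listing is truncated here. A plausible continuation (a sketch under assumed hyperparameter values, not code recovered from the source) would forward these settings to the two functions above:

            # assumed values, shown only to illustrate the hand-off
            iteration, lr, momentum, log_interval = 1000, 0.01, 0.9, 10
            # e.g. cross-session transfer for subject 0 of this dataset;
            # whether the normalized copy `data_tmp` or the raw `data` is
            # passed depends on the original driver, which is not shown
            acc = cross_session(data_tmp, label, 0, category_number,
                                batch_size, iteration, lr, momentum,
                                log_interval)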