# Code example #1
def set_and_save_salina_proceed_1x1_data():
    """Load the Salinas dataset, scale each raw pixel spectrum to [0, 1],
    and save the result together with its labels as a .mat file.
    """
    dataset = HSI.HSIDataSet('salina')
    dataset.get_data()
    dataset.get_labels()
    # NOTE(review): dst_shape below is (512, 217, 224); the exact raw shape
    # printed here depends on the loader -- confirm against HSIDataSet.
    print('data shape is: ', dataset.data.shape)
    print('label shape is: ', dataset.labels.shape)

    raw = np.array(dataset.data)
    labels = np.array(dataset.labels)
    preproc = HSI_preprocess(name='salina', dst_shape=(512, 217, 224))
    # Pad channels first, then normalize the whole cube to [0, 1].
    scaled = preproc.scale_to1(preproc.add_channel(raw))
    sio.savemat(dataset.dir + '/salina_1x1_mean.mat',
                {'data': scaled, 'labels': labels})
# Code example #2
def set_and_save_salina_proceed_data():
    """Load the Salinas dataset and save neighborhood-aggregated variants.

    For each (patch_size, include-variance) combination, compute patch
    statistics via ``HSI_preprocess.get_mean_data``, scale the result to
    [0, 1], and write a .mat file containing ``data`` and ``labels``.
    Produces the same four files as before: 3x3/5x5 mean and mean+std.
    """
    dataset = HSI.HSIDataSet('salina')
    dataset.get_data()
    dataset.get_labels()
    # NOTE(review): dst_shape below is (512, 217, 224); confirm the raw
    # shapes printed here against HSIDataSet.
    print('data shape is: ', dataset.data.shape)
    print('label shape is: ', dataset.labels.shape)

    data, labels = np.array(dataset.data), np.array(dataset.labels)
    process = HSI_preprocess(name='salina', dst_shape=(512, 217, 224))
    data_add_channel = process.add_channel(data)

    # (patch_size, include variance, output filename) -- the four
    # combinations reproduce the original quadruplicated save blocks.
    variants = [
        (3, False, 'salina_3x3_mean.mat'),
        (5, False, 'salina_5x5_mean.mat'),
        (3, True, 'salina_3x3_mean_std.mat'),
        (5, True, 'salina_5x5_mean_std.mat'),
    ]
    for patch_size, with_var, filename in variants:
        processed = process.get_mean_data(data=data_add_channel,
                                          patch_size=patch_size,
                                          var=with_var)
        processed = process.scale_to1(processed)
        sio.savemat(dataset.dir + '/' + filename, {
            'data': processed,
            'labels': labels
        })
def test_train_test_split():
    """Smoke-test train_test_split on Indian Pines and time the call.

    NOTE(review): a second ``test_train_test_split`` is defined later in
    this file and shadows this one -- confirm which is intended.
    """
    import time
    import hyperspectral_datasets as HSI

    start = time.time()

    ip = HSI.HSIDataSet('indian_pines')
    ip.get_data()
    ip.get_labels()
    label_unique = [2, 3, 5, 6, 8, 10, 11, 12, 14]
    train_part, test_part = train_test_split(ip.data, ip.labels,
                                             label_unique=label_unique,
                                             train=200)
    train_label, train_index, train_data = train_part
    test_label, test_index, test_data = test_part

    print('train_label.shape : ', train_label.shape)
    print('train_index.shape : ', train_index.shape)
    print('train_data.shape : ', train_data.shape)
    print('test_label.shape : ', test_label.shape)
    print('test_index.shape : ', test_index.shape)
    print('test_data.shape : ', test_data.shape)
    print('use time : ', time.time() - start)
def test_split_by_ratio():
    """Smoke-test the ratio-based label split (50/50) on Indian Pines.

    NOTE(review): a second ``test_split_by_ratio`` is defined later in
    this file and shadows this one -- confirm which is intended.
    """
    import time
    import hyperspectral_datasets as HSI

    start = time.time()

    ip = HSI.HSIDataSet('indian_pines')
    ip.get_data()
    ip.get_labels()
    ip.pos = get_lables_pos(ip.labels, label_unique=list(range(1, 17)))
    train_pos, test_pos = divide_labels_by_ratio(ip.pos, 0.5)
    train_part, test_part = get_data_divided(train_pos, test_pos, ip.data)
    train_label, train_index, train_data = train_part
    test_label, test_index, test_data = test_part

    print('train_label.shape : ', train_label.shape)
    print('train_index.shape : ', train_index.shape)
    print('train_data.shape : ', train_data.shape)
    print('test_label.shape : ', test_label.shape)
    print('test_index.shape : ', test_index.shape)
    print('test_data.shape : ', test_data.shape)
    print('use time : ', time.time() - start)
def test_split_by_nums():
    """Smoke-test the count-based label split (200 per class) on Indian Pines.

    Bug fix: the original unpacked the training data into ``traizn_data``
    (typo), so the later ``train_data.shape`` print raised NameError.
    """
    import time
    import hyperspectral_datasets as HSI
    time1 = time.time()

    ip = HSI.HSIDataSet('indian_pines')
    ip.get_data()
    ip.get_labels()
    ip.pos = get_lables_pos(ip.labels, label_unique=[
                            2, 3, 5, 8, 10, 11, 12, 14])
    train_label_pos, test_label_pos = divide_labels_by_nums(ip.pos, 200)
    (train_label, train_index, train_data), (test_label, test_index, test_data) = get_data_divided(
        train_label_pos, test_label_pos, ip.data)
    print('train_label.shape : ', train_label.shape)
    print('train_index.shape : ', train_index.shape)
    print('train_data.shape : ', train_data.shape)
    print('test_label.shape : ', test_label.shape)
    print('test_index.shape : ', test_index.shape)
    print('test_data.shape : ', test_data.shape)
    time2 = time.time()
    print('use time : ', time2 - time1)
def test_train_test_split():
    """Smoke-test train_test_split on Indian Pines and report elapsed time.

    NOTE(review): this redefines the earlier ``test_train_test_split`` in
    this file -- only this later definition survives at import time.
    """
    start = time.time()

    dataset = HSI.HSIDataSet('indian_pines')
    dataset.get_data()
    dataset.get_labels()
    label_unique = [2, 3, 5, 6, 8, 10, 11, 12, 14]
    train_part, test_part = train_test_split(dataset.data,
                                             dataset.labels,
                                             label_unique=label_unique,
                                             train=200)
    train_label, train_index, train_data = train_part
    test_label, test_index, test_data = test_part

    print('train_label.shape : ', train_label.shape)
    print('train_index.shape : ', train_index.shape)
    print('train_data.shape : ', train_data.shape)
    print('test_label.shape : ', test_label.shape)
    print('test_index.shape : ', test_index.shape)
    print('test_data.shape : ', test_data.shape)
    print('use time : ', time.time() - start)
def test_split_by_ratio():
    """Smoke-test the 50/50 ratio split on Indian Pines and report timing.

    NOTE(review): this redefines the earlier ``test_split_by_ratio`` in
    this file -- only this later definition survives at import time.
    """
    start = time.time()

    dataset = HSI.HSIDataSet('indian_pines')
    dataset.get_data()
    dataset.get_labels()
    dataset.pos = get_lables_pos(dataset.labels,
                                 label_unique=[2, 3, 5, 6, 8, 10, 11, 12, 14])
    train_pos, test_pos = divide_labels_by_ratio(dataset.pos, 0.5)
    train_part, test_part = get_data_divided(train_pos, test_pos,
                                             dataset.data)
    train_label, train_index, train_data = train_part
    test_label, test_index, test_data = test_part

    print('train_label.shape : ', train_label.shape)
    print('train_index.shape : ', train_index.shape)
    print('train_data.shape : ', train_data.shape)
    print('test_label.shape : ', test_label.shape)
    print('test_index.shape : ', test_index.shape)
    print('test_data.shape : ', test_data.shape)
    print('use time : ', time.time() - start)
def set_and_save_indian_pines_5d_data(patch_size=5, is_rotate=True):
    """Build 5-D patch samples from Indian Pines and store them in HDF5.

    Args:
        patch_size: side length of the square spatial patch.
        is_rotate: if True, each pixel contributes 4 rotated patches and
            the output filename carries a '_with_rotate' suffix.

    Improvement: the original opened the HDF5 file in 'w' mode, closed it,
    then immediately reopened it in 'a' mode to fill the data; a single
    ``with`` context avoids the redundant reopen and guarantees the handle
    is closed even if patch generation raises.
    """
    dataset = HSI.HSIDataSet('indian_pines')
    dataset.get_data()
    dataset.get_labels()
    print('data shape is: ', dataset.data.shape)  # 145,145,200
    print('label shape is: ', dataset.labels.shape)  # 145, 145

    data, labels = np.array(dataset.data), np.array(dataset.labels)
    dataset_process = HSI_preprocess(
        name='indian_pines', dst_shape=(145, 145, 224))
    data = dataset_process.add_channel(data)
    data = dataset_process.data_add_zero(data)
    data_scale_to1 = data / np.max(data)
    data_5d = dataset_process.get_patch_data(
        data_scale_to1, patch_size=patch_size, is_rotate=is_rotate)

    [h, w, n_channels] = data_scale_to1.shape
    # Four rotated samples per pixel when rotating, otherwise one each.
    n_samples = h*w*4 if is_rotate else h*w
    if is_rotate:
        h5file_name = dataset.dir + \
            '/indian_5d_patch_{}_with_rotate.h5'.format(patch_size)
    else:
        h5file_name = dataset.dir + '/indian_5d_patch_{}.h5'.format(patch_size)

    with h5py.File(h5file_name, 'w') as h5f:
        h5f.create_dataset('data', shape=(n_samples, n_channels, patch_size, patch_size, 1),
                           chunks=(1024, n_channels, patch_size, patch_size, 1), dtype=np.float32,
                           maxshape=(None, n_channels, patch_size, patch_size, 1))
        h5f.create_dataset('labels', data=labels)
        for i, (x, y, patch) in enumerate(data_5d):
            if is_rotate:
                # patch is assumed to stack the 4 rotations on axis 0 --
                # confirm against get_patch_data.
                h5f['data'][4*i:4*(i+1)] = patch[:, :, :, :, None]
            else:
                h5f['data'][i] = patch[None, :, :, :, None]