import os
import sys

# Make the parent directory importable before pulling in deephyp.
sys.path.insert(0, '..')

from urllib.request import urlretrieve

import numpy as np
import scipy.io

from deephyp import autoencoder
from deephyp import data

# NOTE(review): `reporthook` is used below but is not defined or imported in
# the visible portion of this file -- confirm it exists in the full file.

if __name__ == '__main__':

    # Dataset download (skip if PaviaU.mat is already on disk), e.g.:
    #   urlretrieve('http://www.ehu.eus/ccwintco/uploads/e/ee/PaviaU.mat',
    #               os.path.join(os.getcwd(), 'PaviaU.mat'), reporthook)

    # Load the Pavia University hyperspectral cube as a numpy array.
    img = scipy.io.loadmat('PaviaU.mat')['paviaU']

    # Wrap the cube in a dataset object and scale spectra for training.
    hypData = data.HypImg(img)
    hypData.pre_process('minmax')

    # Partition the pre-processed spectra: the first `trainSamples` rows go
    # to training, the following `valSamples` rows to validation.
    trainSamples = 200000
    valSamples = 100
    train_slice = slice(0, trainSamples)
    val_slice = slice(trainSamples, trainSamples + valSamples)

    # For an autoencoder the targets are the inputs themselves.
    dataTrain = data.Iterator(
        dataSamples=hypData.spectraPrep[train_slice, :],
        targets=hypData.spectraPrep[train_slice, :],
        batchSize=1000)
    dataVal = data.Iterator(
        dataSamples=hypData.spectraPrep[val_slice, :],
        targets=hypData.spectraPrep[val_slice, :])
    # --- Exemplo n.º 2 (scrape artifact: stray page marker converted to a comment) ---
    # download dataset and ground truth (if already downloaded, comment this out)
    # NOTE(review): `reporthook` is not defined or imported in the visible
    # portion of this file -- confirm it exists in the full file.
    urlretrieve('http://www.ehu.eus/ccwintco/uploads/e/ee/PaviaU.mat',
                os.path.join(os.getcwd(), 'PaviaU.mat'), reporthook)
    urlretrieve('http://www.ehu.eus/ccwintco/uploads/5/50/PaviaU_gt.mat',
                os.path.join(os.getcwd(), 'PaviaU_gt.mat'), reporthook)

    # read data into numpy array
    mat = scipy.io.loadmat('PaviaU.mat')
    img = mat['paviaU']

    # read labels into numpy array
    mat_gt = scipy.io.loadmat('PaviaU_gt.mat')
    img_gt = mat_gt['paviaU_gt']

    # create a hyperspectral dataset object from the numpy array
    hypData = data.HypImg(img, labels=img_gt)

    # pre-process data to make the model easier to train
    hypData.pre_process('minmax')

    # get indices for training and validation data
    trainSamples = 50  # per class
    valSamples = 15  # per class
    # Collect the first `trainSamples` flattened-pixel indices for each of the
    # 9 labelled classes (class ids 1..9; presumably 0 is unlabelled/background
    # -- confirm against the Pavia University ground truth).
    train_indices = []
    for i in range(1, 10):
        train_indices += np.nonzero(
            hypData.labels == i)[0][:trainSamples].tolist()
    # Take the next `valSamples` indices per class for validation.
    # NOTE(review): the statement below is truncated in this chunk of the
    # file -- the slice expression continues past the visible region.
    val_indices = []
    for i in range(1, 10):
        val_indices += np.nonzero(
            hypData.labels == i)[0][trainSamples:trainSamples +