```python
import scipy.io
from deephyp import autoencoder, data

# read data into a numpy array
mat = scipy.io.loadmat('PaviaU.mat')
img = mat['paviaU']

# create a hyperspectral dataset object from the numpy array
hypData = data.HypImg(img)

# pre-process data to make the model easier to train
hypData.pre_process('minmax')

# create data iterator objects for training and validation using the pre-processed data
trainSamples = 200000
valSamples = 100
dataTrain = data.Iterator(
    dataSamples=hypData.spectraPrep[:trainSamples, :],
    targets=hypData.spectraPrep[:trainSamples, :],
    batchSize=1000)
dataVal = data.Iterator(
    dataSamples=hypData.spectraPrep[trainSamples:trainSamples + valSamples, :],
    targets=hypData.spectraPrep[trainSamples:trainSamples + valSamples, :])

# shuffle the training data
dataTrain.shuffle()

# setup a fully-connected autoencoder with a four-layer encoder and a 3-dimensional latent vector
net_mlp = autoencoder.mlp_1D_network(
    inputSize=hypData.numBands,
    encoderSize=[50, 30, 10, 3],
    activationFunc='relu',
    weightInitOpt='truncated_normal',
    tiedWeights=None)
```
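The `'minmax'` option rescales the spectra into the [0, 1] range so the network is easier to train. The snippet below is a minimal NumPy sketch of that idea, not the library's exact implementation; whether the scaling is applied per spectrum or per band is an assumption here.

```python
import numpy as np

def minmax_scale(spectra):
    """Rescale each spectrum to the [0, 1] range.

    Illustrative sketch only; the library's 'minmax' pre-processing
    may differ (e.g. scaling per band rather than per spectrum).
    """
    lo = spectra.min(axis=1, keepdims=True)    # per-spectrum minimum
    hi = spectra.max(axis=1, keepdims=True)    # per-spectrum maximum
    return (spectra - lo) / (hi - lo + 1e-12)  # guard against flat spectra
```

Applied to an array of shape `(numSamples, numBands)`, this yields values in [0, 1], matching the range of `hypData.spectraPrep`.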
```python
import numpy as np
from deephyp import classifier, data

# get indices for training and validation data
# (assumes hypData was built with ground-truth labels; see the sketch below)
trainSamples = 50  # per class
valSamples = 15    # per class
train_indices = []
for i in range(1, 10):
    train_indices += np.nonzero(hypData.labels == i)[0][:trainSamples].tolist()
val_indices = []
for i in range(1, 10):
    val_indices += np.nonzero(
        hypData.labels == i)[0][trainSamples:trainSamples + valSamples].tolist()

# create data iterator objects for training and validation using the pre-processed data
dataTrain = data.Iterator(
    dataSamples=hypData.spectraPrep[train_indices, :],
    targets=hypData.labelsOnehot[train_indices, :],
    batchSize=50)
dataVal = data.Iterator(
    dataSamples=hypData.spectraPrep[val_indices, :],
    targets=hypData.labelsOnehot[val_indices, :])

# shuffle the training data
dataTrain.shuffle()

# setup a cnn classifier with 3 convolutional layers and 2 fully-connected layers
net = classifier.cnn_1D_network(
    inputSize=hypData.numBands,
    numClasses=9,
    convFilterSize=[20, 10, 10],
    convNumFilters=[10, 10, 10],
    convStride=[1, 1, 1],
    fcSize=[20, 20],
    activationFunc='relu')
```
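The classifier example needs the Pavia University ground truth loaded so that `hypData.labels` and `hypData.labelsOnehot` are populated. A minimal sketch of that setup follows; the `labels` keyword of `data.HypImg` is an assumption here, and the `PaviaU_gt.mat` filename and `paviaU_gt` key follow the standard distribution of the dataset.

```python
import scipy.io
from deephyp import data

# read the image and its ground-truth class map into numpy arrays
mat = scipy.io.loadmat('PaviaU.mat')
img = mat['paviaU']
mat_gt = scipy.io.loadmat('PaviaU_gt.mat')  # filename/key assumed from the
img_gt = mat_gt['paviaU_gt']                # standard PaviaU distribution

# create a labelled dataset object and pre-process the spectra
# (the labels keyword is an assumption about data.HypImg)
hypData = data.HypImg(img, labels=img_gt)
hypData.pre_process('minmax')
```

With labels attached, class 0 denotes unlabelled background pixels, which is why the index loops above run over classes 1 to 9 only.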