Example #1
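An end-to-end behavioural-cloning training script (Keras 1.x API) for the
Thunderhill datasets: the first pipeline trains on simulator logs, the
second on real-car runs with a multi-output model.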
# NOTE: imports reconstructed for completeness. The helper functions
# (preprocessImage, perspectiveTransform, retrieveVectors, getNormFactor,
# generateImagesFromPaths, customLoss) and the flags LOADMODEL, ANGLESFED
# and ALL are assumed to be defined elsewhere in this project. All Keras
# calls use the 1.x API (Convolution2D, fit_generator with nb_epoch, etc.).
import functools
import time

import cv2
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from PIL import Image
from sklearn.model_selection import train_test_split
from sklearn.utils import shuffle

from keras import backend as K
from keras.callbacks import EarlyStopping, ModelCheckpoint, TensorBoard
from keras.layers import (Activation, BatchNormalization, Convolution2D,
                          Dense, Dropout, Flatten, Input, Lambda)
from keras.models import Model, load_model
from keras.optimizers import Adam

# NOTE: renamed from `main` (the original file defined `main` twice, so this
# simulator-data pipeline was shadowed by the second definition below).
def mainSim():
    img = mpimg.imread(
        '/home/jjordening/git/thunderhill_data/dataset_sim_001_km_320x160/IMG/center_2017_03_07_07_21_54_311.jpg'
    )
    h, w = img.shape[:2]
    src = np.float32([[w / 2 - 57, h / 2], [w / 2 + 57, h / 2], [w + 140, h],
                      [-140, h]])
    dst = np.float32([[w / 4, 0], [w * 3 / 4, 0], [w * 3 / 4, h], [w / 4, h]])
    M = cv2.getPerspectiveTransform(src, dst)
    invM = cv2.getPerspectiveTransform(dst, src)
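    # M warps the road-plane trapezoid (src) into a top-down rectangle (dst)
    # for a bird's-eye view; invM is the inverse mapping.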
    transform = functools.partial(perspectiveTransform, M=M.copy())
    #plt.imshow(preprocessImage(img, transform))
    #plt.show()

    #showSamplesCompared(img, transform, '', '', '')
    plt.xkcd()
    np.random.seed(0)
    #data = pd.read_csv('/home/jjordening/git/thunderhill_data/dataset_sim_000_km_few_laps/driving_log.csv',
    #                   header = None, names=['center','left', 'right', 'steering','throttle', 'brake', 'speed', 'position', 'orientation'])
    #data['positionX'], data['positionY'], data['positionZ'] = data['position'].apply(retrieveVectors)
    #data['orientationX'], data['orientationY'], data['orientationZ'] = data['orientation'].apply(retrieveVectors)
    #data['center'] = '/home/jjordening/git/thunderhill_data/dataset_sim_000_km_few_laps/'+data['center'].apply(lambda x: x.strip())
    data1 = pd.read_csv(
        '/home/jjordening/git/thunderhill_data/dataset_sim_001_km_320x160/driving_log.csv',
        header=None,
        names=[
            'center', 'left', 'right', 'steering', 'throttle', 'brake',
            'speed', 'position', 'orientation'
        ])
    data1[
        'center'] = '/home/jjordening/git/thunderhill_data/dataset_sim_001_km_320x160/' + data1[
            'center'].apply(lambda x: x.strip())
    data1[['positionX', 'positionY',
           'positionZ']] = data1['position'].apply(retrieveVectors)
    data1[['orientationX', 'orientationY',
           'orientationZ']] = data1['orientation'].apply(retrieveVectors)
    data2 = pd.read_csv(
        '/home/jjordening/git/thunderhill_data/dataset_sim_002_km_320x160_recovery/driving_log.csv',
        header=None,
        names=[
            'center', 'left', 'right', 'steering', 'throttle', 'brake',
            'speed', 'position', 'orientation'
        ])
    data2[
        'center'] = '/home/jjordening/git/thunderhill_data/dataset_sim_002_km_320x160_recovery/' + data2[
            'center'].apply(lambda x: x.strip())
    data2[['positionX', 'positionY',
           'positionZ']] = data2['position'].apply(retrieveVectors)
    data2[['orientationX', 'orientationY',
           'orientationZ']] = data2['orientation'].apply(retrieveVectors)
    #data['right'] = '../simulator/data/data/'+data['right'].apply(lambda x: x.strip())
    #data['left'] = '../simulator/data/data/'+data['left'].apply(lambda x: x.strip())
    angles = []
    images = []
    """data2 = pd.read_csv('../simulator/simulator-linux/driving_log.csv', header = None, names=['center','left', 'right', 'steering',
                                                               'throttle', 'break', 'speed'])
    data = data.append(data2)"""
    dataNew = pd.DataFrame()
    offset = 0

    print(data1['positionX'])
    for dat in [data1, data2]:
        angles.extend(dat['steering'].values)
        for idx, row in dat.iterrows():
            # TODO: offset grows by only 100 per dataset; check that this
            # still indexes `angles` correctly once the second set is added.
            dat.loc[idx, 'angleIndex'] = idx + offset
            images.append(
                preprocessImage(mpimg.imread(row['center'].strip())))
            #images.append(transform(mpimg.imread(row['center'].strip())))
        offset += 100
        # Skip the first 100 frames of each run before pooling.
        dataNew = dataNew.append(dat.iloc[100:])
    # TODO: Normalisation of position and orientation
    print(len(dataNew), dataNew.columns)
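    # Balance the steering distribution: bins holding more than
    # len(dataNew)/30 samples are down-weighted proportionally; rarer bins
    # keep a weight of 1.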
    hist, edges = np.histogram(dataNew['steering'], bins=31)
    hist = 1. / np.array([
        val if val > len(dataNew) / 30. else len(dataNew) / 30. for val in hist
    ])
    hist *= len(dataNew) / 30.
    print(hist, len(dataNew))
    dataNew['norm'] = dataNew['steering'].apply(
        lambda x: getNormFactor(x, hist, edges))
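    # getNormFactor (defined elsewhere) presumably returns the weight of the
    # histogram bin that the steering value falls into.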
    print(dataNew['norm'].unique())
    del data1, data2
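    # Below: standardise the pose features to zero mean and unit variance.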

    for col in [
            'positionX', 'positionY', 'positionZ', 'orientationX',
            'orientationY', 'orientationZ'
    ]:
        vals = dataNew[col].values
        mean = np.mean(vals)
        std = np.std(vals)
        dataNew[col] -= mean
        dataNew[col] /= std
        print('%s Mean:%.3f Std:%.3f' % (col, mean, std))

    dataNew = shuffle(dataNew, random_state=0)

    #plt.show()

    dataTrain, dataTest = train_test_split(dataNew, test_size=.2)
    dataTrain, dataVal = train_test_split(dataTrain, test_size=.2)

    imShape = preprocessImage(mpimg.imread(dataTrain['center'].iloc[0])).shape
    print(imShape)

    batchSize = 256
    epochBatchSize = 4096

    trainGenerator = generateImagesFromPaths(dataTrain, batchSize, imShape,
                                             [3], transform, angles, images,
                                             True)
    t = time.time()
    trainGenerator.__next__()
    print("Time to build train batch: ", time.time() - t)
    valGenerator = generateImagesFromPaths(dataVal, batchSize, imShape, [3],
                                           transform, angles, images)
    t = time.time()
    valGenerator.__next__()
    print("Time to build validation batch: ", time.time() - t)
    stopCallback = EarlyStopping(monitor='val_loss', patience=15, min_delta=0.)
    checkCallback = ModelCheckpoint('initModel.ckpt',
                                    monitor='val_loss',
                                    save_best_only=True)
    visCallback = TensorBoard(log_dir='./logs')
    if LOADMODEL:
        endModel = load_model('initModel.h5',
                              custom_objects={'customLoss': customLoss})
        endModel.fit_generator(
            trainGenerator,
            callbacks=[stopCallback, checkCallback, visCallback],
            nb_epoch=20,
            samples_per_epoch=epochBatchSize,
            max_q_size=8,
            validation_data=valGenerator,
            nb_val_samples=len(dataVal),
            nb_worker=8,
            pickle_safe=True)
        endModel.load_weights('initModel.ckpt')
        endModel.save('model.h5')

    else:
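        # Vision trunk: five convolutions in the spirit of NVIDIA's PilotNet,
        # each followed by batch normalisation and an ELU activation.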
        inpC = Input(shape=(imShape[0], imShape[1], imShape[2]),
                     name='input_1')
        xC = Convolution2D(24, 8, 8, border_mode='valid',
                           subsample=(2, 2))(inpC)
        xC = BatchNormalization()(xC)
        xC = Activation('elu')(xC)
        print(xC.get_shape())
        xC = Convolution2D(36, 5, 5, border_mode='valid', subsample=(2, 2))(xC)
        xC = BatchNormalization()(xC)
        xC = Activation('elu')(xC)
        print(xC.get_shape())
        xC = Convolution2D(48, 5, 5, border_mode='valid', subsample=(2, 2))(xC)
        xC = BatchNormalization()(xC)
        xC = Activation('elu')(xC)
        print(xC.get_shape())
        xC = Convolution2D(64, 5, 5, border_mode='valid')(xC)
        xC = BatchNormalization()(xC)
        xC = Activation('elu')(xC)
        print(xC.get_shape())
        xC = Convolution2D(64, 5, 5, border_mode='valid')(xC)
        xC = BatchNormalization()(xC)
        xC = Activation('elu')(xC)
        print(xC.get_shape())
        xOut = Flatten()(xC)
        print(xOut.get_shape())
        """xVectorInp = Input(shape = (6,), name='input_3')
        xVector = Dense(100)(xVectorInp)
        xVector = BatchNormalization()(xVector)
        xVector = Activation('elu')(xVector)
        xVector = Dense(100)(xVector)
        xVector = BatchNormalization()(xVector)
        xVector = Activation('elu')(xVector)
        xVector = Dropout(.1)(xVector)"""

        inpAngles = Input(shape=(ANGLESFED, ), name='input_2')
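        # The last ANGLESFED steering angles enter as a second input and are
        # concatenated with the flattened image features below.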

        xOut = Lambda(lambda x: K.concatenate(x, axis=1))([xOut, inpAngles])
        xOut = Dense(200)(xOut)
        xOut = BatchNormalization()(xOut)
        xOut = Activation('elu')(xOut)
        xOut = Dense(100)(xOut)
        xOut = BatchNormalization()(xOut)
        xOut = Activation('elu')(xOut)
        xOut = Dense(50)(xOut)
        xOut = BatchNormalization()(xOut)
        xOut = Activation('elu')(xOut)
        xOut = Dropout(.3)(xOut)
        xOut = Dense(10)(xOut)
        xOut = BatchNormalization()(xOut)
        xOut = Activation('elu')(xOut)
        xOut = Dense(1, activation='sigmoid')(xOut)
        xOut = Lambda(lambda x: x * 2 - 1, name='output')(xOut)
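        # The sigmoid output in [0, 1] is rescaled to the steering range [-1, 1].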
        #xRec = LSTM(10)(xOut)

        endModel = Model((inpC, inpAngles), xOut)
        endModel.compile(optimizer=Adam(lr=1e-4),
                         loss=customLoss,
                         metrics=['mse', 'accuracy'])
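        # Warm-up: five epochs without validation, then up to 100 epochs with
        # early stopping and checkpointing on val_loss.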
        endModel.fit_generator(trainGenerator,
                               callbacks=[visCallback],
                               nb_epoch=5,
                               samples_per_epoch=epochBatchSize,
                               max_q_size=8,
                               nb_worker=8,
                               pickle_safe=True)
        endModel.fit_generator(
            trainGenerator,
            callbacks=[stopCallback, checkCallback, visCallback],
            nb_epoch=100,
            samples_per_epoch=epochBatchSize,
            max_q_size=8,
            validation_data=valGenerator,
            nb_val_samples=len(dataVal),
            nb_worker=8,
            pickle_safe=True)
        endModel.load_weights('initModel.ckpt')
        endModel.save('initModel.h5')

    endModel = load_model('initModel.h5',
                          custom_objects={'customLoss': customLoss})
    print(endModel.evaluate_generator(valGenerator, val_samples=len(dataVal)))
    print(
        endModel.evaluate_generator(generateImagesFromPaths(
            dataTest, batchSize, imShape, [3], transform, angles, images),
                                    val_samples=len(dataTest)))
def main():
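    # Second pipeline: real-car runs trained with a shared trunk and three
    # output heads (steering, throttle, position); cf. mainSim() above.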

    dataList = []
    #plt.imshow(preprocessImage(img, transform))
    #plt.show()

    #showSamplesCompared(img, transform, '', '', '')
    #plt.xkcd()
    np.random.seed(0)
    #data = pd.read_csv('/home/jjordening/data/dataset_sim_000_km_few_laps/driving_log.csv',
    #                   header = None, names=['center','left', 'right', 'steering','throttle', 'brake', 'speed', 'position', 'orientation'])
    #data['positionX'], data['positionY'], data['positionZ'] = data['position'].apply(retrieveVectors)
    #data['orientationX'], data['orientationY'], data['orientationZ'] = data['orientation'].apply(retrieveVectors)
    #data['center'] = '/home/jjordening/data/dataset_sim_000_km_few_laps/'+data['center'].apply(lambda x: x.strip())

    #data1 = pd.read_csv('/home/jjordening/data/udacity-day-01-exported-1102/output_processed.txt')
    #data1['path'] = '/home/jjordening/data/udacity-day-01-exported-1102/'+data1['path'].apply(lambda x: x.strip())

    #data2 = pd.read_csv('/home/jjordening/data/udacity-day-01-exported-1109/output_processed.txt')
    #data2['path'] = '/home/jjordening/data/udacity-day-01-exported-1109/'+data2['path'].apply(lambda x: x.strip())

    if ALL:
        data3 = pd.read_csv('/home/jjordening/data/1538/output_processed.txt')
        data3['path'] = '/home/jjordening/data/1538/' + data3['path'].apply(
            lambda x: x.strip())
        print('data3', np.max(data3['steering']), np.min(data3['steering']))
        dataList.append(data3)

        data4 = pd.read_csv('/home/jjordening/data/1543/output_processed.txt')
        data4['path'] = '/home/jjordening/data/1543/' + data4['path'].apply(
            lambda x: x.strip())
        print('data4', np.max(data4['steering']), np.min(data4['steering']))
        dataList.append(data4)

        data5 = pd.read_csv('/home/jjordening/data/1610/output_processed.txt')
        data5['path'] = '/home/jjordening/data/1610/' + data5['path'].apply(
            lambda x: x.strip())
        print('data5', np.max(data5['steering']), np.min(data5['steering']))
        dataList.append(data5)

        data6 = pd.read_csv('/home/jjordening/data/1645/output_processed.txt')
        data6['path'] = '/home/jjordening/data/1645/' + data6['path'].apply(
            lambda x: x.strip())
        print('data6', np.max(data6['steering']), np.min(data6['steering']))
        dataList.append(data6)

        data7 = pd.read_csv('/home/jjordening/data/1702/output_processed.txt')
        data7['path'] = '/home/jjordening/data/1702/' + data7['path'].apply(
            lambda x: x.strip())
        print('data7', np.max(data7['steering']), np.min(data7['steering']))
        dataList.append(data7)

        data8 = pd.read_csv('/home/jjordening/data/1708/output_processed.txt')
        data8['path'] = '/home/jjordening/data/1708/' + data8['path'].apply(
            lambda x: x.strip())
        print('data8', np.max(data8['steering']), np.min(data8['steering']))
        dataList.append(data8)

    data9 = pd.read_csv('/home/jjordening/data/1045/output_processed.txt')
    data9['path'] = '/home/jjordening/data/1045/' + data9['path'].apply(
        lambda x: x.strip())
    print('data9', np.max(data9['steering']), np.min(data9['steering']))
    dataList.append(data9)

    data10 = pd.read_csv('/home/jjordening/data/1050/output_processed.txt')
    data10['path'] = '/home/jjordening/data/1050/' + data10['path'].apply(
        lambda x: x.strip())
    print('data10', np.max(data10['steering']), np.min(data10['steering']))
    dataList.append(data10)

    data11 = pd.read_csv('/home/jjordening/data/1426/output_processed.txt')
    data11['path'] = '/home/jjordening/data/1426/' + data11['path'].apply(
        lambda x: x.strip())
    print('data11', np.max(data11['steering']), np.min(data11['steering']))
    dataList.append(data11)

    data12 = pd.read_csv('/home/jjordening/data/1516/output_processed.txt')
    data12['path'] = '/home/jjordening/data/1516/' + data12['path'].apply(
        lambda x: x.strip())
    print('data12', np.max(data12['steering']), np.min(data12['steering']))
    dataList.append(data12)

    data13 = pd.read_csv('/home/jjordening/data/1634/outputNew.txt')
    data13['path'] = '/home/jjordening/data/1634/' + data13['path'].apply(
        lambda x: x.strip())
    print('data13', np.max(data13['steering']), np.min(data13['steering']))
    dataList.append(data13)

    print(data9['brake'].unique())
    """data3 = pd.read_csv('/home/jjordening/data/dataset_polysync_1464552951979919/output_processed.txt', header = None, 
                        names = ['path','heading','longitude','latitude','quarternion0','quarternion1','quarternion2','quarternion3','vel0','vel1',
                                'vel2','steering','throttle','brake','speed'], skiprows = 500)
    data3 = data3.ix[0:1500].append(data3.ix[2600:])
    data3 = data3.ix[-500:]
    data3['path'] = '/home/jjordening/data/dataset_polysync_1464552951979919/'+data3['path'].apply(lambda x: x.strip())
    data3['throttle'] = 0"""

    #data['right'] = '../simulator/data/data/'+data['right'].apply(lambda x: x.strip())
    #data['left'] = '../simulator/data/data/'+data['left'].apply(lambda x: x.strip())
    angles = []
    dataNew = pd.DataFrame()
    offset = 0
    #print(data3['steering'])
    #print(data1['longitude'])
    """for dat in [data3,data4,data5,data6,data7]:
        angles.extend(dat['steering'].values)
        for row in dat.iterrows():
            dat.loc[row[0], 'angleIndex'] = row[0]+ offset
            #images.append(preprocessImage(mpimg.imread(row[1]['center'].strip())))
            #images.append(transform(mpimg.imread(row[1]['center'].strip())))
        offset+=100
        dataNew = dataNew.append(dat.ix[100:])"""
    #dataNew['throttle'] = dataNew['accel'].apply(lambda x: max(x,0)/np.max(dataNew['accel']))
    for dat in dataList:
        dataNew = dataNew.append(dat.iloc[30:])  # skip the first 30 frames
        del dat

    print('Len dataNew: ', len(dataNew))
    dataNew = dataNew.dropna(subset=['throttle', 'brake', 'steering'])
    print('Len dataNew: ', len(dataNew))
    print(np.max(dataNew['throttle']), np.min(dataNew['throttle']))
    # TODO: Normalisation of position and orientation
    #del data3,data4,data5,data6,data7
    print(len(dataNew), dataNew.columns)
    print(np.histogram(dataNew['throttle'], bins=31))
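    # Same steering-histogram reweighting as in mainSim(), with the per-bin
    # cap at len(dataNew)/40 instead of /30.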
    hist, edges = np.histogram(dataNew['steering'], bins=31)
    print(hist, edges, len(dataNew))
    hist = 1. / np.array([
        val if val > len(dataNew) / 40. else len(dataNew) / 40. for val in hist
    ])
    hist *= len(dataNew) / 40.
    print(hist, edges, len(dataNew))
    dataNew['norm'] = dataNew['steering'].apply(
        lambda x: getNormFactor(x, hist, edges))
    print(dataNew['norm'].unique())
    print(np.min(dataNew['steering']), np.max(dataNew['steering']))
    print(np.min(dataNew['throttle']), np.max(dataNew['throttle']))
    print(np.min(dataNew['brake']), np.max(dataNew['brake']))

    dataNew['speed'] = dataNew['speed'].apply(lambda x: x / 40. - 1)
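    # The rescaling above maps speed into [-1, 1], assuming raw speeds of
    # roughly 0 to 80 in the logged units.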

    dataNew = shuffle(dataNew, random_state=0)
    #plt.figure(1, figsize=(8,4))
    #plt.hist(dataNew['steering'], bins =31)

    #plt.show()

    dataTrain, dataTest = train_test_split(dataNew, test_size=.1)
    dataTrain, dataVal = train_test_split(dataTrain, test_size=.1)

    # Read one raw RGB frame (960x480) to determine the input shape; the
    # bytes on disk are a raw RGB dump, decoded with PIL's 'raw' decoder.
    with open(dataTrain['path'].iloc[0], 'rb') as file:
        img = np.array(Image.frombytes('RGB', [960, 480], file.read(), 'raw'))

    imShape = preprocessImage(img).shape
    print(imShape)

    batchSize = 128
    epochBatchSize = 8192
    trainGenerator = generateImagesFromPaths(dataTrain, batchSize, imShape,
                                             [3], True)
    t = time.time()
    trainGenerator.__next__()
    print("Time to build train batch: ", time.time() - t)
    valGenerator = generateImagesFromPaths(dataVal, batchSize, imShape, [3])
    t = time.time()
    valGenerator.__next__()
    print("Time to build validation batch: ", time.time() - t)
    stopCallback = EarlyStopping(monitor='val_loss',
                                 patience=20,
                                 min_delta=0.01)
    checkCallback = ModelCheckpoint('psyncModel.ckpt',
                                    monitor='val_loss',
                                    save_best_only=True)
    visCallback = TensorBoard(log_dir='./logs/%d' % int(time.time()),
                              histogram_freq=0,
                              write_graph=True,
                              write_images=True)
    # NOTE: this base model is loaded but never used below; it appears to be
    # left over from the transfer-learning cut marked further down.
    model = load_model('psyncModelBase.h5',
                       custom_objects={'customLoss': customLoss})
    name = 'psyncPosNet%d.h5' % time.time()
    if LOADMODEL:
        endModel = load_model('psyncPosNet.h5',
                              custom_objects={'customLoss': customLoss})
        endModel.fit_generator(
            trainGenerator,
            callbacks=[stopCallback, checkCallback, visCallback],
            nb_epoch=100,
            samples_per_epoch=epochBatchSize,
            max_q_size=8,
            validation_data=valGenerator,
            nb_val_samples=len(dataVal),
            nb_worker=8,
            pickle_safe=True)
        endModel.load_weights('psyncModel.ckpt')
        endModel.save(name)

    else:
        inpC = Input(shape=(imShape[0], imShape[1], imShape[2]),
                     name='inputImg')
        xC = Convolution2D(24,
                           8,
                           8,
                           border_mode='valid',
                           subsample=(2, 2),
                           name='conv1')(inpC)
        xC = BatchNormalization()(xC)
        xC = Activation('elu')(xC)
        print(xC.get_shape())
        xC = Convolution2D(36,
                           5,
                           5,
                           border_mode='valid',
                           subsample=(2, 2),
                           name='conv2')(xC)
        xC = BatchNormalization()(xC)
        xC = Activation('elu')(xC)
        print(xC.get_shape())
        xC = Convolution2D(48,
                           5,
                           5,
                           border_mode='valid',
                           subsample=(2, 2),
                           name='conv3')(xC)
        xC = BatchNormalization()(xC)
        xC = Activation('elu')(xC)
        print(xC.get_shape())
        xC = Convolution2D(64, 5, 5, border_mode='valid', name='conv4')(xC)
        xC = BatchNormalization()(xC)
        xC = Activation('elu')(xC)
        print(xC.get_shape())
        xC = Convolution2D(
            64,
            5,
            5,
            border_mode='valid',
            name='conv5',
        )(xC)
        xC = BatchNormalization()(xC)
        xC = Activation('elu')(xC)
        print(xC.get_shape())
        xOut = Flatten()(xC)
        print(xOut.get_shape())

        #Cut for transfer learning is here:
        speedInp = Input(shape=(1, ), name='inputSpeed')

        xOut = Lambda(lambda x: K.concatenate(x, axis=1))([xOut, speedInp])
        xOut = Dense(200)(xOut)
        xOut = BatchNormalization()(xOut)
        xOut = Activation('elu')(xOut)
        xOut = Dense(200)(xOut)
        xOut = BatchNormalization()(xOut)
        xEnd = Activation('elu')(xOut)
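        # Three heads branch off the shared feature xEnd below: steering
        # (rescaled to [-5, 5]), throttle (to [-1, 1]) and a position output.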

        xOutSteer = Dense(100)(xEnd)
        xOutSteer = BatchNormalization()(xOutSteer)
        xOutSteer = Activation('elu')(xOutSteer)
        xOutSteer = Dropout(.2)(xOutSteer)
        xOutSteer = Dense(30)(xOutSteer)
        xOutSteer = BatchNormalization()(xOutSteer)
        xOutSteer = Activation('elu')(xOutSteer)
        xOutSteer = Dense(1, activation='sigmoid')(xOutSteer)
        xOutSteer = Lambda(lambda x: x * 10 - 5, name='outputSteer')(xOutSteer)

        xOutThr = Dense(100, name='thr1')(xEnd)
        xOutThr = BatchNormalization(name='thr2')(xOutThr)
        xOutThr = Activation('elu')(xOutThr)
        xOutThr = Dropout(.2)(xOutThr)
        xOutThr = Dense(30, name='thr3')(xOutThr)
        xOutThr = BatchNormalization(name='thr4')(xOutThr)
        xOutThr = Activation('elu')(xOutThr)
        xOutThr = Dense(1, activation='sigmoid', name='thr5')(xOutThr)
        xOutThr = Lambda(lambda x: x * 2 - 1, name='outputThr')(xOutThr)

        xOutPos = Dropout(.3)(xEnd)
        xOutPos = Dense(1, activation='sigmoid', name='pos5')(xOutPos)
        xOutPos = Lambda(lambda x: x * 2 - 1, name='outputPos')(xOutPos)

        endModel = Model((inpC, speedInp), (xOutSteer, xOutThr, xOutPos))
        endModel.compile(optimizer=Adam(lr=1e-4), loss='mse', metrics=['mse'])
        #endModel.fit_generator(trainGenerator, callbacks = [visCallback],
        #                       nb_epoch=50, samples_per_epoch=epochBatchSize,
        #                       max_q_size=24, nb_worker=8, pickle_safe=True)
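        # Staged training: two 30-epoch runs without validation, then a
        # 40-epoch run with validation, early stopping and checkpointing; the
        # best checkpoint is reloaded and saved after each stage.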
        endModel.fit_generator(
            trainGenerator,
            callbacks=[stopCallback, checkCallback, visCallback],
            nb_epoch=30,
            samples_per_epoch=epochBatchSize,
            nb_worker=8,
            pickle_safe=True)
        endModel.load_weights('psyncModel.ckpt')
        endModel.save(name)
        endModel.fit_generator(
            trainGenerator,
            callbacks=[stopCallback, checkCallback, visCallback],
            nb_epoch=30,
            samples_per_epoch=epochBatchSize,
            nb_worker=8,
            pickle_safe=True)
        endModel.load_weights('psyncModel.ckpt')
        endModel.save(name)
        endModel.fit_generator(
            trainGenerator,
            callbacks=[stopCallback, checkCallback, visCallback],
            nb_epoch=40,
            samples_per_epoch=epochBatchSize,
            max_q_size=24,
            validation_data=valGenerator,
            nb_val_samples=len(dataVal),
            nb_worker=8,
            pickle_safe=True)
        endModel.load_weights('psyncModel.ckpt')
        endModel.save(name)

    endModel = load_model(name, custom_objects={'customLoss': customLoss})

    print(endModel.evaluate_generator(valGenerator, val_samples=len(dataVal)))
    # NOTE: `angles` is still empty here; the argument looks like a leftover
    # from the simulator pipeline's generator signature.
    print(
        endModel.evaluate_generator(generateImagesFromPaths(
            dataTest, batchSize, imShape, [3], angles),
                                    val_samples=len(dataTest)))