x.add(Flatten())
x.add(Dense(16, activation='relu'))
x.add(Dropout(0.5))
x.add(BatchNormalization())
x.add(Dense(len(classes), activation="softmax"))
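
# Optional sanity check: print the layer stack and parameter counts
# of the assembled model before training
x.summary()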

# Compile
x.compile(optimizer=Adam(lr=0.0001),
          loss='categorical_crossentropy',
          metrics=['accuracy'])
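
# Note: categorical_crossentropy expects one-hot labels, which a
# flow_from_directory generator like `batches` produces by default
# (class_mode='categorical')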

# Invert the generator's class_indices mapping so each numeric label points
# back to its class name, and attach the names to the model
for c in batches.class_indices:
    classes[batches.class_indices[c]] = c
x.classes = classes
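
# Stream per-epoch metrics (loss, accuracy and their validation counterparts,
# when a validation set is supplied) to a CSV file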
filename = 'model_train_new.csv'
csv_log = keras.callbacks.CSVLogger(filename, separator=',', append=False)

# Stop training early if the monitored validation loss doesn't improve for
# 10 epochs, and checkpoint only the best weights seen so far
early_stopping = EarlyStopping(patience=10)
checkpointer = ModelCheckpoint('resnet_best.h5',
                               verbose=1,
                               save_best_only=True)

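# Log scalars and the model graph for visualization in TensorBoard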
tensorboard_callback = keras.callbacks.TensorBoard(log_dir='./logs',
                                                   histogram_freq=0,
                                                   batch_size=BATCH_SIZE,
                                                   write_graph=True,
                                                   write_grads=False,
                                                   write_images=False,