                  callbacks=[csv_logger, checkpointer, early_stopper])
else:
    print('Using real-time data augmentation.')
    # This will do preprocessing and realtime data augmentation:
    datagen = ImageDataGenerator(
        featurewise_center=False,  # set input mean to 0 over the dataset
        samplewise_center=False,  # set each sample mean to 0
        featurewise_std_normalization=False,  # divide inputs by std
        samplewise_std_normalization=False,  # divide each input by its std
        zca_whitening=False,  # apply ZCA whitening
        # randomly rotate images in the range (degrees, 0 to 180)
        rotation_range=0,
        # randomly shift images horizontally (fraction of total width)
        width_shift_range=0.1,
        # randomly shift images vertically (fraction of total height)
        height_shift_range=0.1,
        horizontal_flip=True,  # randomly flip images
        vertical_flip=False)   # randomly flip images

    # Compute quantities required for feature-wise normalization
    # (std, mean, and principal components if ZCA whitening is applied).
    datagen.fit(x_train)

    # Fit the model on the batches generated by datagen.flow().
    model.fit_generator(datagen.flow(x_train, y_train, batch_size=batch_size),
                        steps_per_epoch=x_train.shape[0] // batch_size,
                        epochs=epochs,
                        validation_data=(x_test, y_test),
                        callbacks=[csv_logger, checkpointer, early_stopper])
import pickle
import sys

train_gen = data_gen.flow_from_directory(path_train,
                                         batch_size=batch_size,
                                         target_size=shape,
                                         class_mode='categorical',
                                         color_mode='grayscale')
valid_gen = data_gen.flow_from_directory(path_valid,
                                         batch_size=batch_size,
                                         target_size=shape,
                                         class_mode='categorical',
                                         color_mode='grayscale')

history = None
try:
    # Keyword arguments are required here: passed positionally, valid_gen and
    # valid_steps would land in fit_generator's verbose and callbacks slots.
    history = model.fit_generator(train_gen,
                                  steps_per_epoch=steps_epoch,
                                  epochs=epochs,
                                  validation_data=valid_gen,
                                  validation_steps=valid_steps)
    print("Saving weights")
    model.save_weights(obj_weight)
    print("Saving history")
    with open(obj_history, 'wb') as f:
        pickle.dump(history.history, f)
except KeyboardInterrupt:
    print("\n\n --- Interruption ---\n ---Saving weights---")
    model.save_weights(obj_weight)
    # fit_generator never returned, so fall back to the History callback
    # that Keras attaches to the model while training runs.
    history = getattr(model, 'history', None)
    if history is not None:
        print(" ---Saving history---")
        with open(obj_history, 'wb') as f:
            pickle.dump(history.history, f)
    sys.exit(0)