# --- Training run: standard augmentation (random shifts + horizontal flip) ---

# Categorical cross-entropy + Adam; track accuracy during training.
model.compile(
    loss=keras.losses.categorical_crossentropy,
    optimizer=keras.optimizers.Adam(),
    metrics=['accuracy'],
)

# Checkpoint the best weights by validation accuracy and anneal the learning
# rate when validation accuracy plateaus.
# NOTE(review): the monitor key 'val_acc' is the pre-TF2 Keras name; in
# tf.keras >= 2.0 the logged key is 'val_accuracy' — confirm against the
# Keras version this runs on, otherwise the callbacks silently no-op.
filepath = "weights-{epoch:02d}-{val_acc:.3f}.hdf5"
checkpoint = keras.callbacks.ModelCheckpoint(
    filepath,
    monitor='val_acc',
    verbose=1,
    save_best_only=True,
    mode='max',
)
reduce_lr = keras.callbacks.ReduceLROnPlateau(
    monitor='val_acc',
    factor=0.1,
    patience=5,
    cooldown=0,
    min_lr=1e-5,
)
callbacks_list = [checkpoint, reduce_lr]

# Shift fraction 10/64 presumably means "up to 10 px of a 64-px image" —
# TODO confirm the input image size matches.
datagen = ImageDataGenerator(
    width_shift_range=10. / 64,
    height_shift_range=10. / 64,
    horizontal_flip=True,
)

# training loop
batch_size = 64
epochs = 300
# Ceil so the final partial batch still counts as a step.
steps_per_epoch = int(np.ceil(x_train.shape[0] / float(batch_size)))
# NOTE(review): fit_generator is deprecated in tf.keras >= 2.1 in favour of
# fit(); kept as-is to match the Keras API version this file targets.
model.fit_generator(
    datagen.flow(x_train, y_train, batch_size=batch_size),
    steps_per_epoch=steps_per_epoch,
    epochs=epochs,
    validation_data=(x_val, y_val),
    callbacks=callbacks_list,
)
# --- Training run: mixup augmentation (assumes model is already compiled) ---

# Checkpoint the best weights by validation accuracy and anneal the learning
# rate when validation accuracy plateaus.
# NOTE(review): the monitor key 'val_acc' is the pre-TF2 Keras name; in
# tf.keras >= 2.0 the logged key is 'val_accuracy' — confirm against the
# Keras version this runs on, otherwise the callbacks silently no-op.
filepath = "weights-{epoch:02d}-{val_acc:.3f}.hdf5"
checkpoint = keras.callbacks.ModelCheckpoint(
    filepath,
    monitor='val_acc',
    verbose=1,
    save_best_only=True,
    mode='max',
)
reduce_lr = keras.callbacks.ReduceLROnPlateau(
    monitor='val_acc',
    factor=0.1,
    patience=5,
    cooldown=0,
    min_lr=1e-5,
)
callbacks_list = [checkpoint, reduce_lr]

# Base geometric augmentation fed into the mixup generator below.
# Shift fraction 10/64 presumably means "up to 10 px of a 64-px image" —
# TODO confirm the input image size matches.
dataaug = ImageDataGenerator(
    width_shift_range=10. / 64,
    height_shift_range=10. / 64,
    horizontal_flip=True,
)

# training loop
batch_size = 64
epochs = 300
# Ceil so the final partial batch still counts as a step.
steps_per_epoch = int(np.ceil(x_train.shape[0] / float(batch_size)))
# mixup_generator is defined elsewhere in this project; alpha=0.2 is
# presumably the Beta-distribution mixing parameter — confirm its signature.
# NOTE(review): fit_generator is deprecated in tf.keras >= 2.1 in favour of
# fit(); kept as-is to match the Keras API version this file targets.
model.fit_generator(
    mixup_generator(x_train, y_train, batch_size, alpha=0.2, dataaug=dataaug),
    steps_per_epoch=steps_per_epoch,
    epochs=epochs,
    validation_data=(x_val, y_val),
    callbacks=callbacks_list,
)