def train(self):
    """Train the model on CIFAR-10 with real-time data augmentation.

    Loads CIFAR-10, one-hot encodes the labels, applies the class's color
    preprocessing, builds the network via ``self.build_model()``, and fits
    it with flip/shift augmentation. The best model by validation loss is
    checkpointed to ``self.model_filename``.

    Side effects: writes the checkpoint file and TensorBoard logs, prints
    the model summary, and sets ``self._model`` / ``self.param_count``.
    """
    # Load data and one-hot encode the labels.
    (x_train, y_train), (x_test, y_test) = cifar10.load_data()
    y_train = keras.utils.to_categorical(y_train, self.num_classes)
    y_test = keras.utils.to_categorical(y_test, self.num_classes)
    x_train, x_test = self.color_preprocessing(x_train, x_test)

    # Build network.
    model = self.build_model()
    model.summary()

    # Save the best model (lowest validation loss) seen during training.
    checkpoint = ModelCheckpoint(self.model_filename,
                                 monitor='val_loss',
                                 verbose=0,
                                 save_best_only=True,
                                 mode='auto')
    plot_callback = PlotLearning()
    tb_cb = TensorBoard(log_dir=self.log_filepath, histogram_freq=0)
    cbks = [checkpoint, plot_callback, tb_cb]

    # Real-time data augmentation: horizontal flips plus ~4px shifts,
    # zero-padding the exposed border (standard CIFAR-10 augmentation).
    print('Using real-time data augmentation.')
    datagen = ImageDataGenerator(horizontal_flip=True,
                                 width_shift_range=0.125,
                                 height_shift_range=0.125,
                                 fill_mode='constant',
                                 cval=0.)
    datagen.fit(x_train)

    # Start training.
    model.fit_generator(datagen.flow(x_train, y_train, batch_size=self.batch_size),
                        steps_per_epoch=self.iterations,
                        epochs=self.epochs,
                        callbacks=cbks,
                        validation_data=(x_test, y_test))

    # BUG FIX: the original ended with ``model.save(self.model_filename)``,
    # which overwrote the best checkpoint written by ModelCheckpoint with
    # the final-epoch weights, defeating ``save_best_only=True``. The
    # checkpoint file already holds the best full model, so no final save.
    self._model = model
    # Record the parameter count, matching the DenseNet trainer in this file.
    self.param_count = self._model.count_params()
def train(self):
    """Train the DenseNet model on CIFAR-10 with real-time augmentation.

    Loads CIFAR-10, one-hot encodes the labels, casts images to float32,
    applies the class's color preprocessing, assembles the DenseNet graph,
    and trains it with SGD + Nesterov momentum under a learning-rate
    schedule. The best model by validation loss is checkpointed to
    ``self.model_filename``.

    Side effects: writes the checkpoint file and TensorBoard logs, prints
    the model summary, and sets ``self._model`` / ``self.param_count``.
    """
    # Load data and one-hot encode the labels.
    (x_train, y_train), (x_test, y_test) = cifar10.load_data()
    y_train = keras.utils.to_categorical(y_train, self.num_classes)
    y_test = keras.utils.to_categorical(y_test, self.num_classes)
    x_train = x_train.astype('float32')
    x_test = x_test.astype('float32')

    # Color preprocessing.
    x_train, x_test = self.color_preprocessing(x_train, x_test)

    # Build the DenseNet network.
    img_input = Input(shape=(self.img_rows, self.img_cols, self.img_channels))
    output = self.densenet(img_input, self.num_classes)
    model = Model(img_input, output)
    model.summary()
    # plot_model(model, show_shapes=True, to_file='model.png')

    # Set optimizer: SGD with Nesterov momentum; the initial 0.1 rate is
    # subsequently adjusted by the LearningRateScheduler below.
    sgd = optimizers.SGD(lr=.1, momentum=0.9, nesterov=True)
    model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])

    # Set callbacks: LR schedule, TensorBoard, best-model checkpoint, live plot.
    tb_cb = TensorBoard(log_dir=self.log_filepath, histogram_freq=0)
    change_lr = LearningRateScheduler(self.scheduler)
    ckpt = ModelCheckpoint(self.model_filename,
                           monitor='val_loss',
                           verbose=0,
                           save_best_only=True,
                           mode='auto')
    plot_callback = PlotLearning()
    cbks = [change_lr, tb_cb, ckpt, plot_callback]

    # Real-time data augmentation: horizontal flips plus ~4px shifts,
    # zero-padding the exposed border (standard CIFAR-10 augmentation).
    print('Using real-time data augmentation.')
    datagen = ImageDataGenerator(horizontal_flip=True,
                                 width_shift_range=0.125,
                                 height_shift_range=0.125,
                                 fill_mode='constant',
                                 cval=0.)
    datagen.fit(x_train)

    # Start training.
    model.fit_generator(datagen.flow(x_train, y_train, batch_size=self.batch_size),
                        steps_per_epoch=self.iterations,
                        epochs=self.epochs,
                        callbacks=cbks,
                        validation_data=(x_test, y_test))

    # BUG FIX: the original ended with ``model.save(self.model_filename)``,
    # which overwrote the best checkpoint written by ModelCheckpoint with
    # the final-epoch weights, defeating ``save_best_only=True``. The
    # checkpoint file already holds the best full model, so no final save.
    self._model = model
    self.param_count = self._model.count_params()
def train(self):
    """Train the pure-CNN model on CIFAR-10 with real-time augmentation.

    Loads CIFAR-10, one-hot encodes the labels, applies the class's color
    preprocessing, builds the network via ``self.pure_cnn_network()``, and
    trains it with the Adam optimizer. The best model by validation loss
    is checkpointed to ``self.model_filename``.

    Side effects: writes the checkpoint file and TensorBoard logs, prints
    the model summary, and sets ``self._model`` / ``self.param_count``.
    """
    # Load data and one-hot encode the labels.
    (x_train, y_train), (x_test, y_test) = cifar10.load_data()
    y_train = keras.utils.to_categorical(y_train, self.num_classes)
    y_test = keras.utils.to_categorical(y_test, self.num_classes)

    # Color preprocessing.
    x_train, x_test = self.color_preprocessing(x_train, x_test)

    # Build network.
    model = self.pure_cnn_network(self.input_shape)
    model.summary()

    # Save the best model (lowest validation loss) seen during training.
    checkpoint = ModelCheckpoint(self.model_filename,
                                 monitor='val_loss',
                                 verbose=0,
                                 save_best_only=True,
                                 mode='auto')
    plot_callback = PlotLearning()
    tb_cb = TensorBoard(log_dir=self.log_filepath, histogram_freq=0)
    cbks = [checkpoint, plot_callback, tb_cb]

    # Real-time data augmentation: horizontal flips plus ~4px shifts,
    # zero-padding the exposed border (standard CIFAR-10 augmentation).
    print('Using real-time data augmentation.')
    datagen = ImageDataGenerator(horizontal_flip=True,
                                 width_shift_range=0.125,
                                 height_shift_range=0.125,
                                 fill_mode='constant',
                                 cval=0.)
    datagen.fit(x_train)

    # Categorical cross-entropy with Adam at the configured learning rate.
    model.compile(loss='categorical_crossentropy',
                  optimizer=Adam(lr=self.learn_rate),
                  metrics=['accuracy'])

    # Start training (datagen.flow yields a Sequence, so Keras infers the
    # number of steps per epoch from the data).
    model.fit_generator(datagen.flow(x_train, y_train, batch_size=self.batch_size),
                        epochs=self.epochs,
                        validation_data=(x_test, y_test),
                        callbacks=cbks,
                        verbose=1)

    # BUG FIX: the original ended with ``model.save(self.model_filename)``,
    # which overwrote the best checkpoint written by ModelCheckpoint with
    # the final-epoch weights, defeating ``save_best_only=True``. The
    # checkpoint file already holds the best full model, so no final save.
    self._model = model
    # Record the parameter count, matching the DenseNet trainer in this file.
    self.param_count = self._model.count_params()