def _fine_tuning(self):
    """Fine-tune the model: freeze the configured top layers, recompile
    with a very low learning rate, train on augmented data, and save.

    Side effects: writes best weights via the checkpoint callbacks and the
    final model to ``config.get_model_path()``.
    """
    self.freeze_top_layers()

    # Tiny LR so fine-tuning only nudges the pre-trained weights.
    self.model.compile(
        loss='categorical_crossentropy',
        optimizer=Adam(lr=1e-5),
        metrics=['accuracy'])
    self.model.summary()

    augmented_flow = self.get_train_datagen(
        rotation_range=30.,
        shear_range=0.2,
        zoom_range=0.2,
        horizontal_flip=True)
    callback_list = self.get_callbacks(
        config.get_fine_tuned_weights_path(),
        patience=self.fine_tuning_patience)

    if util.is_keras2():
        # Keras 2 API: epoch size is expressed in batches.
        self.model.fit_generator(
            augmented_flow,
            steps_per_epoch=config.nb_train_samples / float(self.batch_size),
            epochs=self.nb_epoch,
            validation_data=self.get_validation_datagen(),
            validation_steps=config.nb_validation_samples / float(self.batch_size),
            callbacks=callback_list,
            class_weight=self.class_weight)
    else:
        # Keras 1 API: epoch size is expressed in samples.
        self.model.fit_generator(
            augmented_flow,
            samples_per_epoch=config.nb_train_samples,
            nb_epoch=self.nb_epoch,
            validation_data=self.get_validation_datagen(),
            nb_val_samples=config.nb_validation_samples,
            callbacks=callback_list,
            class_weight=self.class_weight)

    self.model.save(config.get_model_path())
def _fine_tuning(self):
    """Fine-tune the model with model-specific input preprocessing.

    Freezes the top layers, recompiles with a low learning rate, renders
    the architecture to ``model1.png``, then trains on augmented data with
    early stopping and saves the final model.
    """
    self.freeze_top_layers()

    # Low LR keeps the pre-trained weights close to their initial values.
    self.model.compile(
        loss='categorical_crossentropy',
        optimizer=Adam(lr=1e-5),
        metrics=['accuracy'])
    self.model.summary()

    # Dump the architecture diagram for visual inspection.
    plot_model(self.model, to_file='model1.png', show_shapes=True)

    flow = self.get_train_datagen(
        rotation_range=30.,
        shear_range=0.2,
        zoom_range=0.2,
        horizontal_flip=True,
        preprocessing_function=self.preprocess_input)

    # Checkpointing + early stopping.
    cbs = self.get_callbacks(
        config.get_fine_tuned_weights_path(),
        patience=self.fine_tuning_patience)

    if util.is_keras2():
        # Keras 2: sizes in batches per epoch.
        self.model.fit_generator(
            flow,
            steps_per_epoch=config.nb_train_samples / float(self.batch_size),
            epochs=self.nb_epoch,
            validation_data=self.get_validation_datagen(),
            validation_steps=config.nb_validation_samples / float(self.batch_size),
            callbacks=cbs,
            class_weight=self.class_weight)
    else:
        # Keras 1: sizes in samples per epoch.
        self.model.fit_generator(
            flow,
            samples_per_epoch=config.nb_train_samples,
            nb_epoch=self.nb_epoch,
            validation_data=self.get_validation_datagen(),
            nb_val_samples=config.nb_validation_samples,
            callbacks=cbs,
            class_weight=self.class_weight)

    self.model.save(config.get_model_path())
def _fine_tuning(self):
    """Fine-tune the model for binary classification.

    Freezes the top layers, recompiles with a low learning rate, trains on
    heavily augmented inputs rescaled to [0, 1], saves a history snapshot
    and the final model.

    FIX: the Keras 1 branch previously validated on un-rescaled images
    (``get_validation_datagen()`` without ``rescale=1./255``), while the
    training data and the Keras 2 validation branch were both rescaled.
    Validation in that branch now uses the same 1/255 rescaling.
    """
    self.freeze_top_layers()

    # Very low LR so fine-tuning only nudges the pre-trained weights.
    self.model.compile(
        loss='binary_crossentropy',
        optimizer=Adam(lr=1e-5),
        metrics=['binary_accuracy'])

    train_data = self.get_train_datagen(
        rescale=1. / 255,
        rotation_range=60.,
        width_shift_range=0.2,
        height_shift_range=0.2,
        horizontal_flip=True,
        vertical_flip=True)
    callbacks = self.get_callbacks(
        config.get_fine_tuned_weights_path(),
        patience=self.fine_tuning_patience)

    if util.is_keras2():
        # Keras 2 API: epoch size expressed in batches.
        hist = self.model.fit_generator(
            train_data,
            steps_per_epoch=config.nb_train_samples / float(self.batch_size),
            epochs=self.nb_epoch,
            validation_data=self.get_validation_datagen(rescale=1. / 255),
            validation_steps=config.nb_validation_samples / float(self.batch_size),
            callbacks=callbacks,
            class_weight=self.class_weight)
    else:
        # Keras 1 API: epoch size expressed in samples.
        # Rescale validation inputs here too, matching the training data.
        hist = self.model.fit_generator(
            train_data,
            samples_per_epoch=config.nb_train_samples,
            nb_epoch=self.nb_epoch,
            validation_data=self.get_validation_datagen(rescale=1. / 255),
            nb_val_samples=config.nb_validation_samples,
            callbacks=callbacks,
            class_weight=self.class_weight)

    print(hist.history)
    # Timestamp prefix keeps successive history dumps from clobbering
    # each other.
    util.save_history(history=hist, prefix=time.time())
    self.model.save(config.get_model_path())
def _fine_tuning(self):
    """Fine-tune one of three model variants (center-loss, triplet-loss,
    or plain softmax), selected by ``config.isCenterLoss`` /
    ``config.isTripletLoss``, then save the trained model and history.

    Improvements over the previous revision:
    - the hard-coded pretrained-weights path, repeated three times, is a
      single local constant (NOTE: it is still an absolute, machine-local
      path — move it into ``config``);
    - the duplicated validation-datagen kwargs are built once;
    - the large block of commented-out Keras 1 code was removed.
    """
    # NOTE(review): machine-local absolute path; should come from config.
    pretrained_weights = (
        '/home/yuzhg/Inception-v3/trained/'
        'fine-tuned-best-inception-weights.h5')

    self.freeze_top_layers1()

    augment_kwargs = dict(
        rotation_range=30.,
        shear_range=0.2,
        zoom_range=0.2,
        horizontal_flip=True,
        preprocessing_function=self.preprocess_input)
    train_data = self.get_train_datagen(**augment_kwargs)

    checkpoint_dir = os.path.join(os.path.abspath('.'), 'checkpoint')
    callbacks = self.get_callbacks(
        config.get_fine_tuned_weights_path(),
        checkpoint_dir,
        patience=self.fine_tuning_patience)

    # NOTE(review): on Keras 1 no branch below runs, so self.history is
    # never assigned and util.save_history() would raise AttributeError —
    # the Keras 1 training path was removed; confirm Keras 2 is required.
    if util.is_keras2():
        if config.isCenterLoss:
            self.center_model.load_weights(pretrained_weights, by_name=True)
            # Second output is the center-loss term; the lambda passes the
            # model-computed loss value straight through.
            self.center_model.compile(
                loss=['categorical_crossentropy',
                      lambda y_true, y_pred: y_pred],
                loss_weights=[1, 0.2],
                metrics=['accuracy'],
                optimizer=Adam(lr=1e-5))
            self.center_model.summary()
            self.history = self.center_model.fit_generator(
                util.clone_y_generator(train_data),
                steps_per_epoch=config.nb_train_samples / float(self.batch_size),
                epochs=self.nb_epoch,
                validation_data=util.clone_y_generator(
                    self.get_validation_datagen()),
                validation_steps=config.nb_validation_samples / float(self.batch_size),
                callbacks=callbacks,
                class_weight=self.class_weight)
        elif config.isTripletLoss:
            self.triplet_model.load_weights(pretrained_weights, by_name=True)
            # Joint objective: softmax classification + hard triplet loss.
            self.triplet_model.compile(
                optimizer=Adam(lr=1e-5),
                loss=['categorical_crossentropy', self.hard_triplet_loss],
                loss_weights=[1.0, 1.0],
                metrics=['accuracy'])
            self.triplet_model.summary()
            valid_data = self.get_validation_datagen(**augment_kwargs)
            self.history = self.triplet_model.fit_generator(
                util.clone_y_generator1(train_data),
                steps_per_epoch=config.nb_train_samples / float(self.batch_size),
                epochs=self.nb_epoch,
                validation_data=util.clone_y_generator1(valid_data),
                validation_steps=config.nb_validation_samples / float(self.batch_size),
                callbacks=callbacks,
                class_weight=self.class_weight)
        else:
            # Plain softmax fine-tuning.
            self.model.load_weights(pretrained_weights, by_name=True)
            self.model.compile(
                loss='categorical_crossentropy',
                optimizer=Adam(lr=1e-5),
                metrics=['accuracy'])
            self.model.summary()
            self.history = self.model.fit_generator(
                train_data,
                steps_per_epoch=config.nb_train_samples / float(self.batch_size),
                epochs=self.nb_epoch,
                validation_data=self.get_validation_datagen(**augment_kwargs),
                validation_steps=config.nb_validation_samples / float(self.batch_size),
                callbacks=callbacks,
                class_weight=self.class_weight)

    # Persist whichever variant was trained, plus its training history.
    if config.isCenterLoss:
        self.center_model.save(config.get_model_path())
        util.save_history(self.history, self.center_model)
    elif config.isTripletLoss:
        self.triplet_model.save(config.get_model_path())
        util.save_history(self.history, self.triplet_model)
    else:
        self.model.save(config.get_model_path())
        util.save_history(self.history, self.model)