def classifier(self) -> Model:
    """Build and compile the Xception-based two-class classifier.

    Returns:
        Model: an ImageNet-pretrained Xception backbone (no top, global
        average pooling) with a 2-way softmax head, compiled with Adadelta
        at ``self.learning_rate`` for categorical cross-entropy.
    """
    # Pretrained feature extractor; 'avg' pooling yields a flat feature vector.
    backbone = Xception(include_top=False, weights='imagenet',
                        input_shape=self.data_shape, pooling='avg')
    # New 2-class softmax head on top of the pooled features.
    head = Dense(2, activation='softmax', name='predictions')(backbone.output)
    classifier_model = Model(backbone.input, head, name='xception')
    classifier_model.compile(Adadelta(lr=self.learning_rate),
                             loss="categorical_crossentropy",
                             metrics=["accuracy"])
    return classifier_model
class Xceptionmodel(CNNmodel):
    """Xception classifier trained from scratch on 48x48x1 images, 4 classes.

    Wraps a ``CNNmodel`` for data access (``train_data`` / ``val_data``).
    """

    def __init__(self, data_directory, batch_size=16):
        # NOTE(review): composes a CNNmodel instead of calling
        # super().__init__() despite subclassing it — preserved as-is;
        # confirm the inheritance is intentional.
        self.cnn = CNNmodel(data_directory)
        self.height, self.width, self.depth = (48, 48, 1)
        self.batch_size = batch_size
        # Fresh (untrained) Xception: weights=None, 4-way classification head.
        self.xception = Xception(include_top=True, weights=None,
                                 input_shape=(self.height, self.width, self.depth),
                                 classes=4)

    def summary(self):
        """Print the underlying Keras model's layer summary."""
        print(self.xception.summary())

    def train(self, optimizer='adam', loss='categorical_crossentropy', epoch=20):
        """Compile and fit the model with early stopping, then pickle it.

        Args:
            optimizer: Keras optimizer name or instance.
            loss: loss function name. Bug fix: this argument was previously
                ignored (loss was hard-coded in ``compile``); it is now
                honored. The default keeps the old behavior.
            epoch: number of training epochs.
        """
        self.xception.compile(loss=loss, optimizer=optimizer,
                              metrics=["accuracy"])
        print("[INFO] training network...")
        # Stop when validation accuracy has not improved for 4 epochs.
        es = EarlyStopping(monitor='val_acc', mode='max', verbose=1, patience=4)
        self.history = self.xception.fit(self.cnn.train_data, epochs=epoch,
                                         validation_data=self.cnn.val_data,
                                         verbose=1, callbacks=[es])
        print("[INFO] Saving model...")
        # Bug fix: the file handle was leaked (pickle.dump(..., open(...)));
        # a context manager guarantees the file is closed.
        # NOTE(review): pickling a Keras model is fragile across versions —
        # model.save() is the supported path; kept to preserve the existing
        # 'xception.pkl' artifact.
        with open('xception.pkl', 'wb') as fh:
            pickle.dump(self.xception, fh)
        print("[INFO] Done...")
## checkpoint that saves the best weights according to the validation accuracy checkpoint = ModelCheckpoint(filepath=modelpath, monitor='val_acc', verbose=1, save_best_only=True, save_weights_only=True) ## csv_logger to write losses and accuracies after each epoch in csv file csv_logger = CSVLogger(filename=csvpath, separator=',', append=True) print('[INFO] compiling model...') if worker == 'single': ## Adam or RMSProp with step learning rate decay: ## https://towardsdatascience.com/learning-rate-schedules-and-adaptive-learning-rate-methods-for-deep-learning-2c8f433990d1 model.compile(optimizer=Adam(lr_schedule(0)), loss='categorical_crossentropy', metrics=['accuracy']) elif worker == 'parallel': parallel_model = multi_gpu_model(model, gpus=2) parallel_model.compile(optimizer=Adam(lr_schedule(0)), loss='categorical_crossentropy', metrics=['accuracy']) ############### version balance (uncomment to balance the data set) #################################################### # X_train_reshape = X_train.reshape(len(y_train), -1) # ros = RandomOverSampler(random_state = 42) # X_train_reshape_resample, y_train = ros.fit_resample(X_train_reshape, y_train) # X_train = X_train_reshape_resample.reshape(len(X_train_reshape_resample), 299, 299, 3) ############### end version balance (uncomment to balance the data set) ################################################
def build(self):
    """Construct and compile the architecture named by ``self.model_name``.

    Returns:
        A Keras model compiled with Adam (``self.lr``) and categorical
        cross-entropy.

    Raises:
        ValueError: if ``self.model_name`` is not a known architecture.
            (Previously an unknown name fell through the if/elif chain and
            raised ``UnboundLocalError`` on ``model``.)
    """
    # Stock keras.applications architectures all share one constructor
    # signature, so a single dispatch table replaces six identical branches.
    keras_applications = {
        'VGG16': VGG16,
        'VGG19': VGG19,
        'ResNet50': ResNet50,
        'InceptionV3': InceptionV3,
        'Xception': Xception,
        'MobileNet': MobileNet,
    }
    # Project-local architectures, each built through its own factory method.
    custom_builders = {
        'DenseNet': lambda: DenseNet_Model(self.image_shape, self.classes).build_model(),
        'ResNet18': lambda: Resnet_Model(self.image_shape, self.classes).build_resnet18(),
        'ResNet34': lambda: Resnet_Model(self.image_shape, self.classes).build_resnet34(),
        'ResNet101': lambda: Resnet_Model(self.image_shape, self.classes).build_resnet101(),
        'ResNet152': lambda: Resnet_Model(self.image_shape, self.classes).build_resnet152(),
        'AlexNet': lambda: MODEL(self.image_shape, self.classes).AlexNet(),
        'LeNet': lambda: MODEL(self.image_shape, self.classes).LeNet(),
        'ZF_Net': lambda: MODEL(self.image_shape, self.classes).ZF_Net(),
        'mnist_net': lambda: MODEL(self.image_shape, self.classes).mnist_net(),
        'VGG16_TSL': lambda: MODEL(self.image_shape, self.classes).VGG16_TSL(),
    }

    if self.model_name in keras_applications:
        model = keras_applications[self.model_name](
            include_top=True, weights=None, input_tensor=None,
            input_shape=self.image_shape, pooling='max', classes=self.classes)
    elif self.model_name in custom_builders:
        model = custom_builders[self.model_name]()
    else:
        # Robustness fix: fail loudly with a clear message instead of
        # UnboundLocalError below.
        raise ValueError('Unknown model_name: {}'.format(self.model_name))

    adam = Adam(lr=self.lr, beta_1=0.9, beta_2=0.999, epsilon=None, decay=0.0)
    model.compile(loss="categorical_crossentropy", optimizer=adam,
                  metrics=["accuracy"])
    return model
# Script: train an Xception binary classifier on the "wood" mobile dataset
# and save it next to the working directory under saved_models/.
from keras.utils import to_categorical
from keras import losses, metrics
from keras.callbacks import ModelCheckpoint
import Dataset_mobile
import os

if __name__ == '__main__':
    save_dir = os.path.join(os.getcwd(), 'saved_models')
    model_name = 'xceptionMobile_wood_model.h5'
    model_path = os.path.join(save_dir, model_name)

    # Dataset_mobile is project-local; read_data() returns train/test splits
    # plus a label dictionary — presumably label-name mapping; verify there.
    x_train, y_train, x_test, y_test, dictionary = Dataset_mobile.read_data()
    x_train_resized = Dataset_mobile.resize_imgs(x_train)
    x_test_resized = Dataset_mobile.resize_imgs(x_test)

    # One-hot encode the two-class labels for categorical cross-entropy.
    y_train = to_categorical(y_train, num_classes=2)
    y_test = to_categorical(y_test, num_classes=2)

    # Xception trained from scratch (weights=None) with a 2-way head.
    model = Xception(include_top=True, weights=None, classes=2)
    opt = Adam(lr=5e-6)  # very small LR for full-network from-scratch training
    model.compile(optimizer=opt, loss=losses.categorical_crossentropy,
                  metrics=[metrics.categorical_accuracy])
    model.fit(x_train_resized, y_train, epochs=20, batch_size=6)
    model.save(model_path)

    # score1 is the TRAINING-set score (sanity check / overfit gauge),
    # score2 the held-out test score.
    score1 = model.evaluate(x_train_resized, y_train, batch_size=6)
    score2 = model.evaluate(x_test_resized, y_test, batch_size=6)
    print(score1)
    print(score2)
# Benchmark script: time 3 epochs of Xception training on random data.
import tensorflow as tf
from keras.applications import Xception
import datetime
import numpy as np

num_samples = 1000
height = 224
width = 224
num_classes = 1000

start = datetime.datetime.now()
with tf.device('/cpu:0'):
    model = Xception(weights=None,
                     input_shape=(height, width, 3),
                     classes=num_classes)
    model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
    # Random inputs/targets — this measures throughput only, not learning.
    x = np.random.random((num_samples, height, width, 3))
    y = np.random.random((num_samples, num_classes))
    model.fit(x, y, epochs=3, batch_size=16)
    model.save('my_model.h5')
end = datetime.datetime.now()
time_delta = end - start
# NOTE(review): the label translates to "GPU processing time" but the device
# pinned above is /cpu:0 — confirm which device this run is meant to measure.
print('GPU 처리시간:', time_delta)

# Second timing pass (also on /cpu:0).
# NOTE(review): this section is truncated at the end of this chunk — the
# y/fit/timing lines continue beyond the visible source.
start = datetime.datetime.now()
with tf.device('/cpu:0'):
    model = Xception(weights=None,
                     input_shape=(height, width, 3),
                     classes=num_classes)
    model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
    x = np.random.random((num_samples, height, width, 3))
# nb_validation_samples = 2000 epochs = 50 steps = nb_train_samples // batch_size saving_path = r'D:\warelee\datasets\TrainModel\xception\nfl' classes = 7 weights = os.path.join(r'D:\warelee\datasets\TrainModel\xception\nfl', 'epoch4-train_loss0.160-train_acc1.000.h5') # weights=None # network model = Xception(include_top=True, weights=weights, input_shape=(img_height, img_width, 3), classes=classes) print(model.summary()) model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy']) def _preprocessing_function(np_img): return np_img / 127. - 1 # data prepare datagen = ImageDataGenerator(rotation_range=10, width_shift_range=0.1, zoom_range=0.1, horizontal_flip=True, fill_mode='constant', preprocessing_function=_preprocessing_function) train_generator = datagen.flow_from_directory(train_data_dir,