def main():
    """Train an InceptionV3-based 4-class orientation classifier.

    Loads train/test data via the module-level ``load_data``, trains with
    RMSprop + early stopping + best-only checkpointing, saves the final
    model under ``./saved_models``, and prints test loss/accuracy.
    """
    batch_size = 32
    num_classes = 4
    epochs = 100
    save_dir = os.path.join(os.getcwd(), 'saved_models')
    model_name = 'orientation-inception.h5'

    data_train, data_test = load_data()

    # Use Google Inception v3 as the feature extractor.
    # BUG FIX: 'softmax' is not a valid `pooling` value (only 'avg', 'max'
    # or None), and `classes` is ignored when include_top=False, so the
    # original model ended in a 4-D feature map that could not be trained
    # with categorical_crossentropy against 4-class labels.  Use global
    # average pooling and add an explicit softmax classification head.
    base = InceptionV3(
        include_top=False,
        weights=None,
        input_shape=(192, 192, 3),
        pooling='avg',
    )
    model = Sequential()
    model.add(base)
    model.add(Dense(num_classes))
    model.add(Activation('softmax'))

    # Initiate the RMSprop optimizer.
    # BUG FIX: keras.optimizers has no lowercase `rmsprop` attribute — the
    # class is `RMSprop` ('rmsprop' is only valid as a *string* identifier
    # passed to compile()); the original line raised AttributeError.
    opt = keras.optimizers.RMSprop(lr=0.0001, decay=1e-6)

    # Train the model using RMSprop.
    model.compile(loss='categorical_crossentropy',
                  optimizer=opt,
                  metrics=['accuracy'])

    if not os.path.isdir(save_dir):
        os.makedirs(save_dir)

    # Keep only the best (lowest val_loss) weights seen during training.
    checkpointer = ModelCheckpoint(
        filepath=os.path.join(save_dir, 'checkpoint.h5'),
        verbose=1,
        save_best_only=True,
    )
    early_stopping = EarlyStopping(monitor='val_loss', patience=2)

    train_generator = DataGenerator(data_train)
    val_generator = DataGenerator(data_test)
    model.fit_generator(
        train_generator.flow(batch_size=batch_size),
        epochs=epochs,
        validation_data=val_generator.flow(batch_size=batch_size),
        shuffle=True,
        callbacks=[checkpointer, early_stopping],
    )

    # Save model and weights.
    model_path = os.path.join(save_dir, model_name)
    model.save(model_path)
    print('Saved trained model at %s' % model_path)

    # Score trained model.
    scores = model.evaluate_generator(
        val_generator.flow(batch_size=batch_size))
    print('Test loss:', scores[0])
    print('Test accuracy:', scores[1])
def main():
    """Train a small from-scratch CNN 4-class orientation classifier.

    Loads train/test data via the module-level ``load_data2``, trains a
    3-stage conv net (32 -> 64 -> 128 filters) with RMSprop, early stopping
    and best-only checkpointing, saves the model under ``./saved_models``,
    and prints test loss/accuracy.

    NOTE(review): this module defines ``main`` twice; this definition
    shadows the earlier InceptionV3-based one.  Consider renaming one.
    """
    batch_size = 32
    num_classes = 4
    epochs = 100
    save_dir = os.path.join(os.getcwd(), 'saved_models')
    model_name = 'keras_orientation_trained_model.h5'

    data_train, data_test = load_data2()

    # Three conv blocks, each: conv-relu x2, 2x2 max-pool, 25% dropout.
    model = Sequential()
    model.add(Conv2D(32, (3, 3), padding='same', input_shape=(128, 128, 3)))
    model.add(Activation('relu'))
    model.add(Conv2D(32, (3, 3)))
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(0.25))

    model.add(Conv2D(64, (3, 3), padding='same'))
    model.add(Activation('relu'))
    model.add(Conv2D(64, (3, 3)))
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(0.25))

    model.add(Conv2D(128, (3, 3), padding='same'))
    model.add(Activation('relu'))
    model.add(Conv2D(128, (3, 3)))
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(0.25))

    # Dense head: 512-unit hidden layer, 50% dropout, softmax over classes.
    model.add(Flatten())
    model.add(Dense(512))
    model.add(Activation('relu'))
    model.add(Dropout(0.5))
    model.add(Dense(num_classes))
    model.add(Activation('softmax'))

    # Initiate the RMSprop optimizer.
    # BUG FIX: keras.optimizers has no lowercase `rmsprop` attribute — the
    # class is `RMSprop` ('rmsprop' is only valid as a *string* identifier
    # passed to compile()); the original line raised AttributeError.
    opt = keras.optimizers.RMSprop(lr=0.0001, decay=1e-6)

    # Train the model using RMSprop.
    model.compile(loss='categorical_crossentropy',
                  optimizer=opt,
                  metrics=['accuracy'])

    if not os.path.isdir(save_dir):
        os.makedirs(save_dir)

    # Keep only the best (lowest val_loss) weights seen during training.
    checkpointer = ModelCheckpoint(
        filepath=os.path.join(save_dir, 'checkpoint.hdf5'),
        verbose=1,
        save_best_only=True,
    )
    early_stopping = EarlyStopping(monitor='val_loss', patience=2)

    train_generator = DataGenerator(data_train)
    val_generator = DataGenerator(data_test)
    model.fit_generator(
        train_generator.flow(batch_size=batch_size),
        epochs=epochs,
        validation_data=val_generator.flow(batch_size=batch_size),
        shuffle=True,
        callbacks=[checkpointer, early_stopping],
    )

    # Save model and weights.
    model_path = os.path.join(save_dir, model_name)
    model.save(model_path)
    print('Saved trained model at %s' % model_path)

    # Score trained model.
    scores = model.evaluate_generator(
        val_generator.flow(batch_size=batch_size))
    print('Test loss:', scores[0])
    print('Test accuracy:', scores[1])