Example #1
def train_xception(img_size=IMG_SIZE, batch_size=BATCH_SIZE):
    xception_model = Xception(weights='imagenet', include_top=False)
    xception_model.summary()
    x = layers.GlobalAveragePooling2D()(xception_model.output)
    x = layers.Flatten()(x)  # no-op after global pooling; kept from the original
    x = layers.Dense(512)(x)
    x = layers.Dense(512)(x)
    x = layers.Dense(1000)(x)  # logits; the loss below uses from_logits=True
    model = tf.keras.Model(xception_model.input, x)
    model.summary()
    # Freeze everything except the three new Dense layers
    for layer in model.layers[:-3]:
        layer.trainable = False
    # Resume from a previously saved run; note that this replaces the model built above
    model = tf.keras.models.load_model('./Models/Xception')
    # class_mode='categorical' below expects string labels
    train_df = pd.read_csv('./classes_train.csv', dtype={'class_num': str})
    validate_df = pd.read_csv('./classes_validate.csv', dtype={'class_num': str})
    model.compile(
        loss=tf.keras.losses.CategoricalCrossentropy(from_logits=True),
        optimizer=tf.keras.optimizers.Adam(beta_1=.999),
        metrics=['accuracy'])
    train_datagen = ImageDataGenerator(rescale=1. / 255)
    train_generator = train_datagen.flow_from_dataframe(
        dataframe=train_df,
        x_col='file',
        y_col='class_num',
        target_size=(img_size, img_size),
        validate_filenames=False,
        batch_size=batch_size,
        class_mode='categorical')
    validate_generator = train_datagen.flow_from_dataframe(
        dataframe=validate_df,
        x_col='file',
        y_col='class_num',
        validate_filenames=False,
        target_size=(img_size, img_size),
        batch_size=batch_size,
        class_mode='categorical')
    model.fit(train_generator,
              validation_data=validate_generator,
              epochs=10,
              workers=16,
              # note: training steps per epoch are capped using the validation-set size
              steps_per_epoch=VALID_SIZE // batch_size,
              callbacks=[
                  tf.keras.callbacks.ModelCheckpoint('./Models/Xception'),
                  tf.keras.callbacks.EarlyStopping(monitor='val_loss',
                                                   patience=2)
              ])
    # print(model.evaluate(train_generator, steps=40, batch_size=batch_size*4))
    model.save('./Models/Xception')
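
A minimal inference sketch for the model saved above, assuming the same IMG_SIZE constant and a hypothetical image path (neither is part of the original example):

import numpy as np
import tensorflow as tf

model = tf.keras.models.load_model('./Models/Xception')
img = tf.keras.preprocessing.image.load_img('example.jpg',  # hypothetical path
                                            target_size=(IMG_SIZE, IMG_SIZE))
x = np.expand_dims(np.asarray(img) / 255.0, axis=0)  # same 1/255 rescaling as the generator
probs = tf.nn.softmax(model.predict(x), axis=-1)     # the head outputs logits
print(int(np.argmax(probs, axis=-1)[0]))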
Example #2
 def train_model(self):
     """ Training the model """
     print("Training the model")
     LR = 1e-3
     epochs = 200
     callbacks = [
         EarlyStopping(monitor='val_loss',
                       min_delta=0,
                       patience=30,
                       verbose=0,
                       mode='auto'),
         ModelCheckpoint('model.h5',
                         monitor='val_loss',
                         mode='min',
                         save_best_only=True),
         ReduceLROnPlateau(monitor='val_loss',
                           factor=0.1,
                           patience=10,
                           verbose=0,
                           mode='auto',
                           min_delta=0.0001,
                           cooldown=0,
                           min_lr=0)
     ]
     # Pre trained model Xception without fully connected layers
     base_model = Xception(input_shape=(self.img_size[0], self.img_size[1],
                                        3),
                           include_top=False,
                           weights='imagenet')
     # Unfreeze the layers
     base_model.trainable = True
     x = GlobalMaxPooling2D()(base_model.output)
     x = Dense(512, activation='relu')(x)
     x = Dense(10, activation='relu')(x)
     output = Dense(1, activation='linear')(x)
     model = Model(inputs=base_model.input, outputs=output)
     model.compile(loss='mse',
                   optimizer=Adam(learning_rate=LR),
                   metrics=[self.mae_in_months])
     base_model.summary()
     model.summary()
     # Model.fit accepts generators directly; fit_generator is deprecated
     history = model.fit(
         self.train_datagen.flow(self.x_train,
                                 self.y_train,
                                 batch_size=self.batch_size),
         steps_per_epoch=len(self.x_train) // self.batch_size,
         validation_data=self.val_datagen.flow(self.x_val,
                                               self.y_val,
                                               batch_size=self.batch_size),
         validation_steps=len(self.x_val) // self.batch_size,
         callbacks=callbacks,
         epochs=epochs,
         verbose=1)
     self.plot_it(history)
     # Restore the best checkpoint, then convert z-scored predictions and targets back to months
     model.load_weights('model.h5')
     pred = self.mean_bone_age + self.std_bone_age * model.predict(
         self.x_val, batch_size=self.batch_size, verbose=True)
     actual = self.mean_bone_age + self.std_bone_age * self.y_val
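
The compile() call above references a custom metric, self.mae_in_months, that is not shown in the snippet. A minimal sketch of such a metric, assuming the targets and predictions are z-scored bone ages that are converted back to months before taking the error (tf is assumed to be a TensorFlow import):

 def mae_in_months(self, y_true, y_pred):
     # De-normalise the z-scored ages back to months before computing the MAE
     return tf.keras.metrics.mean_absolute_error(
         self.mean_bone_age + self.std_bone_age * y_true,
         self.mean_bone_age + self.std_bone_age * y_pred)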
Example #3
def main():

    xception_model = Xception(weights='imagenet',
                              input_shape=(IMG_SIZE, IMG_SIZE, 3),
                              include_top=False)
    xception_model.summary()
    x = layers.GlobalAveragePooling2D()(xception_model.output)
    x = layers.Flatten()(x)  # no-op after global pooling; kept from the original
    x = layers.Dense(512)(x)
    x = layers.Dense(512)(x)
    x = layers.Dense(1000)(x)  # logits; see from_logits=True in compile() below
    model = tf.keras.Model(xception_model.input, x)
    model.summary()
    # class_mode='categorical' below expects string labels
    train_df = pd.read_csv('./classes_train.csv', dtype={'class_num': str})
    validate_df = pd.read_csv('./classes_validate.csv', dtype={'class_num': str})
    model.compile(
        loss=tf.keras.losses.CategoricalCrossentropy(from_logits=True),
        optimizer='adam',
        metrics=['accuracy'])
    train_datagen = ImageDataGenerator(rescale=1. / 255)
    train_generator = train_datagen.flow_from_dataframe(
        dataframe=train_df,
        x_col='file',
        y_col='class_num',
        target_size=(IMG_SIZE, IMG_SIZE),
        validate_filenames=False,
        batch_size=BATCH_SIZE,
        class_mode='categorical')
    validate_generator = train_datagen.flow_from_dataframe(
        dataframe=validate_df,
        x_col='file',
        y_col='class_num',
        validate_filenames=False,
        target_size=(IMG_SIZE, IMG_SIZE),
        batch_size=BATCH_SIZE,
        class_mode='categorical')
    model.fit(train_generator,
              validation_data=validate_generator,
              epochs=10,
              workers=16)
    model.save('./Models/Xception')
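
The script assumes module-level IMG_SIZE and BATCH_SIZE constants and an entry point, neither of which appears in the snippet. One plausible preamble (values are illustrative only):

IMG_SIZE = 299    # Xception's native input resolution; illustrative value
BATCH_SIZE = 32   # illustrative value

if __name__ == '__main__':
    main()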
Example #4
def create_model(input_shape, config):

    input_tensor = Input(
        shape=input_shape)  # assumes K.image_data_format() == 'channels_last'
    xception_model = Xception(include_top=False,
                              weights=None,
                              input_tensor=input_tensor)
    xception_model.summary()

    x = xception_model.output
    x = GlobalAveragePooling2D()(x)
    predictions = Dense(config["num_classes"], activation="softmax")(x)

    return Model(inputs=xception_model.input, outputs=predictions)
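
A hypothetical call to the factory above (the input shape and config dict are assumptions; weights=None means the network trains from scratch):

config = {"num_classes": 4}  # hypothetical config
model = create_model(input_shape=(299, 299, 3), config=config)
model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])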
Example #5
 def xception(weights='imagenet', include_top=False):
     # honour the include_top argument instead of hard-coding False
     model = Xception(weights=weights, include_top=include_top)
     model.summary()
     return model
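
Example use of the helper above as a fixed feature extractor; the dummy batch and numpy import are illustrative additions, not part of the original:

import numpy as np

backbone = xception()  # ImageNet weights, no classifier head
features = backbone.predict(np.zeros((1, 299, 299, 3), dtype='float32'))
print(features.shape)  # (1, 10, 10, 2048) for 299x299 inputs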
Example #6
# Reconstructed start of the truncated call; the file path and monitored metric are assumed
checkpoint = ModelCheckpoint(os.path.join(output_folder, modelname + '.hdf5'),
                             monitor='val_accuracy', save_best_only=True,
                             mode='max')
# Log the epoch detail into csv
csv_logger = CSVLogger(os.path.join(output_folder, modelname + '.csv'))
callbacks_list = [checkpoint, csv_logger]

# define model
model = Xception(include_top=True,
                 input_shape=(IMG_SIZE, IMG_SIZE, 3),
                 weights=None,
                 classes=4)
model.compile(loss='categorical_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])

model.summary()
from tensorflow.keras.utils import plot_model
model_file = os.path.join(output_folder, modelname + "_model.png")
plot_model(model,
           to_file=model_file,
           show_shapes=True,
           show_layer_names=False,
           rankdir='TB')
# fit model

# Model.fit accepts generators directly; fit_generator is deprecated
model.fit(train_it,
          validation_data=val_it,
          epochs=50,
          callbacks=callbacks_list)

#model.save(saved_model)
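
train_it and val_it are not defined in the snippet; one plausible way to build them with ImageDataGenerator (the directory layout and 80/20 split are assumptions):

datagen = ImageDataGenerator(rescale=1. / 255, validation_split=0.2)
train_it = datagen.flow_from_directory('data/train',  # assumed layout: one sub-folder per class
                                       target_size=(IMG_SIZE, IMG_SIZE),
                                       batch_size=32,
                                       class_mode='categorical',
                                       subset='training')
val_it = datagen.flow_from_directory('data/train',
                                     target_size=(IMG_SIZE, IMG_SIZE),
                                     batch_size=32,
                                     class_mode='categorical',
                                     subset='validation')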