Example #1
# Imports assumed by this snippet; preprocess_input should come from whichever
# backbone is actually used, e.g. tf.keras.applications.resnet50.
import pandas as pd
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.applications.resnet50 import preprocess_input

train_df = pd.read_csv('train.csv')
validation_df = pd.read_csv('validation.csv')
test_df = pd.read_csv('test.csv')

# train_df = train_df.head(100)
# validation_df = validation_df.head(100)

image_size = 224
data_generator = ImageDataGenerator(preprocessing_function=preprocess_input)

train_generator = data_generator.flow_from_dataframe(train_df,
                                                     "images_train/train",
                                                     x_col='filename',
                                                     y_col='category',
                                                     target_size=(image_size,
                                                                  image_size),
                                                     batch_size=5,
                                                     class_mode='categorical',
                                                     shuffle=False)

validation_generator = data_generator.flow_from_dataframe(
    validation_df,
    "images_validation/validation",
    x_col='filename',
    y_col='category',
    target_size=(image_size, image_size),
    batch_size=5,
    class_mode='categorical',
    shuffle=False)
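# Minimal follow-up sketch (not part of the original example): feeding these
# generators to an already-compiled Keras model. `model` and the epoch count
# are assumptions for illustration only.
steps_per_epoch = train_generator.n // train_generator.batch_size
validation_steps = validation_generator.n // validation_generator.batch_size

history = model.fit(train_generator,
                    steps_per_epoch=steps_per_epoch,
                    validation_data=validation_generator,
                    validation_steps=validation_steps,
                    epochs=10)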
Example #2
    def train(self,
              dataset: Union[tf.data.Dataset, ClassificationDataset],
              model: tf.keras.Model,
              init_epoch: int,
              epochs: int,
              batch_size: int,
              callbacks: List[tf.keras.callbacks.Callback],
              class_weights=None,
              augmentation: bool = False):
        """
        Trains the model for the specified number of epochs.
        """

        self.__logs['training'].info('Training the model...\n')

        # Display the architecture of the model
        self.__logs['training'].info('Architecture of the model:')
        model.summary()

        # Train the model
        self.__logs['training'].info('Starting the fitting procedure:')
        self.__logs['training'].info('* Total number of epochs:   ' +
                                     str(epochs))
        self.__logs['training'].info('* Initial epoch:            ' +
                                     str(init_epoch) + '\n')

        training_set, training_set_size = dataset.get_training_set()
        validation_set, validation_set_size = dataset.get_validation_set()
        training_steps = training_set_size // batch_size + 1
        validation_steps = validation_set_size // batch_size + 1

        if augmentation:
            x_train, y_train = dataset.get_xy_training()
            x_val, y_val = dataset.get_xy_validation()

            train_image_data_generator = ImageDataGenerator(
                brightness_range=[0.7, 1.0],
                rotation_range=10,
                width_shift_range=0.1,
                height_shift_range=0.1,
                zoom_range=.1)
            val_image_data_generator = ImageDataGenerator()

            train_generator = train_image_data_generator.flow_from_dataframe(
                dataframe=pd.DataFrame({
                    'image': x_train,
                    'class': y_train
                }),
                directory='',
                x_col='image',
                y_col='class',
                class_mode='other',
                target_size=(self.__input_width, self.__input_height),
                batch_size=batch_size)

            val_generator = val_image_data_generator.flow_from_dataframe(
                dataframe=pd.DataFrame({
                    'image': x_val,
                    'class': y_val
                }),
                directory='',
                x_col='image',
                y_col='class',
                class_mode='other',
                target_size=(self.__input_width, self.__input_height),
                batch_size=batch_size)

            model.fit_generator(train_generator,
                                epochs=epochs,
                                steps_per_epoch=training_steps,
                                validation_data=val_generator,
                                validation_steps=validation_steps,
                                callbacks=callbacks,
                                initial_epoch=init_epoch,
                                class_weight=class_weights)
        else:
            model.fit(training_set,
                      epochs=epochs,
                      steps_per_epoch=training_steps,
                      validation_data=validation_set,
                      validation_steps=validation_steps,
                      callbacks=callbacks,
                      initial_epoch=init_epoch,
                      class_weight=class_weights)

        self.__logs['training'].info(
            'Training procedure performed successfully!\n')
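# Note (not from the original code): in current tf.keras, Model.fit_generator
# has been folded into Model.fit, and flow_from_dataframe's class_mode='other'
# is spelled 'raw'. A commented sketch of the equivalent modern call for the
# augmented branch, using the same local variables as above:
#
#     model.fit(train_generator,
#               epochs=epochs,
#               steps_per_epoch=training_steps,
#               validation_data=val_generator,
#               validation_steps=validation_steps,
#               callbacks=callbacks,
#               initial_epoch=init_epoch,
#               class_weight=class_weights)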
Example #3
                                     stratify=train_df[['is_oval']])

TrainImageDataGenerator = ImageDataGenerator(height_shift_range=0.1,
                                             width_shift_range=0.1,
                                             rotation_range=5,
                                             shear_range=0.01,
                                             fill_mode='reflect',
                                             zoom_range=0.15,
                                             rescale=1. / 255.)
testImageGenerator = ImageDataGenerator(
    # rescale=1. / 255.
)

train_gen = TrainImageDataGenerator.flow_from_dataframe(train_dfe,
                                                        x_col='splimage_path',
                                                        y_col='is_oval',
                                                        class_mode='binary',
                                                        batch_size=28,
                                                        target_size=Img_size)
val_gen = TrainImageDataGenerator.flow_from_dataframe(val_df,
                                                      x_col='splimage_path',
                                                      y_col='is_oval',
                                                      class_mode='binary',
                                                      batch_size=28,
                                                      target_size=Img_size)
test_gen = testImageGenerator.flow_from_dataframe(
    test_df,
    x_col='splimage_path',
    y_col='is_oval',
    class_mode='binary',
    batch_size=28,
    shuffle=False,
    target_size=Img_size)
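# Hypothetical follow-up (not in the original snippet): scoring a trained binary
# classifier on test_gen. `model` and the 0.5 decision threshold are assumptions;
# the index-to-label mapping should be checked against test_gen.class_indices.
import numpy as np

test_steps = int(np.ceil(test_gen.n / test_gen.batch_size))
test_probs = model.predict(test_gen, steps=test_steps, verbose=1)
test_labels = (test_probs.ravel() > 0.5).astype(int)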
Example #4
model.add(Flatten())
model.add(Dense(num_classes, activation='sigmoid'))

model.compile(optimizer=SGD(lr=0.01, momentum=0.9, decay=1e-4),
              loss='binary_crossentropy',
              metrics=[mzz_metrics])

model.summary()

model.load_weights("./Resnet50_og.12-0.7703.hdf5")
data = pd.read_csv('for_test.txt', header=None, names=['id'])
val_data = data
test_datagen = ImageDataGenerator(preprocessing_function=preprocess_input)

test_generator = test_datagen.flow_from_dataframe(dataframe=val_data,
                                                  directory="./val2014/",
                                                  x_col="id",
                                                  y_col=None,
                                                  batch_size=64,
                                                  seed=42,
                                                  shuffle=False,
                                                  class_mode=None,
                                                  target_size=(224, 224))

# Round up so the final partial batch is also included in the predictions
STEP_SIZE_TEST = int(np.ceil(test_generator.n / test_generator.batch_size))
test_generator.reset()
pred = model.predict_generator(test_generator, steps=STEP_SIZE_TEST, verbose=1)
predicted_class_indices = np.argmax(pred, axis=1)
with open('TEST_predict_label.pkl', 'wb') as f:
    pickle.dump(predicted_class_indices, f)
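# Hypothetical follow-up (not part of the original): pairing each prediction with
# its source image via the generator's filenames attribute, then saving a CSV.
results = pd.DataFrame({'filename': test_generator.filenames,
                        'predicted_class': predicted_class_indices})
results.to_csv('TEST_predict_label.csv', index=False)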
Example #5
# -------------------
# DATA PREPROCESSING |
# -------------------

test_image_datagen = ImageDataGenerator(rescale=1. / 255)

training_image_datagen = ImageDataGenerator(rescale=1. / 255,
                                            shear_range=0.2,
                                            zoom_range=0.4,
                                            horizontal_flip=True)

training_generator = training_image_datagen.flow_from_dataframe(
    paths[0],
    x_col='x_col',
    y_col='y_col',
    target_size=(hprm['INPUT_H'], hprm['INPUT_W']),
    batch_size=hprm['BATCH_SIZE'],
    class_mode='sparse')

validation_generator = test_image_datagen.flow_from_dataframe(
    paths[1],
    x_col='x_col',
    y_col='y_col',
    target_size=(hprm['INPUT_H'], hprm['INPUT_W']),
    batch_size=hprm['BATCH_SIZE'],
    class_mode='sparse')

# Pool classes to exclude U-bar/LoopLock differentiation for the first model
# class_pool_mapping = {0: 0, 1: 0, 2: 0, 3: 0, 4: 1, 5: 1, 6: 1, 7: 1}
# class_pool_mapping = {0: 0, 1: 0, 2: 1, 3: 1}
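# Hypothetical sketch (not in the original script): applying a class-pooling map
# such as the commented dictionaries above to the sparse labels yielded by a
# generator. The wrapper name and the example mapping are illustrative only.
import numpy as np

def pooled_batches(generator, mapping):
    """Yield batches with integer labels remapped through `mapping`."""
    remap = np.vectorize(mapping.get)
    for x_batch, y_batch in generator:
        yield x_batch, remap(y_batch.astype(int))

# e.g. pooled_train = pooled_batches(training_generator, {0: 0, 1: 0, 2: 1, 3: 1})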