Example #1
        print(f"Loading weights from model: {weights_dir}...")
        model.load_weights(weights_dir)

        # Experiment folder named after the freeze index and the timestamp
        exp_dir = os.path.join(model_dir, 'f' + str(freeze_until) + str(now))
        if not os.path.exists(exp_dir):
            os.makedirs(exp_dir)

        callbacks_list = []

        # Model checkpoint
        if CHECKPOINTS:
            callbacks_list.append(callbacks.checkpoints(exp_dir))

        # Early stopping
        if EARLY_STOP:
            callbacks_list.append(callbacks.early_stopping(patience=7))

        # Tensorboard
        if TENSORBOARD:
            callbacks_list.append(callbacks.tensorboard(exp_dir))

        # Save best model
        # ----------------
        if SAVE_BEST:
            callbacks_list.append(callbacks.save_best(exp_dir))

        model.fit(
            x=train_dataset,
            epochs=1000,
            steps_per_epoch=len(train_flow),
            validation_data=validation_dataset,
            callbacks=callbacks_list
        )
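
Both examples build their callback list from a project-local callbacks helper module that is not shown in either snippet. The sketch below is only an assumption of what that module could look like, wrapping the standard tf.keras callbacks; every filename pattern and monitored quantity here is a guess, not the project's actual code. Note that Example #1 appends the result of callbacks.save_best directly, while Example #2 below unpacks a (path, callback) pair; the sketch follows the tuple signature of Example #2.

import os
import tensorflow as tf

def checkpoints(exp_dir):
    # Save the weights at the end of every epoch (assumed layout: exp_dir/ckpts)
    ckpt_dir = os.path.join(exp_dir, 'ckpts')
    os.makedirs(ckpt_dir, exist_ok=True)
    return tf.keras.callbacks.ModelCheckpoint(
        filepath=os.path.join(ckpt_dir, 'cp_{epoch:02d}.ckpt'),
        save_weights_only=True)

def early_stopping(patience):
    # Stop once val_loss has not improved for `patience` epochs
    return tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=patience)

def tensorboard(exp_dir):
    # Write TensorBoard logs under the experiment folder
    log_dir = os.path.join(exp_dir, 'tb_logs')
    os.makedirs(log_dir, exist_ok=True)
    return tf.keras.callbacks.TensorBoard(log_dir=log_dir)

def save_best(exp_dir):
    # Keep only the weights with the best val_loss so far, and return the
    # checkpoint path so the caller can reload it after training
    best_path = os.path.join(exp_dir, 'best', 'model.ckpt')
    os.makedirs(os.path.dirname(best_path), exist_ok=True)
    callback = tf.keras.callbacks.ModelCheckpoint(
        filepath=best_path,
        monitor='val_loss',
        save_best_only=True,
        save_weights_only=True)
    return best_path, callback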
Example #2
    def train_model(subdataset, species):
        dataset_dir = os.path.join(dataset_base, subdataset, species)

        dataset = CustomDataset(
            dataset_dir, 'training', 
            img_generator=img_data_gen, mask_generator=mask_data_gen,
            preprocessing_function=preprocess_input
        )
        # Validation split: no augmentation generators, only preprocessing
        dataset_valid = CustomDataset(
            dataset_dir, 'validation',
            preprocessing_function=preprocess_input
        )

        train_dataset = tf.data.Dataset.from_generator(
            lambda: dataset,
            output_types=(tf.float32, tf.float32),
            output_shapes=([img_h, img_w, 3], [img_h, img_w, 1])
        ).batch(bs).repeat()

        valid_dataset = tf.data.Dataset.from_generator(
            lambda: dataset_valid,
            output_types=(tf.float32, tf.float32),
            output_shapes=([img_h, img_w, 3], [img_h, img_w, 1])
        ).batch(bs).repeat()

        num_classes = 3
        model = create_model(img_h, img_w, num_classes=num_classes)

        model.summary()

        # Loss
        # Sparse categorical crossentropy takes integer masks as targets
        # instead of one-hot encoded labels
        loss = tf.keras.losses.SparseCategoricalCrossentropy()

        # Optimizer: Adam with the configured learning rate
        optimizer = tf.keras.optimizers.Adam(learning_rate=lr)

        # Metrics: pixel accuracy and mean intersection-over-union
        metrics = ['accuracy', gen_meanIoU(num_classes)]

        # Compile Model
        model.compile(optimizer=optimizer, loss=loss, metrics=metrics)

        # ---- Callbacks ----
        exps_dir = "experiments"
        if not os.path.exists(exps_dir):
            os.makedirs(exps_dir)

        model_dir = os.path.join(exps_dir, MODEL_NAME)
        if not os.path.exists(model_dir):
            os.makedirs(model_dir)

        exp_dir = os.path.join(model_dir, subdataset, species, str(now))
        if not os.path.exists(exp_dir):
            os.makedirs(exp_dir)

        callbacks_list = []

        # Model checkpoint
        if CHECKPOINTS:
            callbacks_list.append(callbacks.checkpoints(exp_dir))

        # Early stopping
        if EARLY_STOP:
            callbacks_list.append(callbacks.early_stopping(patience=10))

        # Save best model
        # ----------------
        best_checkpoint_path = None
        if SAVE_BEST:
            best_checkpoint_path, save_best_callback = callbacks.save_best(exp_dir)
            callbacks_list.append(save_best_callback)


        model.fit(
            x=train_dataset,
            epochs=epochs,
            steps_per_epoch=len(dataset),
            validation_data=valid_dataset,
            validation_steps=len(dataset_valid), 
            callbacks=callbacks_list
        )

        if PLOT:
            if best_checkpoint_path:
                model.load_weights(best_checkpoint_path)
            # ---- Prediction ----
            plot_predictions(model, valid_dataset, num_classes)
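
The gen_meanIoU metric factory used in Example #2 is also project-local and not shown. A minimal sketch, assuming it adapts tf.keras.metrics.MeanIoU to a softmax output by taking the per-pixel argmax first (the subclass name is invented here):

import tensorflow as tf

def gen_meanIoU(num_classes):
    # MeanIoU expects integer class indices, while the model outputs
    # per-pixel softmax scores, so reduce with argmax before updating
    class SparseMeanIoU(tf.keras.metrics.MeanIoU):
        def update_state(self, y_true, y_pred, sample_weight=None):
            y_pred = tf.argmax(y_pred, axis=-1)
            return super().update_state(y_true, y_pred, sample_weight)
    return SparseMeanIoU(num_classes=num_classes, name='meanIoU')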
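
Finally, plot_predictions is not shown either. A hypothetical sketch with matplotlib, assuming it renders the input, the ground-truth mask, and the predicted mask side by side (the layout and the num_batches parameter are inventions for illustration):

import matplotlib.pyplot as plt
import tensorflow as tf

def plot_predictions(model, dataset, num_classes, num_batches=1):
    # Compare ground-truth masks against the model's per-pixel argmax
    for images, masks in dataset.take(num_batches):
        preds = tf.argmax(model.predict(images), axis=-1)
        for image, mask, pred in zip(images, masks, preds):
            _, axes = plt.subplots(1, 3, figsize=(12, 4))
            # The input is shown after preprocessing, so colors may look off
            axes[0].imshow(image.numpy())
            axes[0].set_title('input')
            axes[1].imshow(tf.squeeze(mask).numpy(), vmin=0, vmax=num_classes - 1)
            axes[1].set_title('ground truth')
            axes[2].imshow(pred.numpy(), vmin=0, vmax=num_classes - 1)
            axes[2].set_title('prediction')
            for ax in axes:
                ax.axis('off')
            plt.show()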