def main(model_name, model=models.wnet_connected, num_epochs=5, batch_size=2):
    '''Trains a depth-estimation model on the KITTI dataset.

    Args:
        model_name: identifier used for the checkpoint filename and the
            TensorBoard log directory.
        model: zero-argument callable that builds and returns the Keras model.
        num_epochs: number of training epochs.
        batch_size: samples per gradient update.

    Returns:
        The trained Keras model.
    '''
    segmentation_models.set_framework('tf.keras')
    print(segmentation_models.framework())

    # Build list of training filenames (paths are machine-specific).
    X_folderpath = r"G:\Documents\KITTI\data\train\X\\"
    y_folderpath = r"G:\Documents\KITTI\data\train\y\\"
    X_filelist = glob(X_folderpath + '*.png')
    y_filelist = glob(y_folderpath + '*.png')

    # Build list of validation filenames
    X_val_folderpath = r"G:\Documents\KITTI\data\val\X\\"
    y_val_folderpath = r"G:\Documents\KITTI\data\val\y\\"
    X_val_filelist = glob(X_val_folderpath + '*.png')
    y_val_filelist = glob(y_val_folderpath + '*.png')

    model = model()
    # `learning_rate` replaces the deprecated `lr` keyword in tf.keras.
    model.compile(loss='mean_squared_error',
                  optimizer=Adam(learning_rate=1e-4))

    # Save only the weights that achieve the best validation loss.
    filepath = f"{model_name}_weights_best.hdf5"
    checkpoint = ModelCheckpoint(filepath,
                                 monitor='val_loss',
                                 verbose=1,
                                 save_best_only=True,
                                 mode='min')

    # Tensorboard setup: one timestamped run directory per invocation.
    log_dir = f"logs\\{model_name}\\" + datetime.datetime.now().strftime(
        "%Y%m%d-%H%M%S")
    tensorboard_callback = TensorBoard(log_dir=log_dir)

    callbacks_list = [checkpoint, tensorboard_callback]

    # Model.fit accepts generators directly; fit_generator is deprecated
    # (and removed in recent tf.keras releases).
    model.fit(
        _batchGenerator(X_filelist, y_filelist, batch_size),
        epochs=num_epochs,
        steps_per_epoch=len(X_filelist) // batch_size,
        validation_data=_valBatchGenerator(X_val_filelist, y_val_filelist,
                                           batch_size),
        validation_steps=len(X_val_filelist) // batch_size,
        max_queue_size=1,
        callbacks=callbacks_list,
        verbose=2)

    return model
Ejemplo n.º 2
0
import geopandas as gpd
import numpy as np
import rasterio as rio
from rasterio.windows import Window
import tensorflow as tf
from tensorflow.keras.utils import to_categorical
import segmentation_models as sm
from segmentation_models.metrics import iou_score
from segmentation_models.losses import jaccard_loss, categorical_focal_jaccard_loss
from sklearn.preprocessing import MinMaxScaler

from models import regression_head, unet_model, unet_reg
import argparse

sm.set_framework('tf.keras')
sm.framework()

# indices https://www.sciencedirect.com/science/article/pii/S0303243422000290

## look into switching the loss function to categorical_cross_entropy and maybe the built-in Keras IOU metric (done)

### change training data file to csv files (done mostly)
## change to 256 chip size, see if the csv files make a difference (done)
## if not get generators working (not needed yet)
### generate thresholded impervious data, maybe across all datasets (all years) (done)
#### create indices (ASI, NDVI, .....) (in progress)
### look into Dice/Jaccard loss as an improvement
### when reading impervious mask REMEMBER data[data == 127] = 0 (done)

## Do I calculate indices before scaling by 16 bit scaler???
Ejemplo n.º 3
0
import pytest
import numpy as np

import segmentation_models as sm
from segmentation_models.metrics import IOUScore, FScore
from segmentation_models.losses import JaccardLoss, DiceLoss

# Resolve which Keras implementation segmentation_models was configured
# with, and import the matching package so the rest of the module uses the
# same backend. Any other framework name is a configuration error.
if sm.framework() == sm._TF_KERAS_FRAMEWORK_NAME:
    from tensorflow import keras
elif sm.framework() == sm._KERAS_FRAMEWORK_NAME:
    import keras
else:
    raise ValueError('Incorrect framework {}'.format(sm.framework()))

# Metric classes under test (presumably consumed by parametrized tests
# further down the file — not visible in this chunk).
METRICS = [
    IOUScore,
    FScore,
]

# Loss classes under test.
LOSSES = [
    JaccardLoss,
    DiceLoss,
]

GT0 = np.array(
    [
        [0, 0, 0],
        [0, 0, 0],
        [0, 0, 0],
    ],
    dtype='float32',
Ejemplo n.º 4
0
    def _build_model(self, lr):
        """Instantiate the segmentation model held on this object and
        compile it for training.

        Args:
            lr: learning rate for the Adam optimizer.
        """
        # NOTE(review): `self.model` initially holds a model factory; after
        # this call it holds the built Keras model instance.
        built = self.model(backbone_name=self.backbone_name,
                           input_shape=self.input_shape,
                           classes=self.classes,
                           encoder_weights=self.encoder_weights,
                           activation=self.activation)
        self.model = built

        # Compiled with a focal-dice loss and IOU/precision/recall metrics.
        built.compile(Adam(lr=lr),
                      categorical_focal_dice_loss,
                      [iou_score, precision, recall])

    def train(self,
              lr,
              train_gen,
              val_gen,
              epochs,
              workers=4,
              use_multiprocessing=False):
        """Build the model and fit it on the given data generators.

        Args:
            lr: learning rate for the optimizer.
            train_gen: generator (or keras.utils.Sequence) yielding training
                batches.
            val_gen: generator yielding validation batches.
            epochs: number of training epochs.
            workers: number of data-loading workers.
            use_multiprocessing: whether to use process-based data loading.
        """
        self._build_model(lr)

        # Model.fit accepts generators directly; fit_generator is deprecated
        # in tf.keras and removed in recent releases.
        self.model.fit(train_gen,
                       validation_data=val_gen,
                       epochs=epochs,
                       workers=workers,
                       use_multiprocessing=use_multiprocessing)


if __name__ == "__main__":
    # Report which Keras backend segmentation_models is currently bound to.
    active_framework = sm.framework()
    print(active_framework)