Example #1
def load_model_HDF5(model_path):
    """Loads model (HDF5 format), training setup and training history.
    This format makes it difficult to load a trained model for further training,
    but works good enough for one training round."""

    # load the loss name and preprocessing parameters used during training
    info = get_model_info(model_path)
    loss = info["model"]["loss"]
    dynamic_range = info["preprocessing"]["dynamic_range"]

    # load autoencoder
    if loss == "mssim":
        model = keras.models.load_model(
            filepath=model_path,
            custom_objects={
                "LeakyReLU": keras.layers.LeakyReLU,
                "loss": losses.mssim_loss(dynamic_range),
                "mssim": metrics.mssim_metric(dynamic_range),
            },
            compile=True,
        )

    elif loss == "ssim":
        model = keras.models.load_model(
            filepath=model_path,
            custom_objects={
                "LeakyReLU": keras.layers.LeakyReLU,
                "loss": losses.ssim_loss(dynamic_range),
                "ssim": metrics.ssim_metric(dynamic_range),
            },
            compile=True,
        )

    else:
        model = keras.models.load_model(
            filepath=model_path,
            custom_objects={
                "LeakyReLU": keras.layers.LeakyReLU,
                "l2_loss": losses.l2_loss,
                "ssim": losses.ssim_loss(dynamic_range),
                "mssim": metrics.mssim_metric(dynamic_range),
            },
            compile=True,
        )

    # load training history
    dir_name = os.path.dirname(model_path)
    history = pd.read_csv(os.path.join(dir_name, "history.csv"))

    return model, info, history
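As a usage sketch (the model path is purely illustrative, and get_model_info, losses and metrics are assumed to be the project's own helpers imported alongside this function), the loader could be called like this:

# hedged usage sketch: load a trained autoencoder together with its metadata and history
model, info, history = load_model_HDF5("saved_models/mvtecCAE/ssim/model.h5")  # hypothetical path
print(info["model"]["loss"])                  # loss name recorded at training time
print(history[["loss", "val_loss"]].tail())   # last few epochs of the training curves
model.summary()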
Example #2
def load_model_HDF5(model_path):
    """
    Loads model (HDF5 format), training setup and training history.
    """

    # load parameters
    info = get_model_info(model_path)
    loss = info["model"]["loss"]
    dynamic_range = info["preprocessing"]["dynamic_range"]

    # load autoencoder
    if loss == "mssim":
        model = keras.models.load_model(
            filepath=model_path,
            custom_objects={
                "LeakyReLU": keras.layers.LeakyReLU,
                "loss": losses.mssim_loss(dynamic_range),
                "mssim": metrics.mssim_metric(dynamic_range),
            },
            compile=True,
        )

    elif loss == "ssim":
        model = keras.models.load_model(
            filepath=model_path,
            custom_objects={
                "LeakyReLU": keras.layers.LeakyReLU,
                "loss": losses.ssim_loss(dynamic_range),
                "ssim": metrics.ssim_metric(dynamic_range),
            },
            compile=True,
        )

    else:
        model = keras.models.load_model(
            filepath=model_path,
            custom_objects={
                "LeakyReLU": keras.layers.LeakyReLU,
                "l2_loss": losses.l2_loss,
                "ssim": losses.ssim_loss(dynamic_range),
                "mssim": metrics.mssim_metric(dynamic_range),
            },
            compile=True,
        )

    # load training history
    dir_name = os.path.dirname(model_path)
    history = pd.read_csv(os.path.join(dir_name, "history.csv"))

    return model, info, history
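The three branches above differ only in the custom_objects they register. As a refactoring sketch, under the same assumed keras, losses and metrics imports, the mapping could be built once by a small helper and passed to a single load_model call:

# hedged refactoring sketch: build the custom_objects mapping once per loss name
def build_custom_objects(loss, dynamic_range):
    custom_objects = {"LeakyReLU": keras.layers.LeakyReLU}
    if loss == "mssim":
        custom_objects["loss"] = losses.mssim_loss(dynamic_range)
        custom_objects["mssim"] = metrics.mssim_metric(dynamic_range)
    elif loss == "ssim":
        custom_objects["loss"] = losses.ssim_loss(dynamic_range)
        custom_objects["ssim"] = metrics.ssim_metric(dynamic_range)
    else:  # l2
        custom_objects["l2_loss"] = losses.l2_loss
        custom_objects["ssim"] = losses.ssim_loss(dynamic_range)
        custom_objects["mssim"] = metrics.mssim_metric(dynamic_range)
    return custom_objects

# the loader body would then reduce to a single call:
# model = keras.models.load_model(
#     filepath=model_path,
#     custom_objects=build_custom_objects(loss, dynamic_range),
#     compile=True,
# )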
Example #3
    def __init__(
        self,
        input_directory,
        architecture,
        color_mode,
        loss,
        batch_size=8,
        verbose=True,
    ):
        # path attributes
        self.input_directory = input_directory
        self.save_dir = None
        self.log_dir = None

        # model and data attributes
        self.architecture = architecture
        self.color_mode = color_mode
        self.loss = loss
        self.batch_size = batch_size

        # learning rate finder attributes
        self.opt_lr = None
        self.opt_lr_i = None
        self.base_lr = None
        self.base_lr_i = None

        # training attributes
        self.learner = None

        # results attributes
        self.hist = None
        self.epochs_trained = None

        # build model and preprocessing variables
        if architecture == "mvtecCAE":
            # Preprocessing parameters
            self.model = mvtecCAE.build_model(color_mode)
            self.rescale = mvtecCAE.RESCALE
            self.shape = mvtecCAE.SHAPE
            self.preprocessing_function = mvtecCAE.PREPROCESSING_FUNCTION
            self.preprocessing = mvtecCAE.PREPROCESSING
            self.vmin = mvtecCAE.VMIN
            self.vmax = mvtecCAE.VMAX
            self.dynamic_range = mvtecCAE.DYNAMIC_RANGE
            # Learning Rate Finder parameters
            self.start_lr = mvtecCAE.START_LR
            self.lr_max_epochs = mvtecCAE.LR_MAX_EPOCHS
            self.lrf_decrease_factor = mvtecCAE.LRF_DECREASE_FACTOR
            # Training parameters
            self.early_stopping = mvtecCAE.EARLY_STOPPING
            self.reduce_on_plateau = mvtecCAE.REDUCE_ON_PLATEAU

        elif architecture == "baselineCAE":
            # Preprocessing parameters
            self.model = baselineCAE.build_model(color_mode)
            self.rescale = baselineCAE.RESCALE
            self.shape = baselineCAE.SHAPE
            self.preprocessing_function = baselineCAE.PREPROCESSING_FUNCTION
            self.preprocessing = baselineCAE.PREPROCESSING
            self.vmin = baselineCAE.VMIN
            self.vmax = baselineCAE.VMAX
            self.dynamic_range = baselineCAE.DYNAMIC_RANGE
            # Learning Rate Finder parameters
            self.start_lr = baselineCAE.START_LR
            self.lr_max_epochs = baselineCAE.LR_MAX_EPOCHS
            self.lrf_decrease_factor = baselineCAE.LRF_DECREASE_FACTOR
            # Training parameters
            self.early_stopping = baselineCAE.EARLY_STOPPING
            self.reduce_on_plateau = baselineCAE.REDUCE_ON_PLATEAU

        elif architecture == "inceptionCAE":
            # Preprocessing parameters
            self.model = inceptionCAE.build_model(color_mode)
            self.rescale = inceptionCAE.RESCALE
            self.shape = inceptionCAE.SHAPE
            self.preprocessing_function = inceptionCAE.PREPROCESSING_FUNCTION
            self.preprocessing = inceptionCAE.PREPROCESSING
            self.vmin = inceptionCAE.VMIN
            self.vmax = inceptionCAE.VMAX
            self.dynamic_range = inceptionCAE.DYNAMIC_RANGE
            # Learning Rate Finder parameters
            self.start_lr = inceptionCAE.START_LR
            self.lr_max_epochs = inceptionCAE.LR_MAX_EPOCHS
            self.lrf_decrease_factor = inceptionCAE.LRF_DECREASE_FACTOR
            # Training parameters
            self.early_stopping = inceptionCAE.EARLY_STOPPING
            self.reduce_on_plateau = inceptionCAE.REDUCE_ON_PLATEAU

        elif architecture == "resnetCAE":
            # Preprocessing parameters
            self.model = resnetCAE.build_model(color_mode)
            self.rescale = resnetCAE.RESCALE
            self.shape = resnetCAE.SHAPE
            self.preprocessing_function = resnetCAE.PREPROCESSING_FUNCTION
            self.preprocessing = resnetCAE.PREPROCESSING
            self.vmin = resnetCAE.VMIN
            self.vmax = resnetCAE.VMAX
            self.dynamic_range = resnetCAE.DYNAMIC_RANGE
            # Learning Rate Finder parameters
            self.start_lr = resnetCAE.START_LR
            self.lr_max_epochs = resnetCAE.LR_MAX_EPOCHS
            self.lrf_decrease_factor = resnetCAE.LRF_DECREASE_FACTOR
            # Training parameters
            self.early_stopping = resnetCAE.EARLY_STOPPING
            self.reduce_on_plateau = resnetCAE.REDUCE_ON_PLATEAU

        elif architecture == "skipCAE":
            # Preprocessing parameters
            self.model = skipCAE.build_model(color_mode)
            self.rescale = skipCAE.RESCALE
            self.shape = skipCAE.SHAPE
            self.preprocessing_function = skipCAE.PREPROCESSING_FUNCTION
            self.preprocessing = skipCAE.PREPROCESSING
            self.vmin = skipCAE.VMIN
            self.vmax = skipCAE.VMAX
            self.dynamic_range = skipCAE.DYNAMIC_RANGE
            # Learning Rate Finder parameters
            self.start_lr = skipCAE.START_LR
            self.lr_max_epochs = skipCAE.LR_MAX_EPOCHS
            self.lrf_decrease_factor = skipCAE.LRF_DECREASE_FACTOR
            # Training parameters
            self.early_stopping = skipCAE.EARLY_STOPPING
            self.reduce_on_plateau = skipCAE.REDUCE_ON_PLATEAU

        # verbosity
        self.verbose = verbose
        if verbose:
            self.model.summary()

        # set loss function
        if loss == "ssim":
            self.loss_function = losses.ssim_loss(self.dynamic_range)
        elif loss == "mssim":
            self.loss_function = losses.mssim_loss(self.dynamic_range)
        elif loss == "l2":
            self.loss_function = losses.l2_loss

        # set metrics to monitor training
        if color_mode == "grayscale":
            self.metrics = [metrics.ssim_metric(self.dynamic_range)]
            self.hist_keys = ("loss", "val_loss", "ssim", "val_ssim")
        elif color_mode == "rgb":
            self.metrics = [metrics.mssim_metric(self.dynamic_range)]
            self.hist_keys = ("loss", "val_loss", "mssim", "val_mssim")

        # create directory to save model and logs
        self.create_save_dir()

        # compile model
        self.model.compile(loss=self.loss_function,
                           optimizer="adam",
                           metrics=self.metrics)
        return
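Every architecture branch above copies the same set of module-level constants. A possible way to collapse the repetition, sketched here under the assumption that the architecture modules are already imported and expose the same upper-case names, is a name-to-module lookup:

# hedged refactoring sketch: resolve the architecture name to its module once
ARCHITECTURE_MODULES = {
    "mvtecCAE": mvtecCAE,
    "baselineCAE": baselineCAE,
    "inceptionCAE": inceptionCAE,
    "resnetCAE": resnetCAE,
    "skipCAE": skipCAE,
}

def get_architecture_module(architecture):
    try:
        return ARCHITECTURE_MODULES[architecture]
    except KeyError:
        raise ValueError(f"unknown architecture: {architecture!r}")

# inside __init__, the per-architecture attributes could then be set generically:
# arch = get_architecture_module(architecture)
# self.model = arch.build_model(color_mode)
# self.rescale = arch.RESCALE
# self.dynamic_range = arch.DYNAMIC_RANGE
# ... and so on for the remaining constants (START_LR, EARLY_STOPPING, etc.)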
Example #4
    def __init__(
        self,
        input_directory,
        architecture,
        color_mode,
        loss,
        batch_size=8,
        verbose=True,
    ):
        # path attributes
        self.input_directory = input_directory
        self.save_dir = None
        self.log_dir = None

        # model and data attributes
        self.architecture = architecture
        self.color_mode = color_mode
        self.loss = loss
        self.batch_size = batch_size

        # learning rate finder attributes
        self.opt_lr = None
        self.opt_lr_i = None
        self.base_lr = None
        self.base_lr_i = None

        # training attributes
        self.learner = None

        # results attributes
        self.hist = None
        self.epochs_trained = None

        # build model and preprocessing variables
        if architecture == "mvtec":
            self.model = mvtec.build_model(color_mode)
            self.rescale = mvtec.RESCALE
            self.shape = mvtec.SHAPE
            self.preprocessing_function = mvtec.PREPROCESSING_FUNCTION
            self.preprocessing = mvtec.PREPROCESSING
            self.vmin = mvtec.VMIN
            self.vmax = mvtec.VMAX
            self.dynamic_range = mvtec.DYNAMIC_RANGE
        elif architecture == "mvtec2":
            self.model = mvtec_2.build_model(color_mode)
            self.rescale = mvtec_2.RESCALE
            self.shape = mvtec_2.SHAPE
            self.preprocessing_function = mvtec_2.PREPROCESSING_FUNCTION
            self.preprocessing = mvtec_2.PREPROCESSING
            self.vmin = mvtec_2.VMIN
            self.vmax = mvtec_2.VMAX
            self.dynamic_range = mvtec_2.DYNAMIC_RANGE
        elif architecture == "baselineCAE":
            self.model = baselineCAE.build_model(color_mode)
            self.rescale = baselineCAE.RESCALE
            self.shape = baselineCAE.SHAPE
            self.preprocessing_function = baselineCAE.PREPROCESSING_FUNCTION
            self.preprocessing = baselineCAE.PREPROCESSING
            self.vmin = baselineCAE.VMIN
            self.vmax = baselineCAE.VMAX
            self.dynamic_range = baselineCAE.DYNAMIC_RANGE
        elif architecture == "inceptionCAE":
            self.model = inceptionCAE.build_model(color_mode)
            self.rescale = inceptionCAE.RESCALE
            self.shape = inceptionCAE.SHAPE
            self.preprocessing_function = inceptionCAE.PREPROCESSING_FUNCTION
            self.preprocessing = inceptionCAE.PREPROCESSING
            self.vmin = inceptionCAE.VMIN
            self.vmax = inceptionCAE.VMAX
            self.dynamic_range = inceptionCAE.DYNAMIC_RANGE
        elif architecture == "resnetCAE":
            self.model = resnetCAE.build_model(color_mode)
            self.rescale = resnetCAE.RESCALE
            self.shape = resnetCAE.SHAPE
            self.preprocessing_function = resnetCAE.PREPROCESSING_FUNCTION
            self.preprocessing = resnetCAE.PREPROCESSING
            self.vmin = resnetCAE.VMIN
            self.vmax = resnetCAE.VMAX
            self.dynamic_range = resnetCAE.DYNAMIC_RANGE

        # verbosity
        self.verbose = verbose
        if verbose:
            self.model.summary()

        # set loss function
        if loss == "ssim":
            self.loss_function = losses.ssim_loss(self.dynamic_range)
        elif loss == "mssim":
            self.loss_function = losses.mssim_loss(self.dynamic_range)
        elif loss == "l2":
            self.loss_function = losses.l2_loss

        # set metrics to monitor training
        if color_mode == "grayscale":
            self.metrics = [metrics.ssim_metric(self.dynamic_range)]
            self.hist_keys = ("loss", "val_loss", "ssim", "val_ssim")
        elif color_mode == "rgb":
            self.metrics = [metrics.mssim_metric(self.dynamic_range)]
            self.hist_keys = ("loss", "val_loss", "mssim", "val_mssim")

        # create directory to save model and logs
        self.create_save_dir()

        # compile model
        optimizer = keras.optimizers.Adam(learning_rate=START_LR)
        self.model.compile(loss=self.loss_function,
                           optimizer=optimizer,
                           metrics=self.metrics)
        return
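For context, here is a hedged construction sketch; the class name AutoEncoder and the dataset path are hypothetical, since only the __init__ body appears above:

# hedged usage sketch: constructing the object builds, summarizes and compiles the model
autoencoder = AutoEncoder(                   # hypothetical name of the class owning this __init__
    input_directory="data/mvtec/hazelnut",   # hypothetical dataset path
    architecture="baselineCAE",
    color_mode="grayscale",
    loss="ssim",
    batch_size=8,
    verbose=True,
)
# after construction the compiled model is available as autoencoder.model,
# ready for the learning-rate finder and training steps defined elsewhere in the class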