Example #1
    def __init__(
        self,
        data_path,
        save_path,
        eval_paths=[],
        basic_model=-2,
        model=None,
        compile=True,
        batch_size=128,
        lr_base=0.001,
        lr_decay=0.05,
        lr_min=0,
        eval_freq=1,
        random_status=0,
        custom_objects=None,
    ):
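        # Resolve self.model / self.basic_model from the `model` and `basic_model`
        # arguments; the table printed below lists the supported combinations.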
        custom_objects = {} if custom_objects is None else custom_objects
        self.model, self.basic_model = None, None
        if isinstance(model, str):
            if model.endswith(".h5") and os.path.exists(model) and isinstance(
                    basic_model, int):
                print(">>>> Load model from h5 file: %s..." % model)
                custom_objects.update({
                    "NormDense": NormDense,
                    "margin_softmax": losses.margin_softmax,
                    "arcface_loss": losses.arcface_loss,
                    "ArcfaceLoss": losses.ArcfaceLoss,
                    "CenterLoss": losses.CenterLoss,
                })
                with keras.utils.custom_object_scope(custom_objects):
                    self.model = keras.models.load_model(
                        model, compile=compile, custom_objects=custom_objects)
                self.basic_model = keras.models.Model(
                    self.model.inputs[0],
                    self.model.layers[basic_model].output)
                self.model.summary()
        elif isinstance(model, keras.models.Model):
            self.model = model
            self.basic_model = keras.models.Model(
                self.model.inputs[0], self.model.layers[basic_model].output)
        elif isinstance(basic_model, str):
            if basic_model.endswith(".h5") and os.path.exists(basic_model):
                custom_objects.update({
                    "batch_hard_triplet_loss": losses.batch_hard_triplet_loss,
                    "batch_all_triplet_loss": losses.batch_all_triplet_loss,
                    "BatchHardTripletLoss": losses.BatchHardTripletLoss,
                    "BatchAllTripletLoss": losses.BatchAllTripletLoss,
                })
                print(">>>> Load basic_model from h5 file: %s..." %
                      basic_model)
                with keras.utils.custom_object_scope(custom_objects):
                    self.basic_model = keras.models.load_model(
                        basic_model,
                        compile=compile,
                        custom_objects=custom_objects)
        elif isinstance(basic_model, keras.models.Model):
            self.basic_model = basic_model

        if self.basic_model is None:
            print(
                "Initialize model by:\n"
                "| basicmodel                               | model           |\n"
                "| ---------------------------------------- | --------------- |\n"
                "| model structure                          | None            |\n"
                "| basic model .h5 file                     | None            |\n"
                "| model layer index for basic model output | model .h5 file  |\n"
                "| model layer index for basic model output | model structure |\n"
            )
            return

        self.softmax, self.arcface, self.triplet = "softmax", "arcface", "triplet"

        self.batch_size = batch_size
        if tf.distribute.has_strategy():
            strategy = tf.distribute.get_strategy()
            self.batch_size = batch_size * strategy.num_replicas_in_sync
            print(">>>> num_replicas_in_sync: %d, batch_size: %d" %
                  (strategy.num_replicas_in_sync, self.batch_size))
        self.data_path, self.random_status = data_path, random_status
        self.train_ds, self.steps_per_epoch, self.classes = None, 0, 0
        self.is_triplet_dataset = False
        self.default_optimizer = "adam"
        self.metrics = ["accuracy"]
        my_evals = [
            evals.eval_callback(self.basic_model,
                                ii,
                                batch_size=batch_size,
                                eval_freq=eval_freq) for ii in eval_paths
        ]
        if len(my_evals) != 0:
            my_evals[-1].save_model = os.path.splitext(save_path)[0]
        basic_callbacks = myCallbacks.basic_callbacks(checkpoint=save_path,
                                                      evals=my_evals,
                                                      lr=lr_base,
                                                      lr_decay=lr_decay,
                                                      lr_min=lr_min)
        self.my_evals = my_evals
        self.basic_callbacks = basic_callbacks
        self.my_hist = self.basic_callbacks[-2]
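
A minimal usage sketch for this constructor (the enclosing class name is not shown in the excerpt, so `Train` is assumed here, and all paths are placeholders):

    # Hypothetical: build the trainer from a saved basic_model .h5 file,
    # which is handled by the `isinstance(basic_model, str)` branch above.
    tt = Train(
        data_path="./faces_emore",
        save_path="./keras_checkpoint.h5",
        eval_paths=["./lfw.bin"],
        basic_model="./basic_model.h5",
        model=None,
        batch_size=128,
    )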
Example #2
    def __init__(
        self,
        data_path,
        save_path,
        eval_paths=[],
        basic_model=None,
        model=None,
        compile=True,
        output_wd_multiply=1,  # works ONLY for SGDW
        custom_objects=None,
        batch_size=128,
        lr_base=0.001,
        lr_decay=0.05,  # lr_decay < 1 is an exponential decay rate; otherwise it is taken as the cosine decay_steps
        lr_decay_steps=0,  # lr_decay_steps < 1 updates lr per epoch, a larger value updates every [NUM] batches, a list selects ConstantDecayScheduler
        lr_min=0,
        eval_freq=1,
        random_status=0,
        dataset_cache=False,
    ):
        custom_objects = {} if custom_objects is None else custom_objects
        custom_objects.update({
            "NormDense": NormDense,
            "L2_decay_wdm": L2_decay_wdm,
            "margin_softmax": losses.margin_softmax,
            "MarginSoftmax": losses.MarginSoftmax,
            "arcface_loss": losses.arcface_loss,
            "ArcfaceLossT4": losses.ArcfaceLossT4,
            "ArcfaceLoss": losses.ArcfaceLoss,
            "CenterLoss": losses.CenterLoss,
            "batch_hard_triplet_loss": losses.batch_hard_triplet_loss,
            "batch_all_triplet_loss": losses.batch_all_triplet_loss,
            "BatchHardTripletLoss": losses.BatchHardTripletLoss,
            "BatchAllTripletLoss": losses.BatchAllTripletLoss,
        })
        self.model, self.basic_model, self.save_path = None, None, save_path
        if isinstance(model, str):
            if model.endswith(".h5") and os.path.exists(model):
                print(">>>> Load model from h5 file: %s..." % model)
                with keras.utils.custom_object_scope(custom_objects):
                    self.model = keras.models.load_model(
                        model, compile=compile, custom_objects=custom_objects)
                embedding_layer = basic_model if basic_model is not None else self.__search_embedding_layer__(
                    self.model)
                self.basic_model = keras.models.Model(
                    self.model.inputs[0],
                    self.model.layers[embedding_layer].output)
                # self.model.summary()
        elif isinstance(model, keras.models.Model):
            self.model = model
            embedding_layer = basic_model if basic_model is not None else self.__search_embedding_layer__(
                self.model)
            self.basic_model = keras.models.Model(
                self.model.inputs[0],
                self.model.layers[embedding_layer].output)
        elif isinstance(basic_model, str):
            if basic_model.endswith(".h5") and os.path.exists(basic_model):
                print(">>>> Load basic_model from h5 file: %s..." %
                      basic_model)
                with keras.utils.custom_object_scope(custom_objects):
                    self.basic_model = keras.models.load_model(
                        basic_model,
                        compile=compile,
                        custom_objects=custom_objects)
        elif isinstance(basic_model, keras.models.Model):
            self.basic_model = basic_model

        if self.basic_model is None:
            print(
                "Initialize model by:\n"
                "| basic_model                                                     | model           |\n"
                "| --------------------------------------------------------------- | --------------- |\n"
                "| model structure                                                 | None            |\n"
                "| basic model .h5 file                                            | None            |\n"
                "| None for 'embedding' layer or layer index of basic model output | model .h5 file  |\n"
                "| None for 'embedding' layer or layer index of basic model output | model structure |\n"
            )
            return

        self.softmax, self.arcface, self.triplet, self.center = "softmax", "arcface", "triplet", "center"

        self.batch_size = batch_size
        if tf.distribute.has_strategy():
            strategy = tf.distribute.get_strategy()
            self.batch_size = batch_size * strategy.num_replicas_in_sync
            print(">>>> num_replicas_in_sync: %d, batch_size: %d" %
                  (strategy.num_replicas_in_sync, self.batch_size))
        self.data_path, self.random_status = data_path, random_status
        self.train_ds, self.steps_per_epoch, self.classes = None, None, 0
        self.is_triplet_dataset = False
        self.default_optimizer = "adam"
        self.metrics = ["accuracy"]
        my_evals = [
            evals.eval_callback(self.basic_model,
                                ii,
                                batch_size=batch_size,
                                eval_freq=eval_freq) for ii in eval_paths
        ]
        if len(my_evals) != 0:
            my_evals[-1].save_model = os.path.splitext(save_path)[0]
        basic_callbacks = myCallbacks.basic_callbacks(
            checkpoint=save_path,
            evals=my_evals,
            lr=lr_base,
            lr_decay=lr_decay,
            lr_min=lr_min,
            lr_decay_steps=lr_decay_steps)
        self.my_evals = my_evals
        self.basic_callbacks = basic_callbacks
        self.my_hist = [
            ii for ii in self.basic_callbacks
            if isinstance(ii, myCallbacks.My_history)
        ][0]
        self.custom_callbacks = []
        self.output_wd_multiply = output_wd_multiply
        self.dataset_cache = dataset_cache
        self.is_distiller = False
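
Hedged sketches of the scheduler configurations described by the `lr_decay` / `lr_decay_steps` parameter comments above. The switching logic lives in `myCallbacks.basic_callbacks`, which is not shown here; `Train` and the paths are assumed placeholders:

    # lr_decay < 1 with lr_decay_steps < 1: exponential decay, updated per epoch.
    tt = Train("./faces_emore", "./keras_checkpoint.h5", lr_base=0.001, lr_decay=0.05, lr_decay_steps=0)

    # lr_decay >= 1: taken as the cosine decay_steps.
    tt = Train("./faces_emore", "./keras_checkpoint.h5", lr_base=0.001, lr_decay=45)

    # lr_decay_steps as a list: ConstantDecayScheduler, dropping lr at the listed steps.
    tt = Train("./faces_emore", "./keras_checkpoint.h5", lr_base=0.1, lr_decay=0.1, lr_decay_steps=[10, 20, 30])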
Example #3
    def __init__(
        self,
        data_path,
        save_path,
        eval_paths=[],
        basic_model=None,
        model=None,
        compile=True,
        output_weight_decay=0,  # L2 regularizer for output layer, 0 for None, >=1 for value in basic_model, (0, 1) for specific value
        custom_objects=None,
        batch_size=128,
        lr_base=0.001,
        lr_decay=0.05,  # m_mul for cosine decay, or decay_rate for exponential / constant decay
        lr_decay_steps=0,  # <=1 for Exponential, (1, 500) for Cosine decay on epoch, >= 500 for Cosine decay on batch, list for Constant
        lr_min=0,
        eval_freq=1,
        random_status=0,
        image_per_class=0,  # For triplet training, image_per_class is raised to 4 if it is < 4
        teacher_model_interf=None,  # Teacher model generating embedding data, used for distillation training.
    ):
        from inspect import getmembers, isfunction, isclass

        custom_objects = {} if custom_objects is None else custom_objects
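        # Register every function and class defined in the losses module as a Keras custom object.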
        custom_objects.update({
            name: member
            for name, member in getmembers(losses)
            if isfunction(member) or isclass(member)
        })
        custom_objects.update({"NormDense": models.NormDense})

        self.model, self.basic_model, self.save_path, self.default_type = None, None, save_path, None
        if isinstance(model, str):
            if model.endswith(".h5") and os.path.exists(model):
                print(">>>> Load model from h5 file: %s..." % model)
                with keras.utils.custom_object_scope(custom_objects):
                    self.model = keras.models.load_model(
                        model, compile=compile, custom_objects=custom_objects)
                embedding_layer = basic_model if basic_model is not None else self.__search_embedding_layer__(
                    self.model)
                self.basic_model = keras.models.Model(
                    self.model.inputs[0],
                    self.model.layers[embedding_layer].output)
                # self.model.summary()
        elif isinstance(model, keras.models.Model):
            self.model = model
            embedding_layer = basic_model if basic_model is not None else self.__search_embedding_layer__(
                self.model)
            self.basic_model = keras.models.Model(
                self.model.inputs[0],
                self.model.layers[embedding_layer].output)
            self.default_type = "MODEL"
            print(
                ">>>> Model structure specified, the output layer will be kept unchanged"
            )
        elif isinstance(basic_model, str):
            if basic_model.endswith(".h5") and os.path.exists(basic_model):
                print(">>>> Load basic_model from h5 file: %s..." %
                      basic_model)
                with keras.utils.custom_object_scope(custom_objects):
                    self.basic_model = keras.models.load_model(
                        basic_model,
                        compile=compile,
                        custom_objects=custom_objects)
        elif isinstance(basic_model, keras.models.Model):
            self.basic_model = basic_model

        if self.basic_model is None:
            print(
                "Initialize model by:\n"
                "| basic_model                                                     | model           |\n"
                "| --------------------------------------------------------------- | --------------- |\n"
                "| model structure                                                 | None            |\n"
                "| basic model .h5 file                                            | None            |\n"
                "| None for 'embedding' layer or layer index of basic model output | model .h5 file  |\n"
                "| None for 'embedding' layer or layer index of basic model output | model structure |\n"
            )
            return

        self.softmax, self.arcface, self.triplet, self.center, self.distill = "softmax", "arcface", "triplet", "center", "distill"
        if output_weight_decay >= 1:
            l2_weight_decay = 0
            for ii in self.basic_model.layers:
                if hasattr(ii, "kernel_regularizer") and isinstance(
                        ii.kernel_regularizer, keras.regularizers.L2):
                    l2_weight_decay = ii.kernel_regularizer.l2
                    break
            print(">>>> L2 regularizer value from basic_model:",
                  l2_weight_decay)
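            # keras.regularizers.L2 adds l2 * sum(w ** 2) to the loss, so its
            # gradient is 2 * l2 * w; that accounts for the factor of 2 below.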
            output_weight_decay *= l2_weight_decay * 2
        self.output_weight_decay = output_weight_decay

        self.batch_size = batch_size
        if tf.distribute.has_strategy():
            strategy = tf.distribute.get_strategy()
            self.batch_size = batch_size * strategy.num_replicas_in_sync
            print(">>>> num_replicas_in_sync: %d, batch_size: %d" %
                  (strategy.num_replicas_in_sync, self.batch_size))
            self.data_options = tf.data.Options()
            self.data_options.experimental_distribute.auto_shard_policy = tf.data.experimental.AutoShardPolicy.DATA

        my_evals = [
            evals.eval_callback(self.basic_model,
                                ii,
                                batch_size=batch_size,
                                eval_freq=eval_freq) for ii in eval_paths
        ]
        if len(my_evals) != 0:
            my_evals[-1].save_model = os.path.splitext(save_path)[0]
        self.my_history, self.model_checkpoint, self.lr_scheduler, self.gently_stop = myCallbacks.basic_callbacks(
            checkpoint=save_path,
            evals=my_evals,
            lr=lr_base,
            lr_decay=lr_decay,
            lr_min=lr_min,
            lr_decay_steps=lr_decay_steps)
        self.my_evals, self.custom_callbacks = my_evals, []
        self.metrics = ["accuracy"]
        self.default_optimizer = "adam"

        self.data_path, self.random_status = data_path, random_status
        self.image_per_class, self.teacher_model_interf = image_per_class, teacher_model_interf
        self.train_ds, self.steps_per_epoch, self.classes = None, None, 0
        self.is_triplet_dataset, self.is_distill_ds = False, False
        self.distill_emb_map_layer = None
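
A short sketch of the `output_weight_decay` conventions, combining the parameter comment with the branch above (hedged: `Train` and the paths are placeholders):

    # output_weight_decay == 0      -> no L2 regularizer on the output layer.
    # 0 < output_weight_decay < 1   -> used directly as the L2 value.
    # output_weight_decay >= 1      -> scaled by 2 * the L2 value found in basic_model.
    tt = Train("./faces_emore", "./keras_checkpoint.h5", output_weight_decay=1)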