Example #1
    def setup_trainer(config_data: dict, model: PEModel, training_paf: tuple,
                      training_heatmap: tuple, gen_layer):
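        """
        Build and compile a trainer for the given PEModel, applying layer
        freezing, L1/L2 regularization and optional distillation according
        to config_data.
        """
        # Unpack the data tensors and their corresponding training masks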
        paf, paf_mask = training_paf
        heatmap, heatmap_mask = training_heatmap
        iterator = gen_layer.get_iterator()
        absent_human_masks = iterator[RIterator.ABSENT_HUMAN_MASK]
        trainer = TrainerBuilder.trainer_from_dict(
            model=model,
            train_inputs=[gen_layer],
            label_tensors={
                PETrainer.TRAINING_HEATMAP: heatmap.get_data_tensor(),
                PETrainer.TRAINING_PAF: paf.get_data_tensor(),
                PETrainer.TRAINING_MASK: absent_human_masks,
                PETrainer.TRAINING_HEATMAP_MASK:
                    heatmap_mask.get_data_tensor(),
                PETrainer.TRAINING_PAF_MASK: paf_mask.get_data_tensor()
            },
            info_dict=config_data[ModelAssembler.TRAINER_INFO])

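        # Freeze the layers that the config marks as untrainable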
        untrainable_layers = config_data[ModelAssembler.UNTRAINABLE_LAYERS]
        if untrainable_layers is not None:
            layers = [(name, False) for name in untrainable_layers]
            trainer.set_layers_trainable(layers)

        # Set l1 regularization
        l1_reg = config_data[ModelAssembler.L1_REG]
        if l1_reg is not None:
            l1_reg = float(l1_reg)
            l1_reg_layers = config_data[ModelAssembler.L1_REG_LAYERS]
            reg_config = [(layer, l1_reg) for layer in l1_reg_layers]
            trainer.set_l1_reg(reg_config)

        # Set l2 regularization
        l2_reg = config_data[ModelAssembler.L2_REG]
        if l2_reg is not None:
            l2_reg = float(l2_reg)
            l2_reg_layers = config_data[ModelAssembler.L2_REG_LAYERS]
            reg_config = [(layer, l2_reg) for layer in l2_reg_layers]
            trainer.set_l2_reg(reg_config)

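        # Optionally wrap the trainer with a knowledge-distillation teacher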
        distillation_config = config_data.get(ModelAssembler.DISTILLATION)
        if distillation_config is not None:
            arch_path = distillation_config[ModelAssembler.TEACHER_ARCH]
            teacher = PEModel.from_json(arch_path)
            teacher.set_session(model.get_session())

            weights_path = distillation_config[ModelAssembler.TEACHER_WEIGHTS]
            teacher.load_weights(weights_path)

            distillator = DistillatorBuilder.distillator_from_dict(
                teacher=teacher, info_dict=distillation_config)
            trainer = distillator(trainer)

        trainer.compile()
        return trainer
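
For orientation, here is a minimal sketch of the config_data dictionary this function reads. The real keys are ModelAssembler constants whose string values are not shown in the excerpt, so a stand-in class with placeholder strings is used; the trainer description under TRAINER_INFO follows the format of Example #3, and every value is illustrative only.

# Hedged sketch: ConfigKeys stands in for the project's ModelAssembler
# constants, and all values below are illustrative placeholders.
class ConfigKeys:
    TRAINER_INFO = "trainer_info"
    UNTRAINABLE_LAYERS = "untrainable_layers"
    L1_REG = "l1_reg"
    L1_REG_LAYERS = "l1_reg_layers"
    L2_REG = "l2_reg"
    L2_REG_LAYERS = "l2_reg_layers"

config_data = {
    ConfigKeys.TRAINER_INFO: {
        "type": "DiceTrainer",               # serialized trainer description,
        "params": {"axes": [1, 2, 3],        # same shape as in Example #3
                   "eps": 1e-6}
    },
    ConfigKeys.UNTRAINABLE_LAYERS: None,     # or a list of layer names to freeze
    ConfigKeys.L1_REG: None,                 # e.g. "1e-5" to enable L1 regularization
    ConfigKeys.L1_REG_LAYERS: [],
    ConfigKeys.L2_REG: None,
    ConfigKeys.L2_REG_LAYERS: [],
    # The DISTILLATION entry is read with .get() and may be omitted entirely.
}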
Example #2
    def build_trainer(self, config_data: dict, model, gen_layer):
        iterator = gen_layer.get_iterator()
        # TODO: label tensors come from the iterator - how to connect different models?
        trainer = TrainerBuilder.trainer_from_dict(
            model=model,
            train_inputs=[gen_layer],
            label_tensors={
                "LABELS": iterator['mask'],
                "WEIGHT_MAP": None
            },
            info_dict=config_data[ModelAssemblerBase.TRAINER_INFO])

        return trainer
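
A hedged usage sketch of this builder: the assembler instance, config dictionary, model and generator layer are assumed to be created elsewhere in the pipeline, and only calls already shown in these examples are used.

# Hedged sketch: assembler, config_data, model and gen_layer are assumed
# to come from the surrounding pipeline; only the call pattern is shown.
trainer = assembler.build_trainer(config_data, model, gen_layer)
trainer.compile()                                    # as in Examples #1 and #3
print('LOSS TENSORS:', trainer.get_track_losses())   # as in Example #3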
Example #3
if __name__ == '__main__':
    from makiflow.models.classificator import Classificator
    from makiflow.layers import InputLayer

    # SEGMENTATION CASE
    print('SEGMENTATION CASE------------------------------------------------------------------------------------------')
    x = InputLayer(input_shape=[32, 128, 128, 3], name='input')

    model = Classificator(in_x=x, out_x=x)
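    # Build a DiceTrainer from its serialized dict description (return value is not captured here)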
    TrainerBuilder.trainer_from_dict(
        model,
        None,
        None,
        {
            "type": "DiceTrainer",
            "params": {
                "axes": [1, 2, 3],
                "eps": 0.000001
            }
        }
    )
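    # Construct a DiceTrainer directly for the label/loss tensor checks below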
    trainer = DiceTrainer(model=model, train_inputs=[x])

    print('LABELS TENSORS:', trainer.get_label_tensors())
    trainer.compile()
    print('LOSS TENSORS:', trainer.get_track_losses())

    # VANILLA CLASSIFICATION CASE
    print('VANILLA CLASSIFICATION CASE--------------------------------------------------------------------------------')
    x = InputLayer(input_shape=[32, 3], name='input')
    model = Classificator(in_x=x, out_x=x)