Example 1
from typing import Type, Union

from tensorflow.keras import metrics as metrics_mod  # assumed alias in the source


def get_metric_class(
    metric: Union[str, metrics_mod.Metric, Type[metrics_mod.Metric]]
) -> Union[metrics_mod.Metric, str]:
    if metric in ("acc", "accuracy", "ce", "crossentropy"):
        # Keras matches "acc" and others in this list to the right function
        # based on the Model's loss function, output shape, etc.
        # We pass them through here to let Keras deal with these.
        return metric
    return metrics_mod.get(metric)  # may return a function or a Metric instance
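A quick behavior sketch, assuming metrics_mod aliases tensorflow.keras.metrics as above (the calls below are illustrative, not from the original source):

get_metric_class("accuracy")         # -> "accuracy", passed through to Keras
get_metric_class("binary_accuracy")  # -> the binary_accuracy function,
                                     #    resolved via metrics_mod.get()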
Example 2
    def compile_model(self,
                      optimizer,
                      optimizer_kwargs,
                      loss,
                      metrics,
                      target_tensors=None,
                      **kwargs):
        # Initialize optimizer
        optimizer = optimizers.__dict__[optimizer]
        optimizer = optimizer(**optimizer_kwargs)

        # Make sure all metrics and losses are given in their sparse variants
        metrics = ensure_list_or_tuple(metrics)
        loss = ensure_list_or_tuple(loss)
        for m in metrics + loss:
            if "sparse" not in m:
                raise_non_sparse_metric_or_loss_error()

        # Initialize loss(es); 'inspect' is used below to detect class-based
        # custom losses and metrics
        import inspect
        loss_list = []
        for loss_name in loss:
            if loss_name in losses.__dict__:
                loss_list.append(losses.get(loss_name))
            else:
                loss_fn = loss_functions.__dict__[loss_name]
                if inspect.isclass(loss_fn):
                    loss_list.append(loss_fn(logger=self.logger, **kwargs))
                else:
                    loss_list.append(loss_fn)
        loss = loss_list

        # Find metrics in both the standard keras.metrics module and our own
        # custom metrics module
        init_metrics = []
        for m in metrics:
            if m in TF_metrics.__dict__:
                init_metrics.append(TF_metrics.get(m))
            else:
                metric = custom_metrics.__dict__[m]
                if inspect.isclass(metric):
                    metric = metric(logger=self.logger, **kwargs)
                init_metrics.append(metric)

        # Compile the model
        self.model.compile(optimizer=optimizer,
                           loss=loss,
                           metrics=init_metrics,
                           target_tensors=target_tensors)
        self.logger("Optimizer:   %s" % optimizer)
        self.logger("Loss funcs:  %s" % loss)
        self.logger("Metrics:     %s" % init_metrics)
        if target_tensors is not None:
            self.target_tensor = True
        return self
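A hypothetical invocation of this sparse-only variant (here trainer stands in for an instance of the owning class; all argument values are illustrative):

trainer.compile_model(
    optimizer="Adam",
    optimizer_kwargs={"learning_rate": 1e-4},
    loss=["sparse_categorical_crossentropy"],
    metrics=["sparse_categorical_accuracy"])

Both identifiers contain "sparse", so the guard above passes; a plain "categorical_accuracy" would trigger raise_non_sparse_metric_or_loss_error() instead.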
Example 3
    def compile_model(self,
                      optimizer,
                      optimizer_kwargs,
                      loss,
                      metrics,
                      sparse=False,
                      mem_logging=False,
                      **kwargs):
        # Initialize optimizer
        optimizer = optimizers.__dict__[optimizer]
        optimizer = optimizer(**optimizer_kwargs)

        # Initialize loss; 'inspect' is used below to detect class-based
        # custom losses and metrics
        import inspect
        if loss in losses.__dict__:
            loss = losses.get(loss)
        else:
            loss = loss_functions.__dict__[loss]
            if inspect.isclass(loss):
                loss = loss(logger=self.logger, **kwargs)

        if sparse:
            # Make sure sparse metrics are specified
            for i, m in enumerate(metrics):
                if "sparse" not in m:
                    new = "sparse_" + m
                    self.logger("Note: changing %s --> "
                                "%s (sparse=True passed)" % (m, new))
                    metrics[i] = new

        # Find metrics in both the standard keras.metrics module and our own
        # custom metrics module
        init_metrics = []
        for m in metrics:
            if m in TF_metrics.__dict__:
                init_metrics.append(TF_metrics.get(m))
            else:
                metric = custom_metrics.__dict__[m]
                if inspect.isclass(metric):
                    metric = metric(logger=self.logger, **kwargs)
                init_metrics.append(metric)

        # Compile the model
        self.model.compile(optimizer=optimizer,
                           loss=loss,
                           metrics=init_metrics)

        self.logger("Optimizer:   %s" % optimizer)
        self.logger("Loss:        %s" % loss)
        self.logger("Targets:     %s" % ("Integer" if sparse else "One-Hot"))
        self.logger("Metrics:     %s" % init_metrics)

        return self
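Unlike Example 2, this variant rewrites non-sparse metric names rather than raising. A hypothetical call (trainer again stands in for an instance of the owning class):

trainer.compile_model(
    optimizer="Adam",
    optimizer_kwargs={"learning_rate": 1e-4},
    loss="sparse_categorical_crossentropy",
    metrics=["categorical_accuracy"],  # logged and renamed to
    sparse=True)                       # "sparse_categorical_accuracy"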
Example 4
def get_model(input_shape=(256, 256, 3)):
    inputs = layers.Input(shape=input_shape)  # 256x256 spatial size
    decoder0 = u_net_block(inputs)
    outputs = layers.Conv2D(1, (1, 1), activation='sigmoid')(decoder0)

    model = models.Model(inputs=[inputs], outputs=[outputs])

    model.compile(
        optimizer=optimizers.get(OPTIMIZER),
        loss=losses.get(LOSS),
        metrics=[metrics.get(metric) for metric in METRICS])

    return model
Example 5
def get_siamese_model(input_shape=(256, 256, 3)):
    inputs = layers.Input(shape=input_shape)  # 256x256 spatial size
    block0 = u_net_block(inputs)
    block1 = u_net_block(inputs)
    decoder_siamese = layers.concatenate([block0, block1], axis=-1)
    outputs = layers.Conv2D(1, (1, 1), activation='sigmoid')(decoder_siamese)

    model = models.Model(inputs=[inputs], outputs=[outputs])

    model.compile(
        optimizer=optimizers.get(OPTIMIZER),
        loss=losses.get(LOSS),
        metrics=[metrics.get(metric) for metric in METRICS])

    return model
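Examples 4 and 5 reference a u_net_block helper and module-level OPTIMIZER, LOSS and METRICS constants that are not shown. A minimal sketch of plausible definitions so the snippets run; every value below is an assumption, not taken from the original:

from tensorflow.keras import layers, losses, metrics, models, optimizers

OPTIMIZER = "adam"              # resolved by optimizers.get()
LOSS = "binary_crossentropy"    # resolved by losses.get()
METRICS = ["binary_accuracy"]   # each entry resolved by metrics.get()

def u_net_block(x):
    # Stand-in for the real encoder-decoder block, which is not shown.
    return layers.Conv2D(16, (3, 3), padding="same", activation="relu")(x)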
Example 6
def _get_metric_name(name):
    """
    Gives the Keras name for a metric.

    Parameters
    ----------
    name : str
        Original name of the metric.

    Returns
    -------
    str
        The canonical Keras name if it can be resolved, otherwise the input
        name unchanged.
    """
    if name in ('acc', 'accuracy'):
        return 'accuracy'
    try:
        metric_fn = metrics.get(name)
        return metric_fn.__name__
    except Exception:  # unknown identifier, or a Metric without __name__
        pass
    return name
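Behavior sketch, assuming metrics here is tensorflow.keras.metrics (illustrative calls, not from the original):

_get_metric_name('acc')       # -> 'accuracy' (special-cased above)
_get_metric_name('mse')       # -> 'mean_squared_error' (via metrics.get)
_get_metric_name('nonsense')  # -> 'nonsense' (lookup fails, falls through)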
Example 7
# Imports assumed from the surrounding test module (not shown in the
# original snippet); get_model is a helper defined elsewhere in the suite.
import pytest
from sklearn.datasets import make_classification
from tensorflow.keras import metrics as metrics_module
from scikeras.wrappers import KerasClassifier


def test_metrics_uncompilable():
    """Tests that a TypeError is raised when routed parameters
    are passed to a metric that is not compilable (not a class).
    """

    X, y = make_classification()

    metrics = [
        metrics_module.get("accuracy"),
    ]  # a function

    est = KerasClassifier(
        model=get_model,
        loss="binary_crossentropy",
        metrics=metrics,
        metrics__name="custom_name",
    )
    with pytest.raises(
            TypeError,
            match="does not accept parameters because it's not a class"):
        est.fit(X, y)
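For contrast, the error message points at the working counterpart: routed parameters such as metrics__name do apply when the metric is given as a class that SciKeras can instantiate. A hedged sketch under the same fixtures:

est = KerasClassifier(
    model=get_model,
    loss="binary_crossentropy",
    metrics=[metrics_module.BinaryAccuracy],  # a class, not a function
    metrics__name="custom_name",
)
est.fit(X, y)  # compiles with BinaryAccuracy(name="custom_name")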