Example #1
    def test_accuracy(self):
        metric_container = compile_utils.MetricsContainer('accuracy')
        y_t, y_p = array_ops.ones((10, 1)), array_ops.ones((10, 1))
        metric_container.update_state(y_t, y_p)
        self.assertEqual(metric_container.metrics[0]._fn,
                         metrics_mod.binary_accuracy)

        metric_container = compile_utils.MetricsContainer('Accuracy')
        y_t, y_p = array_ops.ones((10, 1)), array_ops.ones((10, 1))
        metric_container.update_state(y_t, y_p)
        self.assertEqual(metric_container.metrics[0]._fn,
                         metrics_mod.binary_accuracy)

        metric_container = compile_utils.MetricsContainer('accuracy')
        y_t, y_p = array_ops.ones((10, 1)), array_ops.ones((10, 20))
        self.assertEqual(y_p.shape.as_list()[-1], 20)
        metric_container.update_state(y_t, y_p)
        self.assertEqual(metric_container.metrics[0]._fn,
                         metrics_mod.sparse_categorical_accuracy)

        metric_container = compile_utils.MetricsContainer('accuracy')
        y_t, y_p = array_ops.ones((10, 20)), array_ops.ones((10, 20))
        metric_container.update_state(y_t, y_p)
        self.assertEqual(metric_container.metrics[0]._fn,
                         metrics_mod.categorical_accuracy)
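The assertions above boil down to a shape-based dispatch rule for the 'accuracy' string. The snippet below restates that rule as a standalone helper using the public tf.keras API; resolve_accuracy is a hypothetical illustration of the behaviour the test checks, not the Keras implementation.

import tensorflow as tf

def resolve_accuracy(y_true, y_pred):
    """Pick the accuracy function implied by the label/prediction shapes."""
    if y_pred.shape[-1] == 1:
        # A single prediction column is treated as a binary problem.
        return tf.keras.metrics.binary_accuracy
    if y_true.shape[-1] == 1:
        # Integer labels against a probability vector: sparse categorical.
        return tf.keras.metrics.sparse_categorical_accuracy
    # One-hot labels against a probability vector: categorical.
    return tf.keras.metrics.categorical_accuracy

assert resolve_accuracy(tf.ones((10, 1)), tf.ones((10, 1))) is tf.keras.metrics.binary_accuracy
assert resolve_accuracy(tf.ones((10, 1)), tf.ones((10, 20))) is tf.keras.metrics.sparse_categorical_accuracy
assert resolve_accuracy(tf.ones((10, 20)), tf.ones((10, 20))) is tf.keras.metrics.categorical_accuracy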
Example #2
    def test_missing_label_with_no_metrics(self):
        # It's ok to exclude a label if that label has no
        # losses or metrics associated with it.
        metric_container = compile_utils.MetricsContainer(metrics={
            'output1': 'mae',
            'output3': 'mse'
        })

        y_p = {
            'output1': ops.convert_to_tensor_v2_with_dispatch([[0], [1], [2]]),
            'output2': ops.convert_to_tensor_v2_with_dispatch([[3], [4], [5]]),
            'output3': ops.convert_to_tensor_v2_with_dispatch([[6], [7], [8]])
        }
        y_t = {
            'output1': ops.convert_to_tensor_v2_with_dispatch([[1], [2], [3]]),
            'output3': ops.convert_to_tensor_v2_with_dispatch([[4], [5], [6]])
        }

        metric_container.update_state(y_t, y_p)
        self.assertLen(metric_container.metrics, 2)

        mae_metric = metric_container.metrics[0]
        self.assertEqual(mae_metric.name, 'output1_mae')
        self.assertEqual(mae_metric.result().numpy(), 1.)

        mse_metric = metric_container.metrics[1]
        self.assertEqual(mse_metric.name, 'output3_mse')
        self.assertEqual(mse_metric.result().numpy(), 4.)
Example #3
  def test_metric_dict(self):
    metric_container = compile_utils.MetricsContainer(
        metrics={
            'out1': 'mse',
            'out2': 'mae'
        },
        weighted_metrics={
            'out1': 'mse',
            'out2': 'mae'
        })

    y_t = {'out1': array_ops.ones((10, 1)), 'out2': array_ops.zeros((10, 1))}
    y_p = {'out1': array_ops.ones((10, 1)), 'out2': 2 * array_ops.ones((10, 1))}
    sw = ops.convert_to_tensor_v2([0, 0, 0, 0, 0, 1, 1, 1, 1, 1])
    metric_container.update_state(y_t, y_p, sample_weight=sw)

    mse_metric = metric_container.metrics[0]
    self.assertEqual(mse_metric.name, 'out1_mse')
    self.assertEqual(mse_metric.result().numpy(), 0.)

    weighted_mse_metric = metric_container.metrics[1]
    self.assertEqual(weighted_mse_metric.name, 'out1_weighted_mse')
    self.assertEqual(weighted_mse_metric.result().numpy(), 0.)

    mae_metric = metric_container.metrics[2]
    self.assertEqual(mae_metric.name, 'out2_mae')
    self.assertEqual(mae_metric.result().numpy(), 2.)

    weighted_mae_metric = metric_container.metrics[3]
    self.assertEqual(weighted_mae_metric.name, 'out2_weighted_mae')
    self.assertEqual(weighted_mae_metric.result().numpy(), 2.)
Example #4
        def compile(self, optimizers, loss_fns, metrics=None):
            super(Model_w_self_backpropagated_branches, self).compile()
            assert len(optimizers) == len(loss_fns)
            assert len(optimizers) == self.num_models

            if metrics is None:
                user_metrics = [None] * self.num_models
            elif isinstance(metrics, dict):
                user_metrics = [None] * self.num_models
                for k, v in metrics.items():
                    user_metrics[k] = v
            elif isinstance(metrics, list):
                assert len(metrics) == self.num_models
                user_metrics = metrics
            else:
                raise ValueError("Metrics must be either a full list or a"
                                 " sparse dict mapping model position to metric.")

            with self.distribute_strategy.scope():
                self._validate_compile(optimizers, user_metrics)

                self.optimizers = self._get_optimizer(optimizers)
                self.compiled_loss = [compile_utils.LossesContainer(
                    loss, output_names=self.output_names) 
                    for loss in loss_fns]
                self.compiled_metrics = [compile_utils.MetricsContainer(
                    metric, output_names=self.output_names)
                    for metric in user_metrics]
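A natural question is how these per-model containers get used during training. The sketch below is an assumption-heavy illustration of one possible train_step: self.models and the (x, y) unpacking of data are hypothetical, while the update_state and .metrics calls mirror the container API exercised in the tests above, and calling a LossesContainer like a function matches how Keras invokes compiled_loss.

import tensorflow as tf

def train_step(self, data):  # would live on the same class as compile() above
    x, y = data  # assumed structure of the incoming batch
    for i, model in enumerate(self.models):  # `self.models` is hypothetical
        with tf.GradientTape() as tape:
            y_pred = model(x, training=True)
            # The i-th LossesContainer builds itself on first call, then
            # applies per-output weighting and reduction.
            loss = self.compiled_loss[i](y, y_pred)
        grads = tape.gradient(loss, model.trainable_variables)
        self.optimizers[i].apply_gradients(zip(grads, model.trainable_variables))
        # Same update_state call exercised throughout the tests above.
        self.compiled_metrics[i].update_state(y, y_pred)
    return {m.name: m.result()
            for container in self.compiled_metrics for m in container.metrics}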
Example #5
    def test_list_of_metrics_list_of_outputs(self):
        metric_container = compile_utils.MetricsContainer(
            metrics=['mse', 'mae'],
            weighted_metrics=['accuracy'])  # Should broadcast to both outputs.

        y_t = [array_ops.ones((10, 1)), array_ops.zeros((10, 1))]
        y_p = [array_ops.ones((10, 1)), 2 * array_ops.ones((10, 1))]
        sw = ops.convert_to_tensor([0, 0, 0, 0, 0, 1, 1, 1, 1, 1])
        metric_container.update_state(y_t, y_p, sample_weight=sw)
        self.assertLen(metric_container.metrics, 4)

        mse_metric = metric_container.metrics[0]
        self.assertEqual(mse_metric.name, 'output_1_mse')
        self.assertEqual(mse_metric.result().numpy(), 0.)

        mae_metric = metric_container.metrics[1]
        self.assertEqual(mae_metric.name, 'output_2_mae')
        self.assertEqual(mae_metric.result().numpy(), 2.)

        acc_metric_1 = metric_container.metrics[2]
        self.assertEqual(acc_metric_1.name, 'output_1_accuracy')
        self.assertEqual(acc_metric_1.result().numpy(), 1.)
        self.assertEqual(acc_metric_1._fn, metrics_mod.binary_accuracy)

        acc_metric_2 = metric_container.metrics[3]
        self.assertEqual(acc_metric_2.name, 'output_2_accuracy')
        self.assertEqual(acc_metric_2.result().numpy(), 0.)
        self.assertEqual(acc_metric_2._fn, metrics_mod.binary_accuracy)
Example #6
    def test_reset_state_existing_metric_before_built(self):
        metric = metrics_mod.Mean()
        metric.update_state([2.0, 4.0])
        self.assertEqual(metric.result().numpy(), 3.0)

        metric_container = compile_utils.MetricsContainer(metric)
        metric_container.reset_state()
        self.assertEqual(metric.result().numpy(), 0.0)
Example #7
    def test_single_metric(self):
        metric_container = compile_utils.MetricsContainer('mse')
        y_t, y_p = array_ops.ones((10, 5)), array_ops.zeros((10, 5))
        metric_container.update_state(y_t, y_p)

        self.assertLen(metric_container.metrics, 1)
        metric = metric_container.metrics[0]
        self.assertEqual(metric.name, 'mse')
        self.assertEqual(metric.result().numpy(), 1.)
Example #8
  def test_broadcast_metrics_to_dict(self):
    metric_container = compile_utils.MetricsContainer(metrics=['mae'])

    y_p = {'output': ops.convert_to_tensor([[0], [1], [2]])}
    y_t = {'output': ops.convert_to_tensor([[1], [2], [3]])}
    metric_container.update_state(y_t, y_p)

    mae_metric = metric_container.metrics[0]
    self.assertEqual(mae_metric.name, 'mae')
    self.assertEqual(mae_metric.result().numpy(), 1.)
Example #9
    def compile(self,
                optimizer_gen='rmsprop', optimizer_disc='rmsprop',
                loss_gen=None, loss_disc=None,
                metrics_gen=None, metrics_disc=None):

        self.gen_optimizer = optimizers.get(optimizer_gen)
        self.disc_optimizer = optimizers.get(optimizer_disc)

        self.gen_losses_container = compile_utils.LossesContainer(loss_gen) if loss_gen else None
        self.disc_losses_container = compile_utils.LossesContainer(loss_disc) if loss_disc else None

        self.gen_metrics_container = compile_utils.MetricsContainer(metrics_gen) if metrics_gen else None
        self.disc_metrics_container = compile_utils.MetricsContainer(metrics_disc) if metrics_disc else None

        self.m_formatter = Metrics_Formatter(
            gen_loss_name=_get_tag_name(self.gen_losses_container),
            disc_loss_name=_get_tag_name(self.disc_losses_container),
            gen_metric_name=_get_tag_name(self.gen_metrics_container),
            disc_metric_name=_get_tag_name(self.disc_metrics_container),
            num_format='.03f'
        )
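Because any of the four containers above may be None (the corresponding loss or metrics argument was omitted), pulling results out of them for logging needs a guard. The helper below is hypothetical and not part of the class; it relies only on the .metrics, .name, and .result() calls shown in the tests in this listing.

def metrics_to_logs(container, prefix=''):
    # Returns an empty dict when the container was never created.
    if container is None:
        return {}
    return {prefix + m.name: float(m.result()) for m in container.metrics}

# e.g. logs = {**metrics_to_logs(self.gen_metrics_container, 'gen_'),
#              **metrics_to_logs(self.disc_metrics_container, 'disc_')}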
Example #10
    def test_loss_class_as_metric_with_distribution(self):
        distribution = one_device_strategy.OneDeviceStrategy('/device:CPU:0')
        with distribution.scope():
            metric_container = compile_utils.MetricsContainer(
                losses_mod.MeanSquaredError())
            y_t, y_p = array_ops.ones((10, 5)), array_ops.zeros((10, 5))
            metric_container.update_state(y_t, y_p)

            self.assertLen(metric_container.metrics, 1)
            metric = metric_container.metrics[0]
            self.assertEqual(metric.name, 'mean_squared_error')
            self.assertEqual(metric.result().numpy(), 1.)
Example #11
    def test_list_of_metrics_one_output(self):
        metric_container = compile_utils.MetricsContainer(['mse', 'mae'])
        y_t, y_p = 2 * array_ops.ones((10, 5)), array_ops.zeros((10, 5))
        metric_container.update_state(y_t, y_p)
        self.assertLen(metric_container.metrics, 2)

        mse_metric = metric_container.metrics[0]
        self.assertEqual(mse_metric.name, 'mse')
        self.assertEqual(mse_metric.result().numpy(), 4.)

        mae_metric = metric_container.metrics[1]
        self.assertEqual(mae_metric.name, 'mae')
        self.assertEqual(mae_metric.result().numpy(), 2.)
Example #12
    def test_metric_partial_dict_with_output_names(self):
        metric_container = compile_utils.MetricsContainer(
            {'out2': 'mae'}, output_names=['out1', 'out2'])

        y_t = [array_ops.ones((10, 1)), array_ops.zeros((10, 1))]
        y_p = [array_ops.ones((10, 1)), array_ops.ones((10, 1))]
        sw = ops.convert_to_tensor([0, 0, 0, 0, 0, 1, 1, 1, 1, 1])

        metric_container.update_state(y_t, y_p, sample_weight=sw)
        self.assertLen(metric_container.metrics, 1)

        mae_metric = metric_container.metrics[0]
        self.assertEqual(mae_metric.name, 'out2_mae')
        self.assertEqual(mae_metric.result().numpy(), 1.)
Example #13
    def test_custom_metric_callables(self):
        def custom_metric_fn(y_true, y_pred):
            return math_ops.reduce_sum(y_true - y_pred)

        class CustomMetricClass(object):
            def __call__(self, y_true, y_pred):
                return math_ops.reduce_sum(y_true - y_pred)

        metric_container = compile_utils.MetricsContainer(
            [custom_metric_fn, CustomMetricClass()])
        y_t, y_p = array_ops.ones((10, 5)), array_ops.zeros((10, 5))
        metric_container.update_state(y_t, y_p)

        self.assertEqual(metric_container.metrics[0].name, 'custom_metric_fn')
        self.assertEqual(metric_container.metrics[1].name,
                         'custom_metric_class')
Example #14
  def test_metric_partial_dict_with_nones(self):
    metric_container = compile_utils.MetricsContainer({
        'out1': None,
        'out2': 'mae'
    })

    y_t = {'out1': array_ops.ones((10, 1)), 'out2': array_ops.zeros((10, 1))}
    y_p = {'out1': array_ops.ones((10, 1)), 'out2': array_ops.ones((10, 1))}
    sw = ops.convert_to_tensor_v2_with_dispatch([0, 0, 0, 0, 0, 1, 1, 1, 1, 1])

    metric_container.update_state(y_t, y_p, sample_weight=sw)
    self.assertLen(metric_container.metrics, 1)

    mae_metric = metric_container.metrics[0]
    self.assertEqual(mae_metric.name, 'out2_mae')
    self.assertEqual(mae_metric.result().numpy(), 1.)
Example #15
  def test_metrics_sample_weight(self):
    metrics_container = compile_utils.MetricsContainer(
        metrics=['mae'], weighted_metrics=['mse'])
    y_p = constant_op.constant([[[1], [1]], [[0], [1]]], dtype=dtypes.float32)
    y_t = constant_op.constant([[[1], [1]], [[1], [1]]], dtype=dtypes.float32)
    sw = constant_op.constant([[.2, .3], [.5, 0]], dtype=dtypes.float32)

    metrics_container.update_state(y_t, y_p, sample_weight=sw)
    self.assertLen(metrics_container.metrics, 2)

    mae_metric = metrics_container.metrics[0]
    self.assertEqual(mae_metric.name, 'mae')
    self.assertAlmostEqual(mae_metric.result().numpy(), .25)  # 1 / 4

    weighted_mse_metric = metrics_container.metrics[1]
    self.assertEqual(weighted_mse_metric.name, 'mse')
    self.assertAlmostEqual(weighted_mse_metric.result().numpy(), .5)  # .5 / 1
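The two expected values can be re-derived by hand; the NumPy snippet below only illustrates the arithmetic in the inline comments (mean of the absolute errors for the unweighted metric, weighted sum divided by the weight total for the weighted one).

import numpy as np

y_p = np.array([[[1], [1]], [[0], [1]]], dtype=np.float32)
y_t = np.array([[[1], [1]], [[1], [1]]], dtype=np.float32)
sw = np.array([[.2, .3], [.5, 0.]], dtype=np.float32)

abs_err = np.abs(y_t - y_p).squeeze(-1)    # [[0, 0], [1, 0]]
sq_err = np.square(y_t - y_p).squeeze(-1)  # [[0, 0], [1, 0]]

print(abs_err.mean())                  # 1 / 4 = 0.25  -> 'mae'
print((sq_err * sw).sum() / sw.sum())  # 0.5 / 1.0 = 0.5 -> weighted 'mse'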
Example #16
    def test_metric_weighting(self):
        metric_container = compile_utils.MetricsContainer(
            metrics=['mae'], weighted_metrics=['mae'])

        y_t = ops.convert_to_tensor_v2_with_dispatch([[0], [3], [0]])
        y_p = ops.convert_to_tensor_v2_with_dispatch([[0], [0], [0]])
        sw = ops.convert_to_tensor_v2_with_dispatch([[1], [0], [1]])

        metric_container.update_state(y_t, y_p, sample_weight=sw)
        self.assertLen(metric_container.metrics, 2)

        mae_metric = metric_container.metrics[0]
        self.assertEqual(mae_metric.name, 'mae')
        self.assertEqual(mae_metric.result().numpy(), 1.)

        weighted_mae_metric = metric_container.metrics[1]
        self.assertEqual(weighted_mae_metric.name, 'weighted_mae')
        self.assertEqual(weighted_mae_metric.result().numpy(), 0.)
Example #17
  def test_metrics_masking(self):
    metrics_container = compile_utils.MetricsContainer(
        metrics=['mae'], weighted_metrics=['mse'])
    y_p = constant_op.constant([[[1], [1]], [[0], [0]]], dtype=dtypes.float32)
    y_t = constant_op.constant([[[1], [1]], [[1], [1]]], dtype=dtypes.float32)
    y_p._keras_mask = constant_op.constant([[1, 1], [0, 0]],
                                           dtype=dtypes.float32)

    metrics_container.update_state(y_t, y_p)
    self.assertLen(metrics_container.metrics, 2)

    mae_metric = metrics_container.metrics[0]
    self.assertEqual(mae_metric.name, 'mae')
    self.assertAlmostEqual(mae_metric.result().numpy(), 0)

    weighted_mse_metric = metrics_container.metrics[1]
    self.assertEqual(weighted_mse_metric.name, 'mse')
    self.assertAlmostEqual(weighted_mse_metric.result().numpy(), 0)
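The mask behaves like an implicit sample weight: the second sample is zeroed out, so only the matching first sample contributes. The NumPy lines below sketch that interpretation (not the internal implementation); the squared and absolute errors coincide here, so both metrics come out to zero.

import numpy as np

y_p = np.array([[[1], [1]], [[0], [0]]], dtype=np.float32)
y_t = np.array([[[1], [1]], [[1], [1]]], dtype=np.float32)
mask = np.array([[1, 1], [0, 0]], dtype=np.float32)

abs_err = np.abs(y_t - y_p).squeeze(-1)     # [[0, 0], [1, 1]]
print((abs_err * mask).sum() / mask.sum())  # 0 / 2 = 0 for both 'mae' and 'mse'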
Example #18
    def test_nested_structure(self):
        metric_container = compile_utils.MetricsContainer(
            metrics={
                'b': ['mse', None],
                'a': 'mae'
            },
            weighted_metrics={
                'b': [None, None],
                'a': 'mse'
            })

        y_t = {
            'b': [2 * array_ops.ones((10, 1)),
                  array_ops.zeros((10, 1))],
            'a': array_ops.zeros((10, 1))
        }
        y_p = {
            'b': [array_ops.zeros((10, 1)),
                  array_ops.zeros((10, 1))],
            'a': array_ops.ones((10, 1))
        }
        sw = ops.convert_to_tensor_v2_with_dispatch(
            [0, 0, 0, 0, 0, 1, 1, 1, 1, 1])

        metric_container.update_state(y_t, y_p, sample_weight=sw)
        self.assertLen(metric_container.metrics, 3)

        a_mae_metric = metric_container.metrics[0]
        self.assertEqual(a_mae_metric.name, 'a_mae')
        self.assertEqual(a_mae_metric.result().numpy(), 1.)

        weighted_a_mse_metric = metric_container.metrics[1]
        self.assertEqual(weighted_a_mse_metric.name, 'a_mse')
        self.assertEqual(weighted_a_mse_metric.result().numpy(), 1.)

        b_1_mse_metric = metric_container.metrics[2]
        self.assertEqual(b_1_mse_metric.name, 'b_1_mse')
        self.assertEqual(b_1_mse_metric.result().numpy(), 4.)