Ejemplo n.º 1
0
 def test_log_loss_config(self):
     """A LogLossConfig must survive a from_dict -> to_dict round trip."""
     expected = dict(
         input_layer='images',
         output_layer='relu_1',
         epsilon=0.0001,
         weights=1.0,
         name='l',
         collect=False,
     )
     parsed = LogLossConfig.from_dict(expected)
     self.assert_equal_losses(parsed.to_dict(), expected)
Ejemplo n.º 2
0
    def test_return_estimator_spec(self):
        """Calling a TRAIN-mode BaseModel should produce an EstimatorSpec."""
        features = {'x': tf.placeholder(tf.float32, [2, 89])}
        labels = tf.constant([[1], [1]])

        model = BaseModel(
            plx.Modes.TRAIN,
            graph_fn=self.get_dummy_graph_fn(),
            loss=LogLossConfig(),
            optimizer=AdadeltaConfig(),
            model_type=BaseModel.Types.CLASSIFIER,
            metrics=[],
            summaries=['learning_rate'],
            name='test')

        spec = model(features, labels, None, None)
        assert isinstance(spec, EstimatorSpec)
Ejemplo n.º 3
0
    def test_handle_predict_mode(self):
        """In PREDICT mode the spec has predictions but no loss/train_op."""
        features = {'x': tf.placeholder(tf.float32, [2, 89])}
        labels = tf.constant([[1], [1]])

        model = BaseModel(
            plx.Modes.PREDICT,
            graph_fn=self.get_dummy_graph_fn(),
            loss=LogLossConfig(),
            optimizer=AdadeltaConfig(),
            model_type=BaseModel.Types.CLASSIFIER,
            metrics=[],
            summaries=['learning_rate'],
            name='test')

        spec = model(features, labels, None, None)

        # Prediction specs must not carry any training artifacts.
        assert spec.loss is None
        assert spec.train_op is None
        assert spec.predictions is not None
        assert 'losses' not in spec.predictions
Ejemplo n.º 4
0
    def test_build_no_summaries(self):
        """With summaries=[] the model must not register any summary."""
        features = {'x': tf.placeholder(tf.float32, [2, 89])}
        labels = tf.constant([[1], [1]])

        model = BaseModel(
            plx.Modes.TRAIN,
            graph_fn=self.get_dummy_graph_fn(),
            loss=LogLossConfig(),
            optimizer=AdadeltaConfig(),
            model_type=BaseModel.Types.CLASSIFIER,
            metrics=[],
            summaries=[],
            name='test')

        model(features, labels, None, None)

        # The tracked-summaries collection should stay empty.
        tracked = get_tracked(collection=tf.GraphKeys.SUMMARIES_BY_NAMES)
        assert tracked == {}
Ejemplo n.º 5
0
    def test_does_not_build_learning_rate_summaries_if_no_decay(self):
        """Without a decay schedule no learning-rate summary is created."""
        features = {'x': tf.placeholder(tf.float32, [2, 89])}
        labels = tf.constant([[1], [1]])

        model = BaseModel(
            plx.Modes.TRAIN,
            graph_fn=self.get_dummy_graph_fn(),
            loss=LogLossConfig(),
            optimizer=AdadeltaConfig(),
            model_type=BaseModel.Types.CLASSIFIER,
            metrics=[],
            summaries=['learning_rate'],
            name='test')

        model(features, labels, None, None)

        # The optimizer has no decay_type, so nothing should be tracked.
        tracked = get_tracked(collection=tf.GraphKeys.SUMMARIES_BY_NAMES)
        assert len(list(tracked.keys())) == 0
Ejemplo n.º 6
0
    def test_build_variables_summaries(self):
        """summaries=['variables'] tracks exactly the trainable variables."""
        features = {'x': tf.placeholder(tf.float32, [2, 89])}
        labels = tf.constant([[1], [1]])

        model = BaseModel(
            plx.Modes.TRAIN,
            graph_fn=self.get_dummy_graph_fn(),
            loss=LogLossConfig(),
            optimizer=AdadeltaConfig(),
            model_type=BaseModel.Types.CLASSIFIER,
            metrics=[],
            summaries=['variables'],
            name='test')

        model(features, labels, None, None)

        # Every trainable variable — and nothing else — gets a summary.
        expected_names = {v.op.name for v in tf.trainable_variables()}
        tracked_names = set(
            get_tracked(collection=tf.GraphKeys.SUMMARIES_BY_NAMES).keys())
        assert expected_names == tracked_names
Ejemplo n.º 7
0
    def test_build_all_summaries(self):
        """summaries='all' creates learning-rate, activation, gradient and
        loss summaries (a decay schedule is needed for the learning rate)."""
        training.create_global_step()
        features = {'x': tf.placeholder(tf.float32, [2, 89])}
        labels = tf.constant([[1], [1]])

        model = BaseModel(
            plx.Modes.TRAIN,
            graph_fn=self.get_dummy_graph_fn(),
            loss=LogLossConfig(),
            optimizer=AdadeltaConfig(decay_type='exponential_decay'),
            model_type=BaseModel.Types.CLASSIFIER,
            metrics=[],
            summaries='all',
            name='test')

        model(features, labels, None, None)

        # Bucket every tracked summary by the marker in its name.
        # NOTE: the elif precedence (learning_rate > Activation > Loss >
        # Gradient) mirrors the original classification order.
        counts = {'learning_rate': 0, 'activation': 0,
                  'gradient': 0, 'loss': 0}
        tracked = get_tracked(collection=tf.GraphKeys.SUMMARIES_BY_NAMES)
        for summary_name in tracked.keys():
            if 'learning_rate' in summary_name:
                counts['learning_rate'] += 1
            elif 'Activation' in summary_name:
                counts['activation'] += 1
            elif 'Loss' in summary_name:
                counts['loss'] += 1
            elif 'Gradient' in summary_name:
                counts['gradient'] += 1

        assert counts['learning_rate'] > 0
        assert counts['activation'] > 0
        assert counts['gradient'] > 0
        assert counts['loss'] > 0