Example #1
    def _build_loss(self, results, features, labels):
        """Creates the loss operation

        Returns:
             tuple `(losses, loss)`:
                `losses` are the per-batch losses.
                `loss` is a single scalar tensor to minimize.
        """
        # losses, loss = getters.get_loss(
        #     self.loss_config.module, results.results, labels, **self.loss_config.params)
        # if results.loss is not None:
        #     loss += results.loss
        # if results.losses is not None:
        #     losses += results.losses

        loss = results.loss
        losses = results.losses
        self._loss = loss
        self._losses = losses

        other_losses = get_tracked(tf.GraphKeys.REGULARIZATION_LOSSES)
        if other_losses:
            loss = [loss] + other_losses
            loss = tf.add_n(loss, name="TotalLoss")
            self._total_loss = loss
        return losses, loss
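The regularization hook in `_build_loss` only fires if something was tracked. A minimal sketch of the registration side, assuming `get_tracked` is a thin wrapper around TF1's `tf.get_collection` (an assumption; only the read path appears here):

    import tensorflow as tf

    def get_tracked(collection, scope=None):
        # Assumption: get_tracked simply reads a graph collection.
        return tf.get_collection(collection, scope=scope)

    # User code (or a layer) registers its regularization term:
    weights = tf.get_variable('w', shape=[89, 1])
    tf.add_to_collection(tf.GraphKeys.REGULARIZATION_LOSSES,
                         1e-4 * tf.nn.l2_loss(weights))

    # _build_loss then folds every tracked term into the total loss:
    loss = tf.constant(0.3)
    other_losses = get_tracked(tf.GraphKeys.REGULARIZATION_LOSSES)
    if other_losses:
        loss = tf.add_n([loss] + other_losses, name='TotalLoss')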
Example #2
    def _build_summary_op(self, results=None, features=None, labels=None):
        """Builds summaries for this model.

        The summaries are one or more of:
            * `ACTIVATIONS`, `VARIABLES`, `GRADIENTS`, `LOSS`, `LEARNING_RATE`
        """
        summary_op = []
        for summary in self.summaries:
            if summary == summarizer.SummaryOptions.ACTIVATIONS:
                activations = get_tracked(tf.GraphKeys.ACTIVATIONS)
                summary_op += summarizer.add_activations_summary(activations)
            elif summary == summarizer.SummaryOptions.VARIABLES:
                variables = tf.trainable_variables()
                summary_op += summarizer.add_trainable_vars_summary(variables)
            elif summary == summarizer.SummaryOptions.GRADIENTS and self._clip_gradients > 0.0:
                summary_op += summarizer.add_gradients_summary(self._grads_and_vars)
            elif summary == summarizer.SummaryOptions.LOSS:
                summary_op += summarizer.add_loss_summaries(self._total_loss, self._loss)
            elif summary == summarizer.SummaryOptions.LEARNING_RATE:
                summary_op += summarizer.add_learning_rate_summaries()
            elif summary == summarizer.SummaryOptions.IMAGE_INPUT:
                summary_op += summarizer.add_image_summary(features, op_name='inputs')
            elif summary == summarizer.SummaryOptions.IMAGE_RESULT:
                summary_op += summarizer.add_image_summary(results, op_name='results')

        # No need to tf.summary.merge(summary_op) here; for now we merge everything at the hook level
        return summary_op
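As the trailing comment says, the list is merged later rather than here. A hedged sketch of that deferred merge (hypothetical hook-side code, not from the source):

    import tensorflow as tf

    # _build_summary_op returns a plain list of summary tensors ...
    summary_ops = [tf.summary.scalar('loss', tf.constant(0.3)),
                   tf.summary.histogram('weights', tf.zeros([4]))]

    # ... and a hook merges them into a single protobuf-producing op
    # that it evaluates and writes once per step.
    merged = tf.summary.merge(summary_ops)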
Example #3
    def test_build_learning_rate_summaries(self):
        training.create_global_step()
        config = ModelConfig(loss_config=LossConfig(name='log_loss'),
                             optimizer_config=OptimizerConfig(
                                 name='Adadelta',
                                 decay_type='exponential_decay'))

        x = {'source_ids': tf.placeholder(tf.float32, [2, 89])}
        y = tf.constant([[1], [1]])

        model = BaseModel(plx.ModeKeys.TRAIN,
                          graph_fn=self.get_dummy_graph_fn(),
                          config=config,
                          model_type=BaseModel.Types.CLASSIFIER,
                          summaries=['learning_rate'],
                          name='test',
                          params=None)

        model(x, y, None, None)

        # Only the learning rate summary is created
        summaries_names = list(
            get_tracked(collection=tf.GraphKeys.SUMMARIES_BY_NAMES).keys())
        assert len(summaries_names) == 1
        assert summaries_names[0] == 'learning_rate'
Example #4
    def test_build_all_summaries(self):
        training.create_global_step()
        x = {'x': tf.placeholder(tf.float32, [2, 89])}
        y = tf.constant([[1], [1]])

        model = BaseModel(plx.Modes.TRAIN, graph_fn=self.get_dummy_graph_fn(),
                          loss_config=LossConfig(module='log_loss'),
                          optimizer_config=OptimizerConfig(module='adadelta',
                                                           decay_type='exponential_decay'),
                          model_type=BaseModel.Types.CLASSIFIER, eval_metrics_config=[],
                          summaries='all', name='test')

        model(x, y, None, None)

        # Count the summaries created for each type
        learning_rate_summaries = 0
        activations_summaries = 0
        gradients_summaries = 0
        loss_summaries = 0

        for s_name in get_tracked(collection=tf.GraphKeys.SUMMARIES_BY_NAMES).keys():
            if 'learning_rate' in s_name:
                learning_rate_summaries += 1
            elif 'Activation' in s_name:
                activations_summaries += 1
            elif 'Loss' in s_name:
                loss_summaries += 1
            elif 'Gradient' in s_name:
                gradients_summaries += 1

        assert learning_rate_summaries > 0
        assert activations_summaries > 0
        assert gradients_summaries > 0
        assert loss_summaries > 0
Example #5
    def _build_summary_op(self, results=None, features=None, labels=None):
        """Builds summaries for this model.

        The summaries are one or more of:
            * `ACTIVATIONS`, `VARIABLES`, `GRADIENTS`, `LOSS`, `LEARNING_RATE`
        """
        summary_op = []
        for summary in self.summaries:
            if summary == summarizer.SummaryOptions.ACTIVATIONS:
                activations = get_tracked(tf.GraphKeys.ACTIVATIONS)
                summary_op += summarizer.add_activations_summary(activations)
            elif summary == summarizer.SummaryOptions.VARIABLES:
                variables = tf.trainable_variables()
                summary_op += summarizer.add_trainable_vars_summary(variables)
            elif summary == summarizer.SummaryOptions.GRADIENTS and self._clip_gradients > 0.0:
                summary_op += summarizer.add_gradients_summary(
                    self._grads_and_vars)
            elif summary == summarizer.SummaryOptions.LOSS:
                summary_op += summarizer.add_loss_summaries(
                    self._total_loss, self._loss)
            elif summary == summarizer.SummaryOptions.LEARNING_RATE:
                summary_op += summarizer.add_learning_rate_summaries()
            elif summary == summarizer.SummaryOptions.IMAGE_INPUT:
                summary_op += summarizer.add_image_summary(features,
                                                           op_name='inputs')
            elif summary == summarizer.SummaryOptions.IMAGE_RESULT:
                summary_op += summarizer.add_image_summary(results,
                                                           op_name='results')

        # No need to tf.summary.merge(summary_op) here; for now we merge everything at the hook level
        return summary_op
Example #6
    def _build_loss(self, results, features, labels):
        """Creates the loss operation

        Returns:
             tuple `(losses, loss)`:
                `losses` are the per-batch losses.
                `loss` is a single scalar tensor to minimize.
        """
        reward, action, done = (labels['reward'], labels['action'],
                                labels['done'])
        discount_reward = labels['discount_reward']

        log_probs = self._graph_results.distribution.log_prob(action)
        losses = tf.multiply(x=log_probs, y=discount_reward)
        loss = -tf.reduce_mean(losses, axis=0, name='loss')

        self._losses = losses
        self._loss = loss

        other_losses = get_tracked(tf.GraphKeys.REGULARIZATION_LOSSES)
        if other_losses:
            loss = [loss] + other_losses
            loss = tf.add_n(loss, name="TotalLoss")
            self._total_loss = loss
        return losses, loss
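The loss built here is the REINFORCE objective, `loss = -mean(log_prob(action) * discount_reward)`; a tiny numeric check of that reduction (illustration only):

    import tensorflow as tf

    log_probs = tf.constant([-0.1, -2.3])        # log pi(a|s) per step
    discount_reward = tf.constant([1.0, -0.5])   # discounted returns
    losses = tf.multiply(x=log_probs, y=discount_reward)  # [-0.1, 1.15]
    loss = -tf.reduce_mean(losses, axis=0)       # -(0.525) = -0.525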
Example #7
def add_learning_rate_summaries():
    learning_rate = get_tracked(tf.GraphKeys.LEARNING_RATE)
    if not learning_rate:
        return []

    return [
        get_summary(SummaryTypes.SCALAR, 'learning_rate', learning_rate[0])
    ]
Example #8
def add_learning_rate_summaries():
    """Adds learning rate summaries. Only works when decaying learning rate is chosen."""
    learning_rate = get_tracked(tf.GraphKeys.LEARNING_RATE)
    if not learning_rate:
        return []

    return [
        get_summary(SummaryTypes.SCALAR, 'learning_rate', learning_rate[0])
    ]
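Both variants return `[]` unless a learning rate was tracked, which (per the docstring) only happens with a decaying rate. A sketch of the tracking side, assuming the library extends `tf.GraphKeys` with a custom `LEARNING_RATE` key and registers the decayed tensor when building the optimizer (assumptions; only the read path is shown above):

    import tensorflow as tf

    global_step = tf.train.get_or_create_global_step()
    learning_rate = tf.train.exponential_decay(
        learning_rate=0.1, global_step=global_step,
        decay_steps=1000, decay_rate=0.96)

    # Hypothetical key; stock TF1 GraphKeys has no LEARNING_RATE entry.
    tf.add_to_collection('learning_rate', learning_rate)

    # add_learning_rate_summaries() would now find one tracked tensor and
    # emit a single scalar summary; with a fixed float rate, nothing is
    # tracked and it returns [].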
Example #9
def add_exploration_rate_summaries():
    """Adds exploration rate summaries. Only works when decaying exploration rate is chosen."""
    exploration_rate = get_tracked(tf.GraphKeys.EXPLORATION_RATE)
    if not exploration_rate:
        return []

    return [
        get_summary(SummaryTypes.SCALAR, 'exploration_rate',
                    exploration_rate[0])
    ]
Example #10
def _summary_for_name(name):
    """Gets a summary for a given name.

    Args:
        name: `str`. The summary name.

    Returns:
        The summary if it exists or `None` otherwise

    """
    return get_tracked(tf.GraphKeys.SUMMARIES_BY_NAMES).get(name)
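`SUMMARIES_BY_NAMES` is used as a name-to-summary mapping rather than a plain list, so `.get(name)` works. A sketch of the write side this lookup implies (hypothetical; only the read path appears here):

    import tensorflow as tf

    SUMMARIES_BY_NAMES = {}  # stand-in for the tracked mapping

    def get_summary(summary_type, name, tensor):
        # Assumption: get_summary creates the tf.summary op and registers
        # it under its name for later lookup by _summary_for_name.
        op = tf.summary.scalar(name, tensor)
        SUMMARIES_BY_NAMES[name] = op
        return op

    get_summary('scalar', 'learning_rate', tf.constant(0.1))
    assert SUMMARIES_BY_NAMES.get('learning_rate') is not None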
Example #11
    def test_build_no_summaries(self):
        x = {'x': tf.placeholder(tf.float32, [2, 89])}
        y = tf.constant([[1], [1]])

        model = BaseModel(plx.Modes.TRAIN, graph_fn=self.get_dummy_graph_fn(),
                          loss_config=LossConfig(module='log_loss'),
                          optimizer_config=OptimizerConfig(module='adadelta'),
                          model_type=BaseModel.Types.CLASSIFIER, eval_metrics_config=[],
                          summaries=[], name='test')

        model(x, y, None, None)

        # No summaries are created
        summaries_by_names = get_tracked(collection=tf.GraphKeys.SUMMARIES_BY_NAMES)
        assert summaries_by_names == {}
Example #12
    def test_does_not_build_learning_rate_summaries_if_no_decay(self):
        x = {'x': tf.placeholder(tf.float32, [2, 89])}
        y = tf.constant([[1], [1]])

        model = BaseModel(plx.Modes.TRAIN, graph_fn=self.get_dummy_graph_fn(),
                          loss_config=LossConfig(module='log_loss'),
                          optimizer_config=OptimizerConfig(module='adadelta'),
                          model_type=BaseModel.Types.CLASSIFIER, eval_metrics_config=[],
                          summaries=['learning_rate'], name='test')

        model(x, y, None, None)

        # No learning rate summary is created without a decaying rate
        summaries_names = list(get_tracked(collection=tf.GraphKeys.SUMMARIES_BY_NAMES).keys())
        assert len(summaries_names) == 0
Example #13
    def test_build_variables_summaries(self):
        x = {'x': tf.placeholder(tf.float32, [2, 89])}
        y = tf.constant([[1], [1]])

        model = BaseModel(plx.Modes.TRAIN, graph_fn=self.get_dummy_graph_fn(),
                          loss_config=LossConfig(module='log_loss'),
                          optimizer_config=OptimizerConfig(module='adadelta'),
                          model_type=BaseModel.Types.CLASSIFIER, eval_metrics_config=[],
                          summaries=['variables'], name='test')

        model(x, y, None, None)

        # Only variable summaries are created
        variable_names = {var.op.name for var in tf.trainable_variables()}
        summaries_names = set(get_tracked(collection=tf.GraphKeys.SUMMARIES_BY_NAMES).keys())
        assert variable_names == summaries_names
Example #14
    def get_variables(self, collection=tf.GraphKeys.TRAINABLE_VARIABLES):
        """Returns a tuple of `tf.Variable`s declared inside this module.

        Note that this operates by searching this module's variable scope,
        and so does not know about any modules that were constructed
        elsewhere but used inside this module.

        Args:
            collection: Collection to restrict the query to. By default this
                is `tf.GraphKeys.TRAINABLE_VARIABLES`, which doesn't include
                non-trainable variables such as moving averages.

        Returns:
            A tuple of `tf.Variable` objects.

        Raises:
            NotConnectedError: If the module is not connected to the Graph.
        """
        return get_tracked(collection=collection,
                           scope=self.scope_name() + '/')
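The `scope=... + '/'` argument is what restricts the query to this module. A standalone equivalent with TF1's collection API (assuming `get_tracked` forwards `scope` to `tf.get_collection`):

    import tensorflow as tf

    with tf.variable_scope('my_module'):
        tf.get_variable('w', shape=[3])
    with tf.variable_scope('my_module_2'):
        tf.get_variable('w', shape=[3])

    # The trailing '/' stops the prefix match from also catching
    # sibling scopes such as 'my_module_2'.
    mine = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,
                             scope='my_module/')
    assert [v.op.name for v in mine] == ['my_module/w']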
Example #15
    def test_build_no_summaries(self):
        x = {'x': tf.placeholder(tf.float32, [2, 89])}
        y = tf.constant([[1], [1]])

        model = BaseModel(plx.Modes.TRAIN,
                          graph_fn=self.get_dummy_graph_fn(),
                          loss_config=LossConfig(module='log_loss'),
                          optimizer_config=OptimizerConfig(module='adadelta'),
                          model_type=BaseModel.Types.CLASSIFIER,
                          eval_metrics_config=[],
                          summaries=[],
                          name='test')

        model(x, y, None, None)

        # No summaries are created
        summaries_by_names = get_tracked(
            collection=tf.GraphKeys.SUMMARIES_BY_NAMES)
        assert summaries_by_names == {}
Example #16
    def _get_summary_op(self):
        """Fetches the summary op either from self._summary_op or tf.GraphKeys.EPISODE_SUMMARIES.

        Returns:
          Returns a list of summary `Tensor`.
        """
        if self._summary_op is not None:
            summary_op = self._summary_op
        elif self._scaffold.summary_op is not None:
            summary_op = self._scaffold.summary_op
        else:
            summary_op = get_tracked(tf.GraphKeys.EPISODE_SUMMARIES)

        if summary_op is None:
            return None

        if not isinstance(summary_op, list):
            return [summary_op]
        return summary_op
Example #17
    def _build_loss(self, results, features, labels):
        """Creates the loss operation

        Returns:
             tuple `(losses, loss)`:
                `losses` are the per-batch losses.
                `loss` is a single scalar tensor to minimize.
        """
        losses, loss = getters.get_loss(
            self.loss_config.module, results, labels, **self.loss_config.params)
        self._loss = loss
        self._losses = losses

        other_losses = get_tracked(tf.GraphKeys.REGULARIZATION_LOSSES)
        if other_losses:
            loss = [loss] + other_losses
            loss = tf.add_n(loss, name="TotalLoss")
            self._total_loss = loss
        return losses, loss
Example #18
    def test_does_not_build_learning_rate_summaries_if_no_decay(self):
        x = {'x': tf.placeholder(tf.float32, [2, 89])}
        y = tf.constant([[1], [1]])

        model = BaseModel(plx.Modes.TRAIN,
                          graph_fn=self.get_dummy_graph_fn(),
                          loss_config=LossConfig(module='log_loss'),
                          optimizer_config=OptimizerConfig(module='adadelta'),
                          model_type=BaseModel.Types.CLASSIFIER,
                          eval_metrics_config=[],
                          summaries=['learning_rate'],
                          name='test')

        model(x, y, None, None)

        # No learning rate summary is created without a decaying rate
        summaries_names = list(
            get_tracked(collection=tf.GraphKeys.SUMMARIES_BY_NAMES).keys())
        assert len(summaries_names) == 0
Example #19
    def _build_loss(self, results, features, labels):
        """Creates the loss operation

        Returns:
             tuple `(losses, loss)`:
                `losses` are the per-batch losses.
                `loss` is a single scalar tensor to minimize.
        """
        losses, loss = getters.get_loss(self.loss.IDENTIFIER, results, labels,
                                        **self.loss.to_dict())
        self._loss = loss
        self._losses = losses

        other_losses = get_tracked(tf.GraphKeys.REGULARIZATION_LOSSES)
        if other_losses:
            loss = [loss] + other_losses
            loss = tf.add_n(loss, name="TotalLoss")
            self._total_loss = loss
        return losses, loss
Example #20
    def _get_summary_op(self):
        """Fetches the summary op either from self._summary_op or tf.GraphKeys.EPISODE_SUMMARIES.

        Returns:
          Returns a list of summary `Tensor`.
        """
        if self._summary_op is not None:
            summary_op = self._summary_op
        elif self._scaffold.summary_op is not None:
            summary_op = self._scaffold.summary_op
        else:
            summary_op = get_tracked(tf.GraphKeys.EPISODE_SUMMARIES)

        if summary_op is None:
            return None

        if not isinstance(summary_op, list):
            return [summary_op]
        return summary_op
Example #21
    def test_build_variables_summaries(self):
        x = {'x': tf.placeholder(tf.float32, [2, 89])}
        y = tf.constant([[1], [1]])

        model = BaseModel(plx.Modes.TRAIN,
                          graph_fn=self.get_dummy_graph_fn(),
                          loss_config=LossConfig(module='log_loss'),
                          optimizer_config=OptimizerConfig(module='adadelta'),
                          model_type=BaseModel.Types.CLASSIFIER,
                          eval_metrics_config=[],
                          summaries=['variables'],
                          name='test')

        model(x, y, None, None)

        # Only variable summaries are created
        variable_names = {var.op.name for var in tf.trainable_variables()}
        summaries_names = set(
            get_tracked(collection=tf.GraphKeys.SUMMARIES_BY_NAMES).keys())
        assert variable_names == summaries_names
Example #22
    def test_build_gradients_summaries(self):
        x = {'x': tf.placeholder(tf.float32, [2, 89])}
        y = tf.constant([[1], [1]])

        model = BaseModel(plx.Modes.TRAIN,
                          graph_fn=self.get_dummy_graph_fn(),
                          loss=LogLossConfig(),
                          optimizer=AdadeltaConfig(),
                          model_type=BaseModel.Types.CLASSIFIER,
                          metrics=[],
                          summaries=['gradients'],
                          name='test')

        model(x, y, None, None)

        # Only gradient summaries are created
        summaries_by_names = get_tracked(
            collection=tf.GraphKeys.SUMMARIES_BY_NAMES)
        for s_name in summaries_by_names.keys():
            assert 'Gradient' in s_name
Example #23
    def _build_summary_op(self):
        summary_op = []
        for summary in self.summaries:
            if summary == summarizer.SummaryOptions.ACTIVATIONS:
                activations = get_tracked(tf.GraphKeys.ACTIVATIONS)
                summary_op += summarizer.add_activations_summary(activations)
            elif summary == summarizer.SummaryOptions.VARIABLES:
                variables = tf.trainable_variables()
                summary_op += summarizer.add_trainable_vars_summary(variables)
            elif summary == summarizer.SummaryOptions.GRADIENTS:
                summary_op += summarizer.add_gradients_summary(
                    self._grads_and_vars)
            elif summary == summarizer.SummaryOptions.LOSS:
                summary_op += summarizer.add_loss_summaries(
                    self._total_loss, self._loss)
            elif summary == summarizer.SummaryOptions.LEARNING_RATE:
                summary_op += summarizer.add_learning_rate_summaries()

        if summary_op:
            # Return the merged op; tf.summary.merge has no side effects,
            # so discarding its result would drop the summaries.
            return tf.summary.merge(summary_op)
Example #24
    def test_build_all_summaries(self):
        training.create_global_step()
        config = ModelConfig(loss_config=LossConfig(name='log_loss'),
                             optimizer_config=OptimizerConfig(
                                 name='Adadelta',
                                 decay_type='exponential_decay'))

        x = {'source_ids': tf.placeholder(tf.float32, [2, 89])}
        y = tf.constant([[1], [1]])

        model = BaseModel(plx.ModeKeys.TRAIN,
                          graph_fn=self.get_dummy_graph_fn(),
                          config=config,
                          model_type=BaseModel.Types.CLASSIFIER,
                          summaries='all',
                          name='test',
                          params=None)

        model(x, y, None, None)

        # Count the summaries created for each type
        learning_rate_summaries = 0
        activations_summaries = 0
        gradients_summaries = 0
        loss_summaries = 0

        for s_name in get_tracked(
                collection=tf.GraphKeys.SUMMARIES_BY_NAMES).keys():
            if 'learning_rate' in s_name:
                learning_rate_summaries += 1
            elif 'Activation' in s_name:
                activations_summaries += 1
            elif 'Loss' in s_name:
                loss_summaries += 1
            elif 'Gradient' in s_name:
                gradients_summaries += 1

        assert learning_rate_summaries > 0
        assert activations_summaries > 0
        assert gradients_summaries > 0
        assert loss_summaries > 0
Example #25
    def test_build_no_summaries(self):
        config = ModelConfig(loss_config=LossConfig(name='log_loss'),
                             optimizer_config=OptimizerConfig(name='Adadelta'))

        x = {'source_ids': tf.placeholder(tf.float32, [2, 89])}
        y = tf.constant([[1], [1]])

        model = BaseModel(plx.ModeKeys.TRAIN,
                          graph_fn=self.get_dummy_graph_fn(),
                          config=config,
                          model_type=BaseModel.Types.CLASSIFIER,
                          summaries=[],
                          name='test',
                          params=None)

        model(x, y, None, None)

        # No summaries are created
        summaries_by_names = get_tracked(
            collection=tf.GraphKeys.SUMMARIES_BY_NAMES)
        assert summaries_by_names == {}
Example #26
    def test_build_loss_summaries(self):
        x = {'source_ids': tf.placeholder(tf.float32, [2, 89])}
        y = tf.constant([[1], [1]])

        model = BaseModel(plx.ModeKeys.TRAIN,
                          graph_fn=self.get_dummy_graph_fn(),
                          loss_config=LossConfig(name='log_loss'),
                          optimizer_config=OptimizerConfig(name='Adadelta'),
                          model_type=BaseModel.Types.CLASSIFIER,
                          eval_metrics_config=[],
                          summaries=['loss'],
                          name='test',
                          params=None)

        model(x, y, None, None)

        # Only loss summaries are created
        summaries_by_names = get_tracked(
            collection=tf.GraphKeys.SUMMARIES_BY_NAMES)
        for s_name in summaries_by_names.keys():
            assert 'Loss' in s_name
Example #27
    def test_build_learning_rate_summaries(self):
        training.create_global_step()
        x = {'x': tf.placeholder(tf.float32, [2, 89])}
        y = tf.constant([[1], [1]])

        model = BaseModel(
            plx.Modes.TRAIN,
            graph_fn=self.get_dummy_graph_fn(),
            loss=LogLossConfig(),
            optimizer=AdadeltaConfig(decay_type='exponential_decay'),
            model_type=BaseModel.Types.CLASSIFIER,
            metrics=[],
            summaries=['learning_rate'],
            name='test')

        model(x, y, None, None)

        # Only the learning rate summary is created
        summaries_names = list(
            get_tracked(collection=tf.GraphKeys.SUMMARIES_BY_NAMES).keys())
        assert len(summaries_names) == 1
        assert summaries_names[0] == 'learning_rate'
Example #28
def add_loss_summaries(total_loss, loss):
    """Adds loss scalar summaries.

    Args:
        total_loss: `Tensor`. The total loss (Regression loss + regularization losses).
        loss: `Tensor`. Regression loss.

    Returns:
        The list of created loss summaries.
    """
    summaries = []

    if total_loss is not None:
        summaries.append(
            get_summary(SummaryTypes.SCALAR, total_loss.op.name, total_loss))

    summaries.append(get_summary(SummaryTypes.SCALAR, 'Loss', loss))

    for regu_loss in get_tracked(tf.GraphKeys.REGULARIZATION_LOSSES):
        summaries.append(
            get_summary(SummaryTypes.SCALAR, regu_loss.op.name, regu_loss))
    return summaries
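A hedged usage sketch for the helper above: with one tracked regularization term it emits three scalars, the total loss under its op name, the raw loss as 'Loss', and the regularization term (this assumes `get_summary` builds and returns a `tf.summary` op, which isn't shown here):

    import tensorflow as tf

    loss = tf.constant(0.3, name='log_loss')
    regu = tf.constant(0.01, name='l2')
    tf.add_to_collection(tf.GraphKeys.REGULARIZATION_LOSSES, regu)
    total_loss = tf.add_n([loss, regu], name='TotalLoss')

    # Relies on add_loss_summaries and get_summary from the example above.
    summaries = add_loss_summaries(total_loss, loss)
    assert len(summaries) == 3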
Example #29
    def _build_loss(self, results, features, labels):
        """Creates the loss operation

        Returns:
             tuple `(losses, loss)`:
                `losses` are the per-batch losses.
                `loss` is a single scalar tensor to minimize.
        """
        reward, action, done = labels['reward'], labels['action'], labels['done']
        discount_reward = labels['discount_reward']

        log_probs = self._graph_results.distribution.log_prob(action)
        losses = tf.multiply(x=log_probs, y=discount_reward)
        loss = -tf.reduce_mean(losses, axis=0, name='loss')

        self._losses = losses
        self._loss = loss

        other_losses = get_tracked(tf.GraphKeys.REGULARIZATION_LOSSES)
        if other_losses:
            loss = [loss] + other_losses
            loss = tf.add_n(loss, name="TotalLoss")
            self._total_loss = loss
        return losses, loss