Example 1
    def _setup_metrics(self):
        """Build the per-instance metric table for this sequence feature.

        Assigning a fresh dict here shadows the class-level attribute so
        metrics are not shared across feature instances. The loss metric's
        ``from_logits`` flag depends on the decoder: the generator decoder
        with beam search active (beam_width > 1) emits predictions rather
        than logits, so the metric must not expect logits.
        """
        beam_search_active = (
            self.decoder == 'generator' and self.decoder_obj.beam_width > 1
        )
        self.metric_functions = {
            LOSS: SequenceLossMetric(from_logits=not beam_search_active),
            TOKEN_ACCURACY: TokenAccuracyMetric(),
            SEQUENCE_ACCURACY: SequenceAccuracyMetric(),
            LAST_ACCURACY: SequenceLastAccuracyMetric(),
            PERPLEXITY: PerplexityMetric(),
            EDIT_DISTANCE: EditDistanceMetric(),
        }
Example 2
    def _setup_loss(self):
        """Select the training loss from the feature's loss config.

        Supports 'softmax_cross_entropy' and 'sampled_softmax_cross_entropy';
        any other configured type raises ValueError. The evaluation loss is
        always a plain sequence loss metric, independent of the training loss.
        """
        loss_type = self.loss[TYPE]
        if loss_type == 'sampled_softmax_cross_entropy':
            # Sampled softmax needs decoder internals and the class count
            # to draw negative samples.
            self.train_loss_function = SampledSoftmaxCrossEntropyLoss(
                decoder_obj=self.decoder_obj,
                num_classes=self.num_classes,
                feature_loss=self.loss,
                name='train_loss')
        elif loss_type == 'softmax_cross_entropy':
            self.train_loss_function = SequenceLoss()
        else:
            raise ValueError("Loss type {} is not supported. Valid values are "
                             "'softmax_cross_entropy' or "
                             "'sampled_softmax_cross_entropy'".format(loss_type))

        self.eval_loss_function = SequenceLossMetric()