# Imports for the names used below (standard blocks/theano locations);
# MergeReadout is assumed to be defined alongside this class in the same
# blocks-extras module, so it is not imported here.
from theano import tensor

from blocks.bricks import NDimensionalSoftmax, Random
from blocks.bricks.base import application
from blocks.utils import dict_subset


class SoftmaxReadout(MergeReadout, Random):
    """Readout that scores and samples tokens via a softmax over `num_tokens`."""

    def __init__(self, num_tokens, **kwargs):
        kwargs['post_merge_dim'] = num_tokens
        super(SoftmaxReadout, self).__init__(**kwargs)
        self.num_tokens = num_tokens
        self.softmax = NDimensionalSoftmax()
        self.children += [self.softmax]

        # Declare the named inputs of each application; each application
        # also receives the merged inputs declared by the MergeReadout base
        # (self.input_names).
        self.costs.inputs = [
            'prediction', 'prediction_mask',
            'groundtruth', 'groundtruth_mask']
        self.all_scores.inputs = ['prediction']
        self.scores.inputs = []
        self.sample.inputs = []
        for application_method in [self.costs, self.all_scores,
                                   self.scores, self.sample]:
            application_method.inputs += self.input_names

        self.sample.outputs = ['samples', 'scores']

    @application
    def costs(self, prediction, prediction_mask,
              groundtruth, groundtruth_mask, **inputs):
        # Negative log-likelihood of the predicted tokens, summed over the
        # time axis; padded positions are zeroed out by the mask (a numpy
        # reference sketch follows the class).
        log_probs = self.all_scores(
            prediction, self.merge(**dict_subset(inputs, self.merge_names)))
        if prediction_mask is None:
            # No mask given: every position contributes to the cost.
            # (Testing a Theano variable for truthiness would raise,
            # so compare against None instead of using `not`.)
            prediction_mask = 1
        return -(log_probs * prediction_mask).sum(axis=0)

    @application
    def all_scores(self, prediction, merged):
        # Log-probability of each predicted token given the merged inputs
        # (negated categorical cross-entropy against the predictions).
        return -self.softmax.categorical_cross_entropy(
            prediction, merged, extra_ndim=1)

    @application
    def scores(self, **inputs):
        # Log-probabilities over all tokens for the current merged inputs.
        return self.softmax.log_probabilities(self.merge(
            **dict_subset(inputs, self.merge_names)))

    @application
    def sample(self, **inputs):
        # Draw one token per batch element from the categorical distribution
        # and return it together with its log-probability (a numpy sampling
        # sketch follows below).
        scores = self.scores(**inputs)
        probs = tensor.exp(scores)
        sample = self.theano_rng.multinomial(pvals=probs).argmax(axis=1)
        return sample, scores[tensor.arange(probs.shape[0]), sample]

    def get_dim(self, name):
        # Samples and their scores are scalars per sequence element.
        if name in ('samples', 'scores'):
            return 0
        return super(SoftmaxReadout, self).get_dim(name)
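
For reference, here is a standalone numpy sketch (not part of the library; the helper name `masked_nll` and the shapes are illustrative assumptions) of the quantity that `all_scores` and `costs` compute: a log-softmax over the token dimension, the log-probability of each predicted token, and the masked negative sum over the time axis.

import numpy as np

def masked_nll(logits, prediction, prediction_mask):
    """Reference version of what SoftmaxReadout.costs computes.

    logits:          (time, batch, num_tokens) merged pre-softmax scores
    prediction:      (time, batch) integer token indices
    prediction_mask: (time, batch) 1.0 for real steps, 0.0 for padding
    """
    # numerically stable log-softmax over the token dimension
    shifted = logits - logits.max(axis=-1, keepdims=True)
    log_probs = shifted - np.log(np.exp(shifted).sum(axis=-1, keepdims=True))
    # log-probability assigned to each predicted token
    t, b = np.indices(prediction.shape)
    token_log_probs = log_probs[t, b, prediction]
    # zero out padding and sum over time -> one cost per sequence
    return -(token_log_probs * prediction_mask).sum(axis=0)

# toy usage: 2 time steps, 1 sequence, 3 tokens
logits = np.random.randn(2, 1, 3)
prediction = np.array([[0], [2]])
mask = np.ones((2, 1))
print(masked_nll(logits, prediction, mask))  # -> array of shape (1,)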
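
Similarly, a small numpy sketch of what `sample` does per batch row; `sample_tokens` and the use of numpy's `default_rng` are illustrative assumptions, not the library's API.

import numpy as np

def sample_tokens(scores, rng):
    """Mirror of SoftmaxReadout.sample: `scores` holds log-probabilities of
    shape (batch, num_tokens); draw one token index per row and return it
    together with the log-probability that was assigned to it."""
    probs = np.exp(scores)
    # one multinomial draw per row yields a one-hot vector; argmax recovers
    # the sampled index, like theano_rng.multinomial(...).argmax(axis=1)
    onehot = np.stack([rng.multinomial(1, p / p.sum()) for p in probs])
    samples = onehot.argmax(axis=1)
    return samples, scores[np.arange(len(samples)), samples]

# toy usage: batch of 2, vocabulary of 3 tokens
rng = np.random.default_rng(0)
log_probs = np.log(np.array([[0.7, 0.2, 0.1], [0.1, 0.1, 0.8]]))
print(sample_tokens(log_probs, rng))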