Code Example #1
File: slflattice.py Project: senarvi/theanolm
    def _read_slf_header(self, fields):
        """Reads SLF lattice header fields and saves them in member variables.

        :type fields: list of strs
        :param fields: fields, such as name="value"
        """
        
        for field in fields:
            name, value = self._split_slf_field(field)
            if (name == 'UTTERANCE') or (name == 'U'):
                self.utterance_id = value
            elif (name == 'SUBLAT') or (name == 'S'):
                raise InputError("Sub-lattices are not supported.")
            elif name == 'base':
                value = numpy.float64(value)
                if value == 0.0:
                    self._log_scale = None
                else:
                    self._log_scale = logprob_type(numpy.log(value))
            elif name == 'lmscale':
                self.lm_scale = logprob_type(value)
            elif name == 'wdpenalty':
                self.wi_penalty = logprob_type(value)
            elif name == 'start':
                self._initial_node_id = int(value)
            elif name == 'end':
                self._final_node_id = int(value)
            elif (name == 'NODES') or (name == 'N'):
                self._num_nodes = int(value)
            elif (name == 'LINKS') or (name == 'L'):
                self._num_links = int(value)
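
The helper ``_split_slf_field`` is not shown on this page, so here is a minimal, hypothetical stand-in (not taken from theanolm) that illustrates how a header line such as ``lmscale=14.0  wdpenalty=0.0`` is turned into the name/value fields that ``_read_slf_header`` consumes:

# Hypothetical stand-in for _split_slf_field; the real helper in theanolm may
# handle quoting and malformed fields differently.
def split_slf_field(field):
    # Each SLF field has the form name=value or name="value".
    name, _, value = field.partition('=')
    return name, value.strip('"')

header_line = 'UTTERANCE=utt1 lmscale=14.0 wdpenalty=0.0 N=3 L=2'
print([split_slf_field(field) for field in header_line.split()])
# [('UTTERANCE', 'utt1'), ('lmscale', '14.0'), ('wdpenalty', '0.0'),
#  ('N', '3'), ('L', '2')]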
Code Example #2
File: slflattice.py Project: haiphong129/theanolm
    def _read_slf_header(self, fields):
        """Reads SLF lattice header fields and saves them in member variables.

        :type fields: list of strs
        :param fields: fields, such as name="value"
        """

        for field in fields:
            name, value = _split_slf_field(field)
            if (name == 'UTTERANCE') or (name == 'U'):
                self.utterance_id = value
            elif (name == 'SUBLAT') or (name == 'S'):
                raise InputError("Sub-lattices are not supported.")
            elif name == 'base':
                value = numpy.float64(value)
                if value == 0.0:
                    self._log_scale = None
                else:
                    self._log_scale = logprob_type(numpy.log(value))
            elif name == 'lmscale':
                self.lm_scale = logprob_type(value)
            elif name == 'wdpenalty':
                self.wi_penalty = logprob_type(value)
            elif name == 'start':
                self._initial_node_id = int(value)
            elif name == 'end':
                self._final_node_id = int(value)
            elif (name == 'NODES') or (name == 'N'):
                self._num_nodes = int(value)
            elif (name == 'LINKS') or (name == 'L'):
                self._num_links = int(value)
Code Example #3
File: slflattice.py Project: haiphong129/theanolm
    def _read_slf_link(self, link_id, fields):
        """Reads SLF lattice link fields and creates such link.

        :type link_id: int
        :param link_id: ID of the link

        :type fields: list of strs
        :param fields: the rest of the link fields after ID
        """

        start_node = None
        end_node = None
        word = None
        ac_logprob = None
        lm_logprob = None

        for field in fields:
            name, value = _split_slf_field(field)
            if (name == 'START') or (name == 'S'):
                start_node = self.nodes[int(value)]
            elif (name == 'END') or (name == 'E'):
                end_node = self.nodes[int(value)]
            elif (name == 'WORD') or (name == 'W'):
                word = value
            elif (name == 'acoustic') or (name == 'a'):
                if self._log_scale is None:
                    ac_logprob = logprob_type(numpy.log(numpy.float64(value)))
                else:
                    ac_logprob = logprob_type(value) * self._log_scale
            elif (name == 'language') or (name == 'l'):
                if self._log_scale is None:
                    lm_logprob = logprob_type(numpy.log(numpy.float64(value)))
                else:
                    lm_logprob = logprob_type(value) * self._log_scale

        if start_node is None:
            raise InputError(
                "Start node is not specified for link {}.".format(link_id))
        if end_node is None:
            raise InputError(
                "End node is not specified for link {}.".format(link_id))
        link = self._add_link(start_node, end_node)
        link.word = word
        link.ac_logprob = ac_logprob
        link.lm_logprob = lm_logprob
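
The acoustic (``a=``) and language (``l=``) scores of a link are interpreted according to ``self._log_scale``: when it is ``None`` the lattice stores linear probabilities that have to be converted to the log domain, otherwise the stored log scores are multiplied by the scale (the natural log of the base given in the header). A small standalone sketch of that branch, assuming for illustration that ``logprob_type`` is simply ``numpy.float64``:

import numpy

logprob_type = numpy.float64  # assumption for this sketch only

def scale_link_score(value, log_scale):
    # Mirrors the acoustic/language branches of _read_slf_link above.
    if log_scale is None:
        # The lattice stores linear probabilities; convert to natural log.
        return logprob_type(numpy.log(numpy.float64(value)))
    # The lattice stores log scores; log_scale = log(base) converts them to
    # natural log.
    return logprob_type(value) * log_scale

print(scale_link_score('-2.71', logprob_type(numpy.log(10.0))))  # base-10 log score
print(scale_link_score('0.066', None))                           # linear probability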
Code Example #4
File: slflattice.py Project: senarvi/theanolm
    def _read_slf_link(self, link_id, fields):
        """Reads SLF lattice link fields and creates such link.

        :type link_id: int
        :param link_id: ID of the link

        :type fields: list of strs
        :param fields: the rest of the link fields after ID
        """

        start_node = None
        end_node = None
        word = None
        ac_logprob = None
        lm_logprob = None

        for field in fields:
            name, value = self._split_slf_field(field)
            if (name == 'START') or (name == 'S'):
                start_node = self.nodes[int(value)]
            elif (name == 'END') or (name == 'E'):
                end_node = self.nodes[int(value)]
            elif (name == 'WORD') or (name == 'W'):
                word = value
            elif (name == 'acoustic') or (name == 'a'):
                if self._log_scale is None:
                    ac_logprob = logprob_type(numpy.log(numpy.float64(value)))
                else:
                    ac_logprob = logprob_type(value) * self._log_scale
            elif (name == 'language') or (name == 'l'):
                if self._log_scale is None:
                    lm_logprob = logprob_type(numpy.log(numpy.float64(value)))
                else:
                    lm_logprob = logprob_type(value) * self._log_scale

        if start_node is None:
            raise InputError("Start node is not specified for link %d.".format(
                             link_id))
        if end_node is None:
            raise InputError("End node is not specified for link %d.".format(
                             link_id))
        link = self._add_link(start_node, end_node)
        link.word = word
        link.ac_logprob = ac_logprob
        link.lm_logprob = lm_logprob
Code Example #5
        def __init__(self,
                     history=None,
                     state=None,
                     ac_logprob=logprob_type(0.0),
                     lat_lm_logprob=logprob_type(0.0),
                     nn_lm_logprob=logprob_type(0.0)):
            """Constructs a token with given recurrent state and logprobs.

            The constructor won't compute the total logprob. The user is
            responsible for computing it when necessary, to avoid unnecessary
            overhead.

            New tokens will not have recombination hash and total log
            probability set.

            :type history: list of ints
            :param history: word IDs that the token has passed

            :type state: RecurrentState
            :param state: the state of the recurrent layers for a single
                          sequence

            :type ac_logprob: logprob_type
            :param ac_logprob: sum of the acoustic log probabilities of the
                               lattice links

            :type lat_lm_logprob: logprob_type
            :param lat_lm_logprob: sum of the LM log probabilities of the
                                   lattice links

            :type nn_lm_logprob: logprob_type
            :param nn_lm_logprob: sum of the NNLM log probabilities of the
                                  lattice links
            """

            self.history = [] if history is None else history
            self.state = [] if state is None else state
            self.ac_logprob = ac_logprob
            self.lat_lm_logprob = lat_lm_logprob
            self.nn_lm_logprob = nn_lm_logprob
            self.recombination_hash = None
            self.lm_logprob = None
            self.total_logprob = None
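
The ``decode()`` method (Code Example #7 below) shows the intended usage: only the word history and recurrent state are passed to the constructor, and the recombination hash and total log probability are computed immediately afterwards, because the constructor deliberately leaves them unset:

# Excerpt from decode() in Code Example #7 below.
initial_token = self.Token(history=[self._sos_id], state=initial_state)
initial_token.recompute_hash(self._recombination_order)
initial_token.recompute_total(self._nnlm_weight, lm_scale, wi_penalty,
                              self._linear_interpolation)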
Code Example #6
File: slflattice.py Project: haiphong129/theanolm
    def __init__(self, lattice_file):
        """Reads an SLF lattice file.

        If ``lattice_file`` is ``None``, creates an empty lattice (useful for
        testing).

        :type lattice_file: file object
        :param lattice_file: a file in SLF lattice format
        """

        super().__init__()

        # No log conversion by default. "None" means the lattice file uses
        # linear probabilities.
        self._log_scale = logprob_type(1.0)

        self._initial_node_id = None
        self._final_node_id = None

        if lattice_file is None:
            self._num_nodes = 0
            self._num_links = 0
            return

        self._num_nodes = None
        self._num_links = None
        for line in lattice_file:
            fields = _split_slf_line(line)
            self._read_slf_header(fields)
            if (self._num_nodes is not None) and (self._num_links is not None):
                break

        if self.wi_penalty is not None:
            if self._log_scale is None:
                self.wi_penalty = numpy.log(self.wi_penalty)
            else:
                self.wi_penalty *= self._log_scale

        self.nodes = [self.Node(node_id) for node_id in range(self._num_nodes)]

        for line in lattice_file:
            fields = _split_slf_line(line)
            if not fields:
                continue
            name, value = _split_slf_field(fields[0])
            if name == 'I':
                self._read_slf_node(int(value), fields[1:])
            elif name == 'J':
                self._read_slf_link(int(value), fields[1:])

        if len(self.links) != self._num_links:
            raise InputError(
                "Number of links in SLF lattice doesn't match the "
                "LINKS field.")

        if self._initial_node_id is not None:
            self.initial_node = self.nodes[self._initial_node_id]
        else:
            # Find the node with no incoming links.
            self.initial_node = None
            for node in self.nodes:
                if len(node.in_links) == 0:
                    self.initial_node = node
                    break
            if self.initial_node is None:
                raise InputError("Could not find initial node in SLF lattice.")

        if self._final_node_id is not None:
            self.final_node = self.nodes[self._final_node_id]
        else:
            # Find the node with no outgoing links.
            self.final_node = None
            for node in self.nodes:
                if len(node.out_links) == 0:
                    self.final_node = node
                    break
            if self.final_node is None:
                raise InputError("Could not find final node in SLF lattice.")

        # If word identity information is not present in node definitions then
        # it must appear in link definitions.
        self._move_words_to_links()
        for link in self.links:
            if link.word is None:
                raise InputError("SLF lattice does not contain word identity "
                                 "in link {} or in the following node.".format(
                                     link.id))
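
To illustrate what this constructor expects from ``lattice_file``, here is a hypothetical three-node, two-link lattice in HTK SLF format; the class name ``SLFLattice`` is an assumption, and real lattices carry more fields (times, pronunciation variants, scores) per node and link:

import io

slf_text = (
    'UTTERANCE=utt1\n'
    'lmscale=14.0  wdpenalty=0.0\n'
    'N=3  L=2\n'
    'I=0\n'
    'I=1\n'
    'I=2\n'
    'J=0  S=0  E=1  W=<s>     a=0.0      l=0.0\n'
    'J=1  S=1  E=2  W=hello   a=-1543.2  l=-2.71\n'
)

# Words are given on the links, so _move_words_to_links finds them there.
lattice = SLFLattice(io.StringIO(slf_text))            # class name assumed
print(lattice.initial_node.id, lattice.final_node.id)  # 0 2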
Code Example #7
    def decode(self, lattice):
        """Propagates tokens through given lattice and returns a list of tokens
        in the final node.

        Propagates tokens at a node to every outgoing link by creating a copy of
        each token and updating the language model scores according to the link.

        :type lattice: Lattice
        :param lattice: a word lattice to be decoded

        :rtype: list of LatticeDecoder.Tokens
        :returns: the final tokens sorted by total log probability in descending
                  order
        """

        if self._lm_scale is not None:
            lm_scale = logprob_type(self._lm_scale)
        elif lattice.lm_scale is not None:
            lm_scale = logprob_type(lattice.lm_scale)
        else:
            lm_scale = logprob_type(1.0)

        if self._wi_penalty is not None:
            wi_penalty = logprob_type(self._wi_penalty)
        elif lattice.wi_penalty is not None:
            wi_penalty = logprob_type(lattice.wi_penalty)
        else:
            wi_penalty = logprob_type(0.0)

        self._tokens = [list() for _ in lattice.nodes]
        initial_state = RecurrentState(self._network.recurrent_state_size)
        initial_token = self.Token(history=[self._sos_id], state=initial_state)
        initial_token.recompute_hash(self._recombination_order)
        initial_token.recompute_total(self._nnlm_weight, lm_scale, wi_penalty,
                                      self._linear_interpolation)
        self._tokens[lattice.initial_node.id].append(initial_token)
        lattice.initial_node.best_logprob = initial_token.total_logprob

        self._sorted_nodes = lattice.sorted_nodes()
        nodes_processed = 0
        for node in self._sorted_nodes:
            node_tokens = self._tokens[node.id]
            assert node_tokens
            num_pruned_tokens = len(node_tokens)
            self._prune(node)
            node_tokens = self._tokens[node.id]
            assert node_tokens
            num_pruned_tokens -= len(node_tokens)

            if node.id == lattice.final_node.id:
                new_tokens = self._propagate(node_tokens, None, lm_scale,
                                             wi_penalty)
                return sorted(new_tokens,
                              key=lambda token: token.total_logprob,
                              reverse=True)

            num_new_tokens = 0
            for link in node.out_links:
                new_tokens = self._propagate(node_tokens, link, lm_scale,
                                             wi_penalty)
                self._tokens[link.end_node.id].extend(new_tokens)
                num_new_tokens += len(new_tokens)

            nodes_processed += 1
            if nodes_processed % math.ceil(len(self._sorted_nodes) / 20) == 0:
                logging.debug("[%d] (%.2f %%) -- tokens = %d +%d -%d",
                              nodes_processed,
                              nodes_processed / len(self._sorted_nodes) * 100,
                              len(node_tokens), num_new_tokens,
                              num_pruned_tokens)

        raise InputError("Could not reach the final node of word lattice.")
Code Example #8
    def __init__(self, network, decoding_options, profile=False):
        """Creates a Theano function that computes the output probabilities for
        a single time step.

        Creates the function self._step_function that takes as input a set of
        word sequences and the current recurrent states. It uses the previous
        states and word IDs to compute the output distributions, and computes
        the probabilities of the target words.

        All invocations of ``decode()`` will use the given NNLM weight and LM
        scale when computing the total probability. If LM scale is not given,
        uses the value provided in the lattice files. If it's not provided in a
        lattice file either, performs no scaling of LM log probabilities.

        ``decoding_options`` should contain the following elements:

        nnlm_weight : float
          weight of the neural network probabilities when interpolating with the
          lattice probabilities

        lm_scale : float
          if other than ``None``, the decoder will scale language model log
          probabilities by this factor; otherwise the scaling factor will be
          read from the lattice file

        wi_penalty : float
          penalize word insertion by adding this value to the total log
          probability of a token as many times as there are words

        ignore_unk : bool
          if set to ``True``, <unk> tokens are excluded from perplexity
          computation

        unk_penalty : float
          if set to other than ``None``, used as the <unk> token score

        linear_interpolation : bool
          if set to ``True``, use linear instead of (pseudo) log-linear
          interpolation of language model probabilities

        max_tokens_per_node : int
          if set to other than None, leave only this many tokens at each node

        beam : float
          if set to other than None, prune tokens whose total log probability is
          further than this from the best token at each point in time

        recombination_order : int
          number of words to consider when deciding whether two tokens should be
          recombined, or ``None`` for the entire word history

        :type network: Network
        :param network: the neural network object

        :type decoding_options: dict
        :param decoding_options: a dictionary of decoding options (see above)

        :type profile: bool
        :param profile: if set to True, creates a Theano profile object
        """

        self._network = network
        self._vocabulary = network.vocabulary
        self._nnlm_weight = logprob_type(decoding_options['nnlm_weight'])
        self._lm_scale = decoding_options['lm_scale']
        self._wi_penalty = decoding_options['wi_penalty']
        self._ignore_unk = decoding_options['ignore_unk']
        self._unk_penalty = decoding_options['unk_penalty']
        self._linear_interpolation = decoding_options['linear_interpolation']
        self._max_tokens_per_node = decoding_options['max_tokens_per_node']
        self._beam = decoding_options['beam']
        if self._beam is not None:
            self._beam = logprob_type(self._beam)
        self._recombination_order = decoding_options['recombination_order']

        self._sos_id = self._vocabulary.word_to_id['<s>']
        self._eos_id = self._vocabulary.word_to_id['</s>']
        self._unk_id = self._vocabulary.word_to_id['<unk>']

        inputs = [
            network.input_word_ids, network.input_class_ids,
            network.target_class_ids
        ]
        inputs.extend(network.recurrent_state_input)

        outputs = [tensor.log(network.target_probs())]
        outputs.extend(network.recurrent_state_output)

        # Ignore unused input, because is_training is only used by the
        # dropout layer.
        self._step_function = theano.function(inputs,
                                              outputs,
                                              givens=[(network.is_training,
                                                       numpy.int8(0))],
                                              name='step_predictor',
                                              profile=profile,
                                              on_unused_input='ignore')

        self._tokens = None
        self._sorted_nodes = None
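
A hypothetical ``decoding_options`` dictionary containing every key that this constructor reads; the values are only illustrative and the class name ``LatticeDecoder`` is an assumption:

decoding_options = {
    'nnlm_weight': 0.5,             # interpolate NNLM and lattice LM equally
    'lm_scale': None,               # take the LM scale from each lattice file
    'wi_penalty': None,             # no extra word insertion penalty
    'ignore_unk': False,
    'unk_penalty': None,
    'linear_interpolation': False,  # (pseudo) log-linear interpolation
    'max_tokens_per_node': 64,      # keep at most 64 tokens per node
    'beam': 600.0,                  # prune tokens far from the current best
    'recombination_order': 20,      # compare the last 20 words when recombining
}
decoder = LatticeDecoder(network, decoding_options)  # class name assumed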
Code Example #9
File: slflattice.py Project: senarvi/theanolm
    def __init__(self, lattice_file):
        """Reads an SLF lattice file.

        If ``lattice_file`` is ``None``, creates an empty lattice (useful for
        testing).

        :type lattice_file: file object
        :param lattice_file: a file in SLF lattice format
        """

        super().__init__()

        # No log conversion by default. "None" means the lattice file uses
        # linear probabilities.
        self._log_scale = logprob_type(1.0)

        self._initial_node_id = None
        self._final_node_id = None

        if lattice_file is None:
            self._num_nodes = 0
            self._num_links = 0
            return

        self._num_nodes = None
        self._num_links = None
        for line in lattice_file:
            fields = self._split_slf_line(line)
            self._read_slf_header(fields)
            if (self._num_nodes is not None) and (self._num_links is not None):
                break

        if self.wi_penalty is not None:
            if self._log_scale is None:
                self.wi_penalty = numpy.log(self.wi_penalty)
            else:
                self.wi_penalty *= self._log_scale

        self.nodes = [self.Node(node_id) for node_id in range(self._num_nodes)]

        for line in lattice_file:
            fields = self._split_slf_line(line)
            if not fields:
                continue
            name, value = self._split_slf_field(fields[0])
            if name == 'I':
                self._read_slf_node(int(value), fields[1:])
            elif name == 'J':
                self._read_slf_link(int(value), fields[1:])

        if len(self.links) != self._num_links:
            raise InputError("Number of links in SLF lattice doesn't match the "
                             "LINKS field.")

        if self._initial_node_id is not None:
            self.initial_node = self.nodes[self._initial_node_id]
        else:
            # Find the node with no incoming links.
            self.initial_node = None
            for node in self.nodes:
                if len(node.in_links) == 0:
                    self.initial_node = node
                    break
            if self.initial_node is None:
                raise InputError("Could not find initial node in SLF lattice.")

        if self._final_node_id is not None:
            self.final_node = self.nodes[self._final_node_id]
        else:
            # Find the node with no outgoing links.
            self.final_node = None
            for node in self.nodes:
                if len(node.out_links) == 0:
                    self.final_node = node
                    break
            if self.final_node is None:
                raise InputError("Could not find final node in SLF lattice.")

        # If word identity information is not present in node definitions then
        # it must appear in link definitions.
        self._move_words_to_links()
        for link in self.links:
            if link.word is None:
                raise InputError("SLF lattice does not contain word identity "
                                 "in link %d or in the following node.".format(
                                 link.id))