def call(self, dct):
   """ Closure.
   """
   # All the hard elements are already coded in mapping.
   # Here is how the mapping procedure works in high frequency:
   # At first, a new point is added to the beginning, with empty paths.
   # When a new point (with its paths) arrives, two cases:
   # We were right after the start point, then we simply add
   # the path and the point. Otherwise, we decimate paths and points,
   # and we merge the paths together.
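   # Illustrative timeline, assuming self.decimation_factor == 3 (the point
   # labels P0..P3 are hypothetical):
   #   call 0: point P0                  -> emit ([], [], [], P0) and restart
   #   call 1: paths P0->P1, point P1    -> decimate and store
   #   call 2: paths P1->P2, point P2    -> decimate, merge into P0->P2
   #   call 3: paths P2->P3, point P3    -> decimate, merge, emit P0->P3, restart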
   (trans1, paths_dct, trans2, sc_dct) = dct
   paths = [decode_Path(path_dct) for path_dct in paths_dct]
   del paths_dct
   sc = decode_StateCollection(sc_dct)
   del sc_dct
   # The index of sc
   point_idx = 2 * self.count
   # The index of the paths
   path_idx = 2 * self.count - 1
   self.count += 1
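   # Decimate the new point: keep only the most probable states (the
   # Viterbi-optimal one is always preserved, see the assert below) and get
   # a mapping from original state indexes to decimated ones.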
   (new_decim_sc, new_end_mapping) = \
     decimate_point(sc, self.probas[point_idx], self.viterbi_idxs[point_idx])
   new_most_likely_sc_idx = None
   if point_idx >= 0:
     assert self.viterbi_idxs[point_idx] in new_end_mapping, \
       (point_idx, new_end_mapping)
     new_most_likely_sc_idx = new_end_mapping[self.viterbi_idxs[point_idx]]
   # Is this the first element?
   if not self.start_point:
     self.start_point = new_decim_sc
     self.start_mapping = new_end_mapping
     assert new_most_likely_sc_idx is not None
     self.most_likely_indexes.append(new_most_likely_sc_idx)
     return ([], [], [], encode_StateCollection(self.start_point))
   # Try to add a new element:
   if self.paths is None:
     assert self.start_mapping is not None
     assert self.start_point is not None
     # Start a new set of paths
     (new_trans1, decimated_paths, new_trans2, paths_mapping) = \
       decimate_path_simple(self.start_mapping, trans1, paths, \
                            trans2, new_end_mapping)
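     # Sanity checks: the Viterbi-optimal path and point must survive the
     # decimation, and the transitions linking them must still be present.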
     assert self.viterbi_idxs[point_idx] in new_end_mapping
     assert self.viterbi_idxs[path_idx] in paths_mapping
     assert (self.start_mapping[self.viterbi_idxs[path_idx-1]], \
               paths_mapping[self.viterbi_idxs[path_idx]]) in new_trans1
     assert (paths_mapping[self.viterbi_idxs[path_idx]], \
               new_end_mapping[self.viterbi_idxs[point_idx]]) in new_trans2
     self.end_point = new_decim_sc
     self.end_mapping = new_end_mapping
     self.start_trans = new_trans1
     self.paths = decimated_paths
     assert self.paths, self.paths
     self.best_idx = paths_mapping[self.viterbi_idxs[path_idx]]
     self.end_trans = new_trans2
   else:
     assert self.start_mapping is not None
     assert self.start_point is not None
     assert self.start_trans is not None
     assert self.end_trans is not None
     assert self.paths is not None
     # First decimate the paths
     (new_trans1, decimated_paths, new_trans2, paths_mapping) = \
       decimate_path_simple(self.end_mapping, trans1, paths, trans2, \
                            new_end_mapping)
     assert self.viterbi_idxs[path_idx] in paths_mapping
     assert self.viterbi_idxs[path_idx-1] in self.end_mapping
     assert (self.end_mapping[self.viterbi_idxs[path_idx-1]], \
             paths_mapping[self.viterbi_idxs[path_idx]]) in new_trans1
     assert (paths_mapping[self.viterbi_idxs[path_idx]], \
             new_end_mapping[self.viterbi_idxs[point_idx]]) in new_trans2
     best_idx2 = paths_mapping[self.viterbi_idxs[path_idx]]
     # Merge the paths together
     (merged_trans1, merged_paths, merged_trans2, merged_best_idx) = \
       merge_path_sequence(self.start_trans, self.paths, self.end_trans, \
                           new_trans1, decimated_paths, new_trans2, \
                           self.best_idx, best_idx2)
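     # The merged structure now spans from self.start_point to the new
     # point, and merged_best_idx tracks the Viterbi-optimal path in it.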
     self.end_point = new_decim_sc
     self.end_mapping = new_end_mapping
     self.start_trans = merged_trans1
     self.paths = merged_paths
     self.best_idx = merged_best_idx
     self.end_trans = merged_trans2
   # Time to send a new element to the output and restart?  
   if (self.count-1) % self.decimation_factor == 0:
     assert self.paths
     assert self.end_trans
     assert self.start_trans
     assert self.best_idx is not None
     encoded_paths = [encode_Path(path) for path in self.paths]
     print len(encoded_paths), " paths", len(self.end_point.states), " states"
     result = (self.start_trans, encoded_paths, \
               self.end_trans, encode_StateCollection(self.end_point))
     # Adding the most likely index of the path and of the next point.
     self.most_likely_indexes.append(self.best_idx)
     assert new_most_likely_sc_idx is not None
     self.most_likely_indexes.append(new_most_likely_sc_idx)
     # Restart computations:
     self.start_point = self.end_point
     self.start_mapping = self.end_mapping
     del self.paths
     self.start_trans = None
     self.end_trans = None
     self.paths = None
     self.best_idx = None
     return result
   # Nothing to return for this input, continuing.
   return None
 def call(self, dct):
   """ Closure.
   """
   (_, paths_dct, _, sc_dct) = dct
   sc = decode_StateCollection(sc_dct)
   del sc_dct
   # The index of sc
   point_idx = 2 * self.count
   # The index of the paths
   path_idx = 2 * self.count - 1
   self.count += 1
   new_most_likely_sc_idx = self.viterbi_idxs[point_idx]
   # Is this the first element?
   if not self.start_point:
     self.start_point = sc
     assert new_most_likely_sc_idx is not None
     self.most_likely_indexes.append(new_most_likely_sc_idx)
     return ([], [], [], encode_StateCollection(self.start_point))
   # Only decode the most likely path; we do not need the other paths.
   new_best_path = decode_Path(paths_dct[self.viterbi_idxs[path_idx]])
   del paths_dct
   # Try to add a new element:
   # All this code is much more complicated than it should be now.
   if self.best_path is None:
     assert self.start_point is not None
     self.best_path = new_best_path
   else:
     assert self.start_point is not None
     self.best_path = merge_path(self.best_path, new_best_path)
     assert self.best_path.start in self.start_point.states
     assert self.best_path.end in sc.states
   # Time to send a new element to the output and restart?  
   if (self.count-1) % self.decimation_factor == 0:
     # Time to find all the other paths
     (other_trans1, other_paths, other_trans2) = \
       self.path_builder.getPathsBetweenCollections(self.start_point, sc)
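     # other_trans1 / other_trans2 are the transition lists linking
     # start-point state indexes to path indexes and path indexes to
     # end-point state indexes, respectively.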
     # If we have the first path already in, no need to integrate it:
     try:
       best_path_idx = other_paths.index(self.best_path)
       new_trans1 = other_trans1
       new_paths = other_paths
       new_trans2 = other_trans2
     except ValueError:
       # We need to append it:
       best_path_idx = len(other_paths)
       prev_best_idx = self.most_likely_indexes[-1]
       new_trans1 = other_trans1 + [(prev_best_idx, best_path_idx)]
       new_paths = other_paths + [self.best_path]
       new_trans2 = other_trans2 + [(best_path_idx, new_most_likely_sc_idx)]
     
     encoded_paths = [encode_Path(path) for path in new_paths]
     print len(encoded_paths), " paths", len(sc.states), " states",
     if len(other_paths) != len(new_paths):
       print '(forced insertion)'
     else:
       print ''
     result = (new_trans1, encoded_paths, \
               new_trans2, encode_StateCollection(sc))
     # Adding the most likely index of the path and of the next point.
     self.most_likely_indexes.append(best_path_idx)
     assert new_most_likely_sc_idx is not None
     self.most_likely_indexes.append(new_most_likely_sc_idx)
     # Restart computations:
     self.start_point = sc
     self.best_path = None
     return result
   # Nothing to return for this input, continuing.
   return None