def precompute_fixed(self, input):
    embeddings, _ = self.embedder(self._init_embed(input))
    # Wrap in a CachedLookup so that repeatedly indexing this object with the same index
    # only performs the lookup once; this is the case as long as all elements in the
    # batch are still present (i.e. the batch keeps its maximum size).
    return CachedLookup(self._precompute(embeddings))
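CachedLookup itself is not shown in these examples. As a rough sketch of the behaviour the comments describe (only the class name comes from the snippets; the fields and the identity-based check below are assumptions, not the repository's actual code), it might memoize the most recent index so that repeated lookups with the same key return the cached result instead of re-gathering:

class CachedLookup:
    # Sketch only: caches the result of the most recent indexing operation on `data`,
    # so repeated lookups with an identical key reuse the previous result.
    def __init__(self, data):
        self.data = data        # precomputed, fixed per-instance context
        self._key = None        # last key used for indexing
        self._value = None      # cached result for that key

    def __getitem__(self, key):
        # Identity check keeps this safe for tensor keys, where `==` would be elementwise.
        if key is not self._key:
            self._key = key
            self._value = self.data[key]
        return self._value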
Example #2
def precompute_fixed(self, nodes, graph):
    embeddings = self.embedder(self._init_embed(nodes), graph)
    # Wrap in a CachedLookup so that repeatedly indexing this object with the same index
    # only performs the lookup once; this is the case as long as all elements in the
    # batch are still present (i.e. the batch keeps its maximum size).
    return CachedLookup(self._precompute(embeddings))
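A hypothetical usage pattern, assuming the sketch above and made-up names and shapes (`data`, `idx`), is to index the returned object with the same index at every decoding step, so the underlying gather only happens on the first step:

import torch

data = torch.randn(8, 16)        # stand-in for the precomputed fixed context
fixed = CachedLookup(data)       # wrapper sketched above
idx = torch.arange(8)            # the same index object is reused at every decoding step
for _ in range(3):
    step_context = fixed[idx]    # gather happens only on the first iteration, then cached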