def __init__(self, cache=None, blocking=False):
    # Cache of CloudWorkflowStep protos generated while the runner
    # "executes" a pipeline.
    self._cache = cache if cache is not None else PValueCache()
    self.blocking = blocking
    self.result = None
    self._unique_step_id = 0
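The counter initialised last is what lets the runner hand out a distinct name for every CloudWorkflowStep it emits. A minimal sketch of that pattern, assuming a hypothetical helper named _get_unique_step_name (the real runner may spell this differently):

def _get_unique_step_name(self):
    # Assumed helper: bump the per-runner counter and derive a short,
    # unique step name ('s1', 's2', ...) for the next generated step.
    self._unique_step_id += 1
    return 's%d' % self._unique_step_id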
def run(self, pipeline):
    try:
        # Swap in a disk-backed cache for the duration of this run.
        self._cache = PValueCache(use_disk_backed_cache=True)
        return super(DirectPipelineRunner, self).run(pipeline)
    finally:
        # Release the disk-backed cache and fall back to the placeholder
        # cache (presumably set up in __init__).
        del self._cache
        self._cache = self._null_cache
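The run method above relies on a _null_cache attribute that the snippet does not define; a plausible companion __init__ (a sketch only, assuming the method lives on a DirectPipelineRunner subclass called DiskCachedPipelineRunner here) would be:

def __init__(self):
    # Sketch: an empty placeholder cache, so the finally block in run()
    # always has something cheap to fall back to once the disk-backed
    # PValueCache has been released.
    self._null_cache = ()
    super(DiskCachedPipelineRunner, self).__init__(cache=self._null_cache)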
Example #3
def __init__(self, cache=None):
    # Cache of values computed while the runner executes a pipeline.
    self._cache = cache if cache is not None else PValueCache()
    self._counter_factory = counters.CounterFactory()
    # Element counts used for debugging footprint issues in the direct
    # runner. The values computed are used only for logging and do not
    # take part in any decision-making logic. The key for the counter
    # dictionary is either the full label of the transform producing the
    # elements, or a tuple (full label, output tag) for ParDo transforms,
    # since they can produce values on multiple outputs.
    self.debug_counters = {}
    self.debug_counters['element_counts'] = collections.Counter()
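The keying scheme described in the comment can be illustrated with a small, stdlib-only sketch (the transform labels below are made up for illustration):

import collections

element_counts = collections.Counter()

# A single-output transform is keyed by its full label...
element_counts['Read/ReadFromText'] += 10

# ...while a multi-output ParDo is keyed by (full label, output tag),
# one entry per tagged output.
element_counts[('SplitWords/ParDo(SplitFn)', 'short')] += 7
element_counts[('SplitWords/ParDo(SplitFn)', 'long')] += 3

print(element_counts.most_common())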