def test_map(self):
  """Runs a two-stage Map pipeline and checks the labeled transforms chain."""
  pipeline = Pipeline('DirectPipelineRunner')
  letters = pipeline | Create('input', ['a', 'b', 'c'])
  # Upper-case each element, then prepend a constant prefix supplied as a
  # Map side argument.
  prefixed = (letters
              | Map('upper', str.upper)
              | Map('prefix', lambda x, prefix: prefix + x, 'foo-'))
  assert_that(prefixed, equal_to(['foo-A', 'foo-B', 'foo-C']))
  pipeline.run()
def test_cached_pvalues_are_refcounted(self):
  """Test that cached PValues are refcounted and deleted.

  The intermediary PValues computed by the workflow below contain one
  million elements so if the refcounting does not work the number of
  objects tracked by the garbage collector will increase by a few
  millions by the time we execute the final Map checking the objects
  tracked. Anything that is much larger than what we started with will
  fail the test.
  """
  def check_memory(value, count_threshold):
    # Identity transform that raises if the GC-tracked object count has
    # grown past the threshold recorded before the pipeline ran — i.e. if
    # intermediate PValues were not released.
    gc.collect()
    objects_count = len(gc.get_objects())
    if objects_count > count_threshold:
      raise RuntimeError(
          'PValues are not refcounted: %s, %s' % (
              objects_count, count_threshold))
    return value

  def create_dupes(o, _):
    # Emits each element on both the main output and a 'side' output.
    # The second (ignored) argument receives the AsIter side input.
    yield o
    yield SideOutputValue('side', o)

  pipeline = Pipeline('DirectPipelineRunner')

  # Baseline object count, with headroom for bookkeeping allocations.
  gc.collect()
  count_threshold = len(gc.get_objects()) + 10000

  biglist = pipeline | Create('oom:create', ['x'] * 1000000)
  dupes = (
      biglist
      | Map('oom:addone', lambda x: (x, 1))
      | FlatMap('oom:dupes', create_dupes,
                AsIter(biglist)).with_outputs('side', main='main'))
  # 'side' is flattened in twice, so each key accumulates 3M ones.
  result = (
      (dupes.side, dupes.main, dupes.side)
      | Flatten('oom:flatten')
      | CombinePerKey('oom:combine', sum)
      | Map('oom:check', check_memory, count_threshold))
  assert_that(result, equal_to([('x', 3000000)]))
  pipeline.run()
  # Exact per-transform element counts recorded by the direct runner.
  self.assertEqual(
      pipeline.runner.debug_counters['element_counts'],
      {
          'oom:flatten': 3000000,
          ('oom:combine/GroupByKey/reify_windows', None): 3000000,
          ('oom:dupes/oom:dupes', 'side'): 1000000,
          ('oom:dupes/oom:dupes', None): 1000000,
          'oom:create': 1000000,
          ('oom:addone', None): 1000000,
          'oom:combine/GroupByKey/group_by_key': 1,
          ('oom:check', None): 1,
          'assert_that/singleton': 1,
          ('assert_that/Map(match)', None): 1,
          ('oom:combine/GroupByKey/group_by_window', None): 1,
          ('oom:combine/Combine/ParDo(CombineValuesDoFn)', None): 1})
def test_timestamped_with_combiners(self):
  """Checks that combiners aggregate per key AND per fixed window."""
  p = Pipeline('DirectPipelineRunner')
  result = (p
            # Create some initial test values.
            | Create('start', [(k, k) for k in range(10)])
            # The purpose of the WindowInto transform is to establish a
            # FixedWindows windowing function for the PCollection.
            # It does not bucket elements into windows since the timestamps
            # from Create are not spaced 5 ms apart and very likely they all
            # fall into the same window.
            | WindowInto('w', FixedWindows(5))
            # Generate timestamped values using the values as timestamps.
            # Now there are values 5 ms apart and since Map propagates the
            # windowing function from input to output the output PCollection
            # will have elements falling into different 5ms windows.
            | Map(lambda (x, t): TimestampedValue(x, t))
            # We add a 'key' to each value representing the index of the
            # window. This is important since there is no guarantee of
            # order for the elements of a PCollection.
            | Map(lambda v: (v / 5, v)))
  # Sum all elements associated with a key and window. Although it
  # is called CombinePerKey it is really CombinePerKeyAndWindow the
  # same way GroupByKey is really GroupByKeyAndWindow.
  sum_per_window = result | CombinePerKey(sum)
  # Compute mean per key and window.
  mean_per_window = result | combiners.Mean.PerKey()
  assert_that(sum_per_window, equal_to([(0, 10), (1, 35)]),
              label='assert:sum')
  assert_that(mean_per_window, equal_to([(0, 2.0), (1, 7.0)]),
              label='assert:mean')
  p.run()
def test_pipeline_as_context(self):
  """Verifies that an exception raised while running escapes the
  Pipeline context manager."""

  def reraise(exc):
    # Elements of the pipeline are exception *classes*; raising one here
    # must abort the run triggered on context exit.
    raise exc

  with self.assertRaises(ValueError):
    with Pipeline(self.runner_name) as p:
      # pylint: disable=expression-not-assigned
      p | Create([ValueError]) | Map(reraise)
def test_timestamped_value(self):
  """Timestamps elements by their value, then groups them into 5 ms
  fixed windows under a single key."""
  p = Pipeline('DirectPipelineRunner')
  grouped = (p
             | Create('start', [(k, k) for k in range(10)])
             # Each (value, timestamp) pair becomes a timestamped element.
             | Map(lambda pair: TimestampedValue(pair[0], pair[1]))
             | WindowInto('w', FixedWindows(5))
             | Map(lambda v: ('key', v))
             | GroupByKey())
  # Two windows of five elements each: [0..4] and [5..9].
  assert_that(grouped,
              equal_to([('key', [0, 1, 2, 3, 4]),
                        ('key', [5, 6, 7, 8, 9])]))
  p.run()
def apply(self, pcoll):
  """Returns pcoll with this transform's suffix appended to each element.

  self.suffix is read inside the lambda so the value in effect at
  execution time is used, matching deferred-execution semantics.
  """
  return pcoll | Map(lambda element: element + self.suffix)
def Count(pcoll):  # pylint: disable=invalid-name
  """A Count transform: v, ... => (v, n), ..."""
  # Pair each value with 1, group the ones per value, then sum them.
  paired = pcoll | Map('AddCount', lambda x: (x, 1))
  grouped = paired | GroupByKey('GroupCounts')
  return grouped | Map('AddCounts', lambda kv: (kv[0], sum(kv[1])))
def timestamped_key_values(self, pipeline, key, *timestamps):
  """Builds a PCollection of WindowedValue((key, t), t, []) for each
  timestamp t, with an empty window list."""
  created = pipeline | Create('start', timestamps)
  return created | Map(lambda ts: WindowedValue((key, ts), ts, []))
def test_eager_pipeline(self):
  """The eager runner materializes pipeline results immediately as a list."""
  p = Pipeline('EagerPipelineRunner')
  squares = p | Create([1, 2, 3]) | Map(lambda n: n * n)
  self.assertEqual([1, 4, 9], squares)