def run_pipeline(self, barrier, writes_output=True):
    """Runs the synthetic pipeline (with a splittable step) on DirectRunner.

    Args:
      barrier: name of the synchronization barrier passed through to the
        synthetic pipeline via ``--barrier``.
      writes_output: when True, directs pipeline output to a fresh temporary
        location and asserts that exactly 10 elements were written.
    """
    import os  # local import: keeps this fix self-contained in the method

    steps = [
        {'per_element_delay': 1},
        {'per_element_delay': 1, 'splittable': True},
    ]
    args = [
        '--barrier=%s' % barrier,
        '--runner=DirectRunner',
        '--steps=%s' % json.dumps(steps),
        '--input=%s' % json.dumps(input_spec(10, 1, 1)),
    ]
    if writes_output:
        # NamedTemporaryFile().name leaked an open fd and the backing file
        # was deleted as soon as the object was garbage collected, leaving a
        # race-prone (and Windows-hostile) path. A fresh private directory
        # gives a safe, unique output prefix with no open handle.
        output_location = os.path.join(tempfile.mkdtemp(), 'output')
        args.append('--output=%s' % output_location)
    synthetic_pipeline.run(args, save_main_session=False)
    # Verify output: the runner may shard output across several files that
    # share the output_location prefix, so glob and count all lines.
    if writes_output:
        read_output = []
        for file_name in glob.glob(output_location + '*'):
            with open(file_name, 'rb') as f:
                read_output.extend(f.read().splitlines())
        self.assertEqual(10, len(read_output))
def run_pipeline(self, barrier, writes_output=True):
    """Runs the two-step synthetic pipeline on DirectRunner.

    Args:
      barrier: name of the synchronization barrier passed through to the
        synthetic pipeline via ``--barrier``.
      writes_output: when True, directs pipeline output to a fresh temporary
        location and asserts that exactly 10 elements were written.
    """
    import os  # local import: keeps this fix self-contained in the method

    steps = [{'per_element_delay': 1}, {'per_element_delay': 1}]
    args = [
        '--barrier=%s' % barrier,
        '--runner=DirectRunner',
        '--steps=%s' % json.dumps(steps),
        '--input=%s' % json.dumps(input_spec(10, 1, 1)),
    ]
    if writes_output:
        # NamedTemporaryFile().name leaked an open fd and the backing file
        # was deleted as soon as the object was garbage collected, leaving a
        # race-prone (and Windows-hostile) path. A fresh private directory
        # gives a safe, unique output prefix with no open handle.
        output_location = os.path.join(tempfile.mkdtemp(), 'output')
        args.append('--output=%s' % output_location)
    synthetic_pipeline.run(args)
    # Verify output: the runner may shard output across several files that
    # share the output_location prefix, so glob and count all lines.
    if writes_output:
        read_output = []
        for file_name in glob.glob(output_location + '*'):
            with open(file_name, 'r') as f:
                read_output.extend(f.read().splitlines())
        self.assertEqual(10, len(read_output))