def run(self):
  """Runs each requested Nexmark query, collecting failure messages.

  For the DirectRunner (the default), each query is launched through a
  Command wrapper so the streaming job can be cancelled after the
  configured wait_until_finish_duration; other runners are invoked
  directly. Any per-query failures are logged at the end.
  """
  queries = {
      0: query0,
      # TODO(mariagh): Add more queries.
  }

  query_errors = []
  for i in self.args.query:
    self.parse_args()
    logging.info('Running query %d', i)

    # The DirectRunner is the default runner, and it needs
    # special handling to cancel streaming jobs.
    launch_from_direct_runner = self.pipeline_options.view_as(
        StandardOptions).runner in [None, 'DirectRunner']
    if launch_from_direct_runner:
      command = Command(self.run_query, args=[queries[i], query_errors])
      query_duration = self.pipeline_options.view_as(TestOptions).wait_until_finish_duration  # pylint: disable=line-too-long
      # wait_until_finish_duration is in milliseconds; Command expects seconds.
      command.run(timeout=query_duration // 1000)
    else:
      try:
        self.run_query(queries[i], query_errors=None)
      except Exception as exc:
        # Store the message, not the exception object: query_errors is
        # joined with str.join() below, which requires strings.
        query_errors.append(str(exc))

  if query_errors:
    logging.error('Query failed with %s', ', '.join(query_errors))
  else:
    logging.info('Queries run: %s', self.args.query)
def run(self):
  """Executes every query requested on the command line.

  Streaming jobs on the DirectRunner (the default) cannot be cancelled
  directly, so those are launched via a Command wrapper with a timeout
  derived from wait_until_finish_duration; other runners call run_query
  directly. Failure messages are accumulated and logged at the end.
  """
  query_fns = {
      0: query0,
      1: query1,
      2: query2,
      3: query3,
      4: query4,
      5: query5,
      6: query6,
      7: query7,
      8: query8,
      9: query9,
      11: query11,
      12: query12,
  }

  # TODO(mariagh): Move to a config file.
  shared_query_args = {
      'auction_skip': 123,
      'window_size_sec': 10,
      'window_period_sec': 5,
      'fanout': 5,
      'num_max_workers': 5,
      'max_log_events': 100000,
      'occasional_delay_sec': 3,
      'max_auction_waiting_time': 600,
  }

  failures = []
  for query_num in self.args.query:
    self.parse_args()
    logging.info('Running query %d', query_num)

    # The DirectRunner is the default runner, and it needs
    # special handling to cancel streaming jobs.
    is_direct = self.pipeline_options.view_as(
        StandardOptions).runner in [None, 'DirectRunner']
    timeout_ms = self.pipeline_options.view_as(TestOptions).wait_until_finish_duration  # pylint: disable=line-too-long
    if is_direct:
      wrapper = Command(
          self.run_query,
          args=[query_fns[query_num], shared_query_args, failures])
      wrapper.run(timeout=timeout_ms // 1000)
    else:
      try:
        self.run_query(
            query_fns[query_num], shared_query_args, query_errors=failures)
      except Exception as exc:
        failures.append(str(exc))

  if not failures:
    logging.info('Queries run: %s', self.args.query)
  else:
    logging.error('Query failed with %s', ', '.join(failures))
def run(self):
  """Runs each requested Nexmark query, collecting failure messages.

  Per-query extra arguments are looked up in query_args (None when a
  query has none). DirectRunner (the default) jobs are launched through
  a Command wrapper so the streaming job can be cancelled after the
  configured wait_until_finish_duration; other runners are invoked
  directly. Any failures are logged at the end.
  """
  queries = {
      0: query0,
      1: query1,
      2: query2,
      # TODO(mariagh): Add more queries.
  }

  # TODO(mariagh): Move to a config file.
  query_args = {2: {'auction_id': 'a1003'}}

  query_errors = []
  for i in self.args.query:
    self.parse_args()
    logging.info('Running query %d', i)

    # The DirectRunner is the default runner, and it needs
    # special handling to cancel streaming jobs.
    launch_from_direct_runner = self.pipeline_options.view_as(
        StandardOptions).runner in [None, 'DirectRunner']
    query_duration = self.pipeline_options.view_as(TestOptions).wait_until_finish_duration  # pylint: disable=line-too-long
    if launch_from_direct_runner:
      command = Command(
          self.run_query, args=[queries[i], query_args.get(i), query_errors])
      # wait_until_finish_duration is in milliseconds; Command expects seconds.
      command.run(timeout=query_duration // 1000)
    else:
      try:
        self.run_query(queries[i], query_args.get(i), query_errors=None)
      except Exception as exc:
        # Store the message, not the exception object: query_errors is
        # joined with str.join() below, which requires strings.
        query_errors.append(str(exc))

  if query_errors:
    logging.error('Query failed with %s', ', '.join(query_errors))
  else:
    logging.info('Queries run: %s', self.args.query)