def test_setup_config_with_env_vars(self):
    """setup_config should pull values out of the supplied config files."""
    cli_args = [
        'program',
        '--config=./config.yaml',
        '--config-override=config-env-dev.yaml',
        '-r',
    ]
    args = parse_cmd_args(cli_args)
    with staticconf.testing.MockConfiguration(MOCK_CONFIG):
        setup_config(args, 'test_worker')
        # pick some key and ensure it was loaded from config
        assert read_string('log_stream_name', 'default') != 'default'
def et_scanner_main(args):
    """Create an instance of ETScanner and run it once.

    Config is loaded first so the SQS queue names can be read from it.
    """
    setup_config(args, 'ETScanner')
    scanner_queue = SQSWrapper(read_string("sqs.et_scanner_queue_name"))
    worker_queue = SQSWrapper(read_string("sqs.et_queue_name"))
    scanner = ETScanner(
        TableConnection.get_connection('ScheduledJobs'),
        scanner_queue,
        worker_queue,
        Mailer(args.run_local),
    )
    scanner.run()
def test_setup_config_with_env_vars(self):
    """setup_config should load settings from the config files on the CLI."""
    args = parse_cmd_args([
        'program',
        '--config=./config.yaml',
        '--config-override=config-env-dev.yaml',
        '-r',
    ])
    with staticconf.testing.MockConfiguration(MOCK_CONFIG):
        setup_config(args, 'test_worker')
        # spot-check one key: it was loaded from config, not the default
        assert read_string('log_stream_name', 'default') != 'default'
def test_setup_config_no_exceptions(self):
    """setup_config should run cleanly when no config paths are given."""
    args = argparse.Namespace(
        config=None,
        config_override=None,
        run_local=False,
    )
    with staticconf.testing.MockConfiguration(MOCK_CONFIG):
        setup_config(args, 'test_worker')
'pipeline.et_step.s3_to_s3_stream': self.log_stream_name, 'pipeline.et_step.s3_input_suffix': s3_input_suffix, 'pipeline.et_step.mrjob': mrjob, 'pipeline.load_step.s3_to_redshift_stream': self.log_stream_name, 'pipeline.load_step.days_to_check': 1, 'pipeline.load_step.copy_time_est_secs': copy_time_est_secs, }) return yaml_config def _find_mrjob_with_format(self, log_format): """ Look up the log format in the FORMAT_TO_MRJOB dictionary return path of the mrjob to use """ # TODO: replace FORMAT_TO_MRJOB dict with a dynamo db table mrjob_entry = FORMAT_TO_MRJOB.get(log_format) if mrjob_entry is None: mrjob_entry = FORMAT_TO_MRJOB['custom'].format(log_format) return mrjob_entry if __name__ == "__main__": args = parse_cmd_args(sys.argv) setup_config(args, 'ImdWorker') try: ImdWorker( args.config, args.config_override, args.run_local, Mailer(args.run_local), args.dummy_run ).run() except KeyboardInterrupt: pass
def test_setup_config_no_exceptions(self):
    """setup_config should succeed with an empty/default Namespace."""
    empty_args = argparse.Namespace(config=None, config_override=None, run_local=False)
    with staticconf.testing.MockConfiguration(MOCK_CONFIG):
        setup_config(empty_args, 'test_worker')
'pipeline.et_step.mrjob': mrjob, 'pipeline.load_step.s3_to_redshift_stream': self.log_stream_name, 'pipeline.load_step.days_to_check': 1, 'pipeline.load_step.copy_time_est_secs': copy_time_est_secs, }) return yaml_config def _find_mrjob_with_format(self, log_format): """ Look up the log format in the FORMAT_TO_MRJOB dictionary return path of the mrjob to use """ # TODO: replace FORMAT_TO_MRJOB dict with a dynamo db table mrjob_entry = FORMAT_TO_MRJOB.get(log_format) if mrjob_entry is None: mrjob_entry = FORMAT_TO_MRJOB['custom'].format(log_format) return mrjob_entry if __name__ == "__main__": args = parse_cmd_args(sys.argv) setup_config(args, 'ImdWorker') try: ImdWorker(args.config, args.config_override, args.run_local, Mailer(args.run_local), args.dummy_run).run() except KeyboardInterrupt: pass