def scheduler(args):
    print(settings.HEADER)
    utils.log_to_stdout()
    job = jobs.SchedulerJob(
        dag_id=args.dag_id,
        subdir=process_subdir(args.subdir),
        num_runs=args.num_runs,
        do_pickle=args.do_pickle)
    job.run()
def test_calling_log_to_stdout_2X_should_add_only_one_stream_handler(self):
    utils.log_to_stdout()
    utils.log_to_stdout()
    root_logger = logging.getLogger()
    stream_handlers = [h for h in root_logger.handlers
                       if isinstance(h, logging.StreamHandler)]
    assert len(stream_handlers) == 1
def test_setting_log_level_then_calling_log_should_keep_the_old_value(self):
    # If the log level was set externally (e.g. via --logging-level),
    # log_to_stdout should not override it with its default "INFO".
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.DEBUG)
    utils.log_to_stdout()
    assert root_logger.level == logging.DEBUG
def test(args):
    utils.log_to_stdout()
    args.execution_date = dateutil.parser.parse(args.execution_date)
    dagbag = DagBag(process_subdir(args.subdir))
    if args.dag_id not in dagbag.dags:
        raise AirflowException(
            'dag_id {} could not be found'.format(args.dag_id))
    dag = dagbag.dags[args.dag_id]
    task = dag.get_task(task_id=args.task_id)
    ti = TaskInstance(task, args.execution_date)
    if args.dry_run:
        ti.dry_run()
    else:
        ti.run(force=True, ignore_dependencies=True, test_mode=True)
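# Illustrative invocation of the test command above with a stub Namespace;
# execution_date is parsed from a string by dateutil.parser.parse, so an ISO
# timestamp works. The dag/task ids below are hypothetical, not real DAGs.
from argparse import Namespace

test(Namespace(dag_id='example_dag', task_id='example_task',
               execution_date='2015-01-01T00:00:00',
               subdir=None, dry_run=True))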
def test_calling_log_to_stdout_should_add_one_stream_handler(self):
    # First reset the root logger to a clean state. Iterate over a copy of
    # the handler list, since removeHandler mutates it during the loop.
    root_logger = logging.getLogger()
    for handler in list(root_logger.handlers):
        root_logger.removeHandler(handler)
    root_logger.setLevel(logging.NOTSET)
    assert root_logger.level == logging.NOTSET

    utils.log_to_stdout()

    stream_handlers = [h for h in root_logger.handlers
                       if isinstance(h, logging.StreamHandler)]
    assert len(stream_handlers) == 1
    assert root_logger.level == utils.LOGGING_LEVEL
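# The three tests above pin down a contract for utils.log_to_stdout: calling
# it repeatedly must leave at most one StreamHandler on the root logger, and
# it must not clobber a log level that was already set externally. A minimal
# sketch of a function satisfying that contract (the real implementation may
# differ; LOGGING_LEVEL mirrors utils.LOGGING_LEVEL and is assumed to be INFO):
import logging
import sys

LOGGING_LEVEL = logging.INFO  # assumed default, matching the tests

def log_to_stdout():
    root_logger = logging.getLogger()
    # Only apply the default level if none has been set yet.
    if root_logger.level == logging.NOTSET:
        root_logger.setLevel(LOGGING_LEVEL)
    # Adding the handler twice would duplicate every log line, so skip it
    # if a StreamHandler is already attached.
    if not any(isinstance(h, logging.StreamHandler)
               for h in root_logger.handlers):
        root_logger.addHandler(logging.StreamHandler(sys.stdout))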
def trigger_dag(args):
    utils.log_to_stdout()
    session = settings.Session()
    # TODO: verify dag_id
    execution_date = datetime.now()
    dr = session.query(DagRun).filter(
        DagRun.dag_id == args.dag_id,
        DagRun.run_id == args.run_id).first()
    if dr:
        logging.error("This run_id already exists")
    else:
        trigger = DagRun(
            dag_id=args.dag_id,
            run_id=args.run_id,
            execution_date=execution_date,
            state=State.RUNNING,
            external_trigger=True)
        session.add(trigger)
        logging.info("Created {}".format(trigger))
    session.commit()
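# Illustrative only: calling trigger_dag twice with the same run_id exercises
# the duplicate-run guard above; the second call logs "This run_id already
# exists" and creates nothing. The Namespace attributes mirror the usage in
# trigger_dag; the dag_id and run_id values are hypothetical.
from argparse import Namespace

trigger_dag(Namespace(dag_id='example_dag', run_id='manual_run_1'))
trigger_dag(Namespace(dag_id='example_dag', run_id='manual_run_1'))  # duplicate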
def webserver(args):
    print(settings.HEADER)
    utils.log_to_stdout()
    from airflow.www.app import cached_app
    app = cached_app(configuration)
    workers = args.workers or configuration.get('webserver', 'workers')
    if args.debug:
        print("Starting the web server on port {0} and host {1}.".format(
            args.port, args.hostname))
        app.run(debug=True, port=args.port, host=args.hostname)
    else:
        print(
            'Running the Gunicorn server with {workers} {args.workerclass} '
            'workers on host {args.hostname} and port '
            '{args.port}...'.format(**locals()))
        # Use the computed `workers`, so the config fallback applies when
        # --workers was not passed on the command line.
        sp = subprocess.Popen([
            'gunicorn', '-w', str(workers), '-k', str(args.workerclass),
            '-t', '120', '-b', args.hostname + ':' + str(args.port),
            'airflow.www.app:cached_app()'])
        sp.wait()
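# For a concrete picture: with workers=4, workerclass='sync', hostname
# '0.0.0.0' and port 8080 (example values), the Popen call in webserver is
# equivalent to running:
#
#   gunicorn -w 4 -k sync -t 120 -b 0.0.0.0:8080 'airflow.www.app:cached_app()'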
def kerberos(args):
    print(settings.HEADER)
    utils.log_to_stdout()
    import airflow.security.kerberos
    airflow.security.kerberos.run()