def test_menu_links(self):
    """The plugin's menu link should appear under the 'Test Plugin' category."""
    flask_app = create_app(testing=True)
    admin, = flask_app.extensions['admin']
    category = admin._menu_categories['Test Plugin']
    menu_link, = (child for child in category.get_children()
                  if isinstance(child, MenuLink))
    self.assertEqual('Test Menu Link', menu_link.name)
def test_admin_views(self):
    """The plugin's admin view should appear under the 'Test Plugin' category."""
    flask_app = create_app(testing=True)
    admin, = flask_app.extensions['admin']
    category = admin._menu_categories['Test Plugin']
    admin_view, = (child for child in category.get_children()
                   if isinstance(child, MenuView))
    self.assertEqual('Test View', admin_view.name)
def setUp(self):
    """Enter test mode and prepare a CLI parser plus an example DagBag."""
    configuration.test_mode()
    throwaway_app = create_app()
    throwaway_app.config['TESTING'] = True
    self.parser = cli.get_parser()
    self.dagbag = models.DagBag(dag_folder=DEV_NULL, include_examples=True)
def setUp(self): super(TestLogView, self).setUp() # Create a custom logging configuration configuration.load_test_config() logging_config = copy.deepcopy(DEFAULT_LOGGING_CONFIG) current_dir = os.path.dirname(os.path.abspath(__file__)) logging_config['handlers']['task']['base_log_folder'] = os.path.normpath( os.path.join(current_dir, 'test_logs')) logging_config['handlers']['task']['filename_template'] = \ '{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts | replace(":", ".") }}/{{ try_number }}.log' # Write the custom logging configuration to a file self.settings_folder = tempfile.mkdtemp() settings_file = os.path.join(self.settings_folder, "airflow_local_settings.py") new_logging_file = "LOGGING_CONFIG = {}".format(logging_config) with open(settings_file, 'w') as handle: handle.writelines(new_logging_file) sys.path.append(self.settings_folder) conf.set('core', 'logging_config_class', 'airflow_local_settings.LOGGING_CONFIG') app = application.create_app(testing=True) self.app = app.test_client() self.session = Session() from airflow.www.views import dagbag dag = DAG(self.DAG_ID, start_date=self.DEFAULT_DATE) task = DummyOperator(task_id=self.TASK_ID, dag=dag) dagbag.bag_dag(dag, parent_dag=dag, root_dag=dag) ti = TaskInstance(task=task, execution_date=self.DEFAULT_DATE) ti.try_number = 1 self.session.merge(ti) self.session.commit()
def setUp(self):
    """Build a CSRF-free test client and sync the example DAG to the DB."""
    conf.load_test_config()
    web_app = application.create_app(testing=True)
    web_app.config['WTF_CSRF_METHODS'] = []
    self.app = web_app.test_client()
    self.session = Session()
    example_dag = models.DagBag().get_dag("example_bash_operator")
    example_dag.sync_to_db()
def test_action_logging_with_invalid_user(self, mocked_session, mocked_get_user): anonymous_username = '******' # When the user returned by flask login_manager._load_user # is invalid. mocked_current_user = mock.MagicMock() mocked_get_user.return_value = mocked_current_user mocked_current_user.user = None mocked_session_instance = mock.MagicMock() mocked_session.return_value = mocked_session_instance app = application.create_app(testing=True) # Patching here to avoid errors in applicant.create_app with mock.patch("airflow.models.Log") as mocked_log: with app.test_request_context(): @utils.action_logging def some_func(): pass some_func() mocked_log.assert_called_once() (args, kwargs) = mocked_log.call_args_list[0] self.assertEqual('some_func', kwargs['event']) self.assertEqual(anonymous_username, kwargs['owner']) mocked_session_instance.add.assert_called_once()
def setUp(self):
    """Create an app/appbuilder pair backed by an in-memory SQLite DB."""
    conf.load_test_config()
    self.app, self.appbuilder = application.create_app(session=Session, testing=True)
    overrides = {
        'SQLALCHEMY_DATABASE_URI': 'sqlite:///',
        'SECRET_KEY': 'secret_key',
        'CSRF_ENABLED': False,
        'WTF_CSRF_ENABLED': False,
    }
    self.app.config.update(overrides)
    self.client = self.app.test_client()
    settings.configure_orm()
    self.session = Session
def setUp(self):
    """Reset state, build a test client, and expose the example bash DAG's task."""
    reset()
    configuration.load_test_config()
    web_app = application.create_app()
    web_app.config['TESTING'] = True
    self.app = web_app.test_client()
    self.dagbag = models.DagBag(dag_folder=DEV_NULL, include_examples=True)
    self.dag_bash = self.dagbag.dags['example_bash_operator']
    self.runme_0 = self.dag_bash.get_task('runme_0')
def setUp(self):
    """Prepare a CSRF-free test client and a sample chart payload."""
    super(TestChartModelView, self).setUp()
    configuration.load_test_config()
    web_app = application.create_app(testing=True)
    web_app.config['WTF_CSRF_METHODS'] = []
    self.app = web_app.test_client()
    self.session = Session()
    self.chart = dict(label='chart', owner='airflow', conn_id='airflow_ci')
def setUp(self):
    """Prepare a CSRF-free test client and a sample variable payload."""
    super(TestVariableView, self).setUp()
    configuration.load_test_config()
    web_app = application.create_app(testing=True)
    web_app.config['WTF_CSRF_METHODS'] = []
    self.app = web_app.test_client()
    self.session = Session()
    self.variable = dict(key='test_key', val='text_val', is_encrypted=True)
def setUp(self):
    """Prepare a CSRF-free test client and a sample pool payload."""
    super(TestPoolModelView, self).setUp()
    configuration.load_test_config()
    web_app = application.create_app(testing=True)
    web_app.config['WTF_CSRF_METHODS'] = []
    self.app = web_app.test_client()
    self.session = Session()
    self.pool = dict(
        pool='test-pool',
        slots=777,
        description='test-pool-description',
    )
def setUp(self):
    """Prepare a CSRF-free test client and a sample known-event payload."""
    super(TestKnownEventView, self).setUp()
    configuration.load_test_config()
    web_app = application.create_app(testing=True)
    web_app.config['WTF_CSRF_METHODS'] = []
    self.app = web_app.test_client()
    self.session = Session()
    self.known_event = dict(
        label='event-label',
        event_type='1',
        start_date='2017-06-05 12:00:00',
        end_date='2017-06-05 13:00:00',
        reported_by=self.user_id,
        description='',
    )
def setUp(self):
    """Point the experimental API at the Kerberos auth backend and build the app.

    Requires the KRB5_KTNAME environment variable to name a valid keytab.
    """
    # add_section raises only when the section already exists; catching
    # that specific error instead of a bare `except:` avoids hiding real
    # configuration failures (matches the password-auth setUp elsewhere).
    from configparser import DuplicateSectionError
    configuration.load_test_config()
    try:
        configuration.conf.add_section("api")
    except DuplicateSectionError:
        pass
    configuration.conf.set("api", "auth_backend",
                           "airflow.api.auth.backend.kerberos_auth")
    try:
        configuration.conf.add_section("kerberos")
    except DuplicateSectionError:
        pass
    configuration.conf.set("kerberos", "keytab", os.environ['KRB5_KTNAME'])
    self.app = application.create_app(testing=True)
def setUp(self):
    """Enable LDAP authentication against a local test LDAP server."""
    # Catch only the "section already exists" error rather than a bare
    # `except:` that would also swallow unrelated configuration failures.
    from configparser import DuplicateSectionError
    configuration.conf.set("webserver", "authenticate", "True")
    configuration.conf.set("webserver", "auth_backend",
                           "airflow.contrib.auth.backends.ldap_auth")
    try:
        configuration.conf.add_section("ldap")
    except DuplicateSectionError:
        pass
    configuration.conf.set("ldap", "uri", "ldap://localhost:3890")
    configuration.conf.set("ldap", "user_filter", "objectClass=*")
    configuration.conf.set("ldap", "user_name_attr", "uid")
    configuration.conf.set("ldap", "bind_user", "cn=Manager,dc=example,dc=com")
    configuration.conf.set("ldap", "bind_password", "insecure")
    configuration.conf.set("ldap", "basedn", "dc=example,dc=com")
    configuration.conf.set("ldap", "cacert", "")
    app = application.create_app()
    app.config['TESTING'] = True
    self.app = app.test_client()
def setUp(self):
    """Enable password authentication and seed a single login user."""
    configuration.conf.set("webserver", "authenticate", "True")
    configuration.conf.set("webserver", "auth_backend",
                           "airflow.contrib.auth.backends.password_auth")
    web_app = application.create_app()
    web_app.config['TESTING'] = True
    self.app = web_app.test_client()

    from airflow.contrib.auth.backends.password_auth import PasswordUser
    session = Session()
    password_user = PasswordUser(models.User())
    password_user.username = '******'
    password_user.password = '******'
    print(password_user._password)
    session.add(password_user)
    session.commit()
    session.close()
def setUp(self):
    """Create two experimental pools and remember the first for the tests."""
    super(TestPoolApiExperimental, self).setUp()
    configuration.load_test_config()
    web_app = application.create_app(testing=True)
    self.app = web_app.test_client()
    self.session = Session()
    self.pools = []
    for index in range(2):
        name = 'experimental_%s' % (index + 1)
        new_pool = Pool(pool=name, slots=index, description=name)
        self.session.add(new_pool)
        self.pools.append(new_pool)
    self.session.commit()
    self.pool = self.pools[0]
def setUp(self):
    """Register a DAG and materialize the dag runs described by RUNS_DATA."""
    configuration.load_test_config()
    web_app = application.create_app(testing=True)
    web_app.config['WTF_CSRF_METHODS'] = []
    self.app = web_app.test_client()
    self.session = Session()

    from airflow.www.views import dagbag
    from airflow.utils.state import State
    dag = DAG(self.DAG_ID, start_date=self.DEFAULT_DATE)
    dagbag.bag_dag(dag, parent_dag=dag, root_dag=dag)
    # One successful, externally-triggered run per RUNS_DATA row.
    self.runs = [
        dag.create_dagrun(run_id=rd[0], execution_date=rd[1],
                          state=State.SUCCESS, external_trigger=True)
        for rd in self.RUNS_DATA
    ]
def webserver(args):
    """Start the Airflow webserver.

    Runs the Flask development server when ``--debug`` is given; otherwise
    spawns Gunicorn and waits for it to exit.

    :param args: parsed CLI namespace (port, hostname, threads, debug).
    """
    print(settings.HEADER)
    log_to_stdout()
    from airflow.www.app import create_app
    app = create_app(conf)
    # CLI value wins; fall back to the configured thread count.
    threads = args.threads or conf.get('webserver', 'threads')
    if args.debug:
        print(
            "Starting the web server on port {0} and host {1}.".format(
                args.port, args.hostname))
        app.run(debug=True, port=args.port, host=args.hostname)
    else:
        # Fixed missing space between '{threads}' and 'on host' in the
        # concatenated status message.
        print(
            'Running the Gunicorn server with {threads} '
            'on host {args.hostname} and port '
            '{args.port}...'.format(**locals()))
        # BUG FIX: use the resolved `threads` value; `args.threads` may be
        # None when only the config file supplies the thread count, which
        # would pass the literal string 'None' to gunicorn.
        sp = subprocess.Popen([
            'gunicorn', '-w', str(threads), '-t', '120', '-b',
            args.hostname + ':' + str(args.port),
            'airflow.www.app:create_app()'])
        sp.wait()
def setUp(self):
    """Switch the experimental API to password auth and seed one user."""
    configuration.load_test_config()
    try:
        configuration.conf.add_section("api")
    except DuplicateSectionError:
        pass
    configuration.conf.set("api", "auth_backend",
                           "airflow.contrib.auth.backends.password_auth")
    self.app = application.create_app(testing=True)

    session = Session()
    password_user = PasswordUser(models.User())
    password_user.username = '******'
    password_user.password = '******'
    session.add(password_user)
    session.commit()
    session.close()
def webserver(args):
    """Starts Airflow Webserver.

    In ``--debug`` mode runs the Flask development server directly; otherwise
    launches a Gunicorn master (optionally daemonized) and monitors it with
    GunicornMonitor until a termination signal arrives.
    """
    print(settings.HEADER)

    # CLI arguments win over the corresponding [webserver] config entries.
    access_logfile = args.access_logfile or conf.get('webserver', 'access_logfile')
    error_logfile = args.error_logfile or conf.get('webserver', 'error_logfile')
    num_workers = args.workers or conf.get('webserver', 'workers')
    worker_timeout = (args.worker_timeout or
                      conf.get('webserver', 'web_server_worker_timeout'))
    ssl_cert = args.ssl_cert or conf.get('webserver', 'web_server_ssl_cert')
    ssl_key = args.ssl_key or conf.get('webserver', 'web_server_ssl_key')
    # An SSL cert and key must be provided together (or not at all).
    if not ssl_cert and ssl_key:
        raise AirflowException(
            'An SSL certificate must also be provided for use with ' + ssl_key)
    if ssl_cert and not ssl_key:
        raise AirflowException(
            'An SSL key must also be provided for use with ' + ssl_cert)

    if args.debug:
        print("Starting the web server on port {0} and host {1}.".format(
            args.port, args.hostname))
        app = create_app(testing=conf.getboolean('core', 'unit_test_mode'))
        app.run(debug=True, use_reloader=not app.config['TESTING'],
                port=args.port, host=args.hostname,
                ssl_context=(ssl_cert, ssl_key) if ssl_cert and ssl_key else None)
    else:
        # This pre-warms the cache, and makes possible errors
        # get reported earlier (i.e. before demonization)
        os.environ['SKIP_DAGS_PARSING'] = 'True'
        app = cached_app(None)
        os.environ.pop('SKIP_DAGS_PARSING')

        pid_file, stdout, stderr, log_file = setup_locations(
            "webserver", args.pid, args.stdout, args.stderr, args.log_file)

        # Check if webserver is already running if not, remove old pidfile
        check_if_pidfile_process_is_running(pid_file=pid_file, process_name="webserver")

        print(
            textwrap.dedent('''\
                Running the Gunicorn Server with:
                Workers: {num_workers} {workerclass}
                Host: {hostname}:{port}
                Timeout: {worker_timeout}
                Logfiles: {access_logfile} {error_logfile}
                =================================================================\
            '''.format(num_workers=num_workers, workerclass=args.workerclass,
                       hostname=args.hostname, port=args.port,
                       worker_timeout=worker_timeout,
                       access_logfile=access_logfile,
                       error_logfile=error_logfile)))

        run_args = [
            'gunicorn',
            '--workers', str(num_workers),
            '--worker-class', str(args.workerclass),
            '--timeout', str(worker_timeout),
            '--bind', args.hostname + ':' + str(args.port),
            '--name', 'airflow-webserver',
            '--pid', pid_file,
            '--config', 'python:airflow.www.gunicorn_config',
        ]

        if args.access_logfile:
            run_args += ['--access-logfile', str(args.access_logfile)]

        if args.error_logfile:
            run_args += ['--error-logfile', str(args.error_logfile)]

        if args.daemon:
            run_args += ['--daemon']

        if ssl_cert:
            run_args += ['--certfile', ssl_cert, '--keyfile', ssl_key]

        run_args += ["airflow.www.app:cached_app()"]

        gunicorn_master_proc = None

        def kill_proc(dummy_signum, dummy_frame):  # pylint: disable=unused-argument
            # Signal handler: stop gunicorn, then exit this monitor process.
            gunicorn_master_proc.terminate()
            gunicorn_master_proc.wait()
            sys.exit(0)

        def monitor_gunicorn(gunicorn_master_proc):
            # Register signal handlers
            signal.signal(signal.SIGINT, kill_proc)
            signal.signal(signal.SIGTERM, kill_proc)

            # These run forever until SIG{INT, TERM, KILL, ...} signal is sent
            GunicornMonitor(
                gunicorn_master_proc=gunicorn_master_proc,
                num_workers_expected=num_workers,
                master_timeout=conf.getint('webserver', 'web_server_master_timeout'),
                worker_refresh_interval=conf.getint(
                    'webserver', 'worker_refresh_interval', fallback=10),
                worker_refresh_batch_size=conf.getint(
                    'webserver', 'worker_refresh_batch_size', fallback=1),
                # BUG FIX: reload_on_plugin_change is a boolean option; reading
                # it with getint(fallback=1) silently enabled plugin reloading
                # by default. Read it as a boolean with the documented default
                # of False (matches the current cli implementation).
                reload_on_plugin_change=conf.getboolean(
                    'webserver', 'reload_on_plugin_change', fallback=False),
            ).start()

        if args.daemon:
            handle = setup_logging(log_file)

            base, ext = os.path.splitext(pid_file)
            with open(stdout, 'w+') as stdout, open(stderr, 'w+') as stderr:
                ctx = daemon.DaemonContext(
                    pidfile=TimeoutPIDLockFile(f"{base}-monitor{ext}", -1),
                    files_preserve=[handle],
                    stdout=stdout,
                    stderr=stderr,
                )
                with ctx:
                    subprocess.Popen(run_args, close_fds=True)

                    # Reading pid of gunicorn master as it will be different that
                    # the one of process spawned above.
                    while True:
                        sleep(0.1)
                        gunicorn_master_proc_pid = read_pid_from_pidfile(pid_file)
                        if gunicorn_master_proc_pid:
                            break

                    # Run Gunicorn monitor
                    gunicorn_master_proc = psutil.Process(gunicorn_master_proc_pid)
                    monitor_gunicorn(gunicorn_master_proc)
        else:
            gunicorn_master_proc = subprocess.Popen(run_args, close_fds=True)
            monitor_gunicorn(gunicorn_master_proc)
def setUp(self):
    """Disable webserver authentication and build a test client."""
    configuration.test_mode()
    configuration.conf.set("webserver", "authenticate", "False")
    web_app = application.create_app()
    web_app.config['TESTING'] = True
    self.app = web_app.test_client()
def test_flask_blueprints(self):
    """The plugin's blueprint must be registered on the application."""
    flask_app = create_app(testing=True)
    registered = flask_app.blueprints['test_plugin']
    self.assertIsInstance(registered, Blueprint)
def setUp(self):
    """Enter test mode and prepare the CLI parser and example DagBag."""
    configuration.test_mode()
    web_app = application.create_app()
    web_app.config['TESTING'] = True
    self.parser = cli.get_parser()
    self.dagbag = models.DagBag(dag_folder=DEV_NULL,
                                include_examples=True)
def test_constructor_no_proxyfix(self):
    """By default the WSGI app must not be wrapped in ProxyFix."""
    app, _ = application.create_app(session=Session, testing=True)
    # assertNotIsInstance gives a descriptive failure message, unlike
    # assertFalse(isinstance(...)) which only reports "True is not false".
    self.assertNotIsInstance(app.wsgi_app, ProxyFix)
def setUp(self):
    """Build a CSRF-free test client."""
    conf.load_test_config()
    web_app = application.create_app(testing=True)
    web_app.config['WTF_CSRF_METHODS'] = []
    self.app = web_app.test_client()
def setUp(self):
    """Create the application and its appbuilder for each test."""
    created = application.create_app(testing=True)
    self.app, self.appbuilder = created
def setUpClass(cls) -> None:
    """Create one shared application instance for all tests in this class."""
    super().setUpClass()
    shared_app = app.create_app(testing=True)  # type:ignore
    cls.app = shared_app
def setUpClass(cls):
    """Configure the ORM and build a class-wide app and session factory."""
    settings.configure_orm()
    cls.session = settings.Session
    cls.app = app.create_app(testing=True)
def setUp(self):
    """Prepare a CSRF-free test client for task-instance views."""
    super(TestTaskInstanceView, self).setUp()
    configuration.load_test_config()
    web_app = application.create_app(testing=True)
    web_app.config['WTF_CSRF_METHODS'] = []
    self.app = web_app.test_client()
def setUp(self):
    """Create a fresh test application for each test."""
    fresh_app = application.create_app(testing=True)
    self.app = fresh_app
def test_should_response_200_serialized(self):
    """DAG details must be served correctly from the serialized DAG in the DB.

    Builds an app whose DagBag reads only from the database, writes the
    serialized DAG, and checks both that app and the regular client return
    the same details payload.
    """
    # The payload both clients must return (defined once; the original
    # duplicated this dict verbatim).
    expected = {
        "catchup": True,
        "concurrency": 16,
        "dag_id": "test_dag",
        "dag_run_timeout": None,
        "default_view": "tree",
        "description": None,
        "doc_md": "details",
        "fileloc": __file__,
        "is_paused": None,
        "is_subdag": False,
        "orientation": "LR",
        "owners": [],
        "schedule_interval": {
            "__type": "TimeDelta",
            "days": 1,
            "microseconds": 0,
            "seconds": 0,
        },
        "start_date": "2020-06-15T00:00:00+00:00",
        "tags": None,
        "timezone": "Timezone('UTC')",
    }

    # Create empty app with empty dagbag to check if DAG is read from db
    with conf_vars({
        ("api", "auth_backend"): "tests.test_utils.remote_user_api_auth_backend"
    }):
        app_serialized = app.create_app(testing=True)
    dag_bag = DagBag(os.devnull, include_examples=False, read_dags_from_db=True)
    app_serialized.dag_bag = dag_bag
    client = app_serialized.test_client()

    SerializedDagModel.write_dag(self.dag)

    response = client.get(f"/api/v1/dags/{self.dag_id}/details",
                          environ_overrides={'REMOTE_USER': "******"})
    assert response.status_code == 200
    assert response.json == expected

    # The regular (non-serialized) client must return the identical payload.
    response = self.client.get(f"/api/v1/dags/{self.dag_id}/details",
                               environ_overrides={'REMOTE_USER': "******"})
    assert response.status_code == 200
    assert response.json == expected
def setUp(self):
    """Build a test client for the dag-runs endpoint."""
    super(TestDagRunsEndpoint, self).setUp()
    configuration.load_test_config()
    web_app = application.create_app(testing=True)
    self.app = web_app.test_client()
def setUp(self):
    """Prepare a CSRF-free client and sync the example DAG to the DB."""
    web_app = application.create_app(testing=True)
    web_app.config['WTF_CSRF_METHODS'] = []
    self.app = web_app.test_client()
    self.session = Session()
    example_dag = models.DagBag().get_dag("example_bash_operator")
    example_dag.sync_to_db()
def tearDownClass(cls) -> None:
    """Delete the shared test user, then replace the class app with a fresh one."""
    delete_user(cls.app, username="******")  # type: ignore
    fresh_app = app.create_app(testing=True)  # type:ignore
    cls.app = fresh_app
def setUp(self):
    """Build a test client for the experimental API."""
    super(TestApiExperimental, self).setUp()
    configuration.load_test_config()
    web_app = application.create_app(testing=True)
    self.app = web_app.test_client()
def setUp(self):
    """Create an app (discarding the appbuilder) and keep its test client."""
    super().setUp()
    flask_app, _ = application.create_app(testing=True)
    self.app = flask_app.test_client()
def setUp(self):
    """Create the app, grab its appbuilder, and reset role state."""
    from airflow.www import app as application
    created = application.create_app(testing=True)
    self.app = created
    self.appbuilder = created.appbuilder  # pylint: disable=no-member
    self.clear_roles_and_roles()
def setUp(self):
    """Load the test configuration and build a test client."""
    from airflow import configuration
    from airflow.www import app as application
    configuration.load_test_config()
    self.app = application.create_app(testing=True).test_client()
def setUp(self):
    """Load the test configuration and create the app/appbuilder pair."""
    conf.load_test_config()
    created = application.create_app(testing=True)
    self.app, self.appbuilder = created
def setUp(self):
    """Create the app and appbuilder using an explicit session factory."""
    from airflow.www import app as application
    created = application.create_app(session=Session, testing=True)
    self.app, self.appbuilder = created
def setUp(self):
    """Build a test client for the dag-runs endpoint, ignoring the appbuilder."""
    super(TestDagRunsEndpoint, self).setUp()
    configuration.load_test_config()
    flask_app, _ = application.create_app(testing=True)
    self.app = flask_app.test_client()
def setUp(self):
    """Create the application and expose its appbuilder."""
    created = application.create_app(testing=True)
    self.app = created
    self.appbuilder = created.appbuilder  # pylint: disable=no-member
def setUpClass(cls) -> None:
    """Create the shared app with DAG parsing skipped for speed."""
    super().setUpClass()
    with mock.patch.dict("os.environ", SKIP_DAGS_PARSING="True"):
        shared_app = app.create_app(testing=True)  # type:ignore
    cls.app = shared_app
def setUp(self):
    """Enter test mode and build a test client."""
    configuration.test_mode()
    web_app = create_app()
    web_app.config['TESTING'] = True
    self.app = web_app.test_client()
def setUp(self):
    """Create the app/appbuilder pair and reset role state."""
    from airflow.www import app as application
    created_app, created_builder = application.create_app(testing=True)
    self.app = created_app
    self.appbuilder = created_builder
    self.clear_roles_and_roles()
def test_should_response_200_serialized(self):
    """DAG details must be identical whether served from the serialized DB
    representation or the regular app's dagbag."""
    # Expected payload for both clients (defined once; the original
    # duplicated this dict verbatim).
    expected = {
        'catchup': True,
        'concurrency': 16,
        'dag_id': 'test_dag',
        'dag_run_timeout': None,
        'default_view': 'tree',
        'description': None,
        'doc_md': 'details',
        'fileloc': __file__,
        'is_paused': None,
        'is_subdag': False,
        'orientation': 'LR',
        'owners': [],
        'schedule_interval': {
            '__type': 'TimeDelta',
            'days': 1,
            'microseconds': 0,
            'seconds': 0
        },
        'start_date': '2020-06-15T00:00:00+00:00',
        'tags': None,
        'timezone': "Timezone('UTC')"
    }

    # Create empty app with empty dagbag to check if DAG is read from db
    app_serialized = app.create_app(testing=True)
    dag_bag = DagBag(os.devnull, include_examples=False, read_dags_from_db=True)
    app_serialized.dag_bag = dag_bag
    client = app_serialized.test_client()

    SerializedDagModel.write_dag(self.dag)

    response = client.get(f"/api/v1/dags/{self.dag_id}/details",
                          environ_overrides={'REMOTE_USER': "******"})
    assert response.status_code == 200
    assert response.json == expected

    # The regular client must serve the same payload.
    response = self.client.get(f"/api/v1/dags/{self.dag_id}/details",
                               environ_overrides={'REMOTE_USER': "******"})
    assert response.status_code == 200
    assert response.json == expected
def setUpClass(cls) -> None:
    """Build the shared app with remote-user auth and create an admin user."""
    super().setUpClass()
    auth_override = {
        ("api", "auth_backend"): "tests.test_utils.remote_user_api_auth_backend"
    }
    with conf_vars(auth_override):
        cls.app = app.create_app(testing=True)  # type:ignore
    # TODO: Add new role for each view to test permission.
    create_user(cls.app, username="******", role="Admin")  # type: ignore
def webserver(args):
    """Starts Airflow Webserver"""
    print(settings.HEADER)

    # Check for old/insecure config, and fail safe (i.e. don't launch) if the config is wildly insecure.
    if conf.get('webserver', 'secret_key') == 'temporary_key':
        from rich import print as rich_print
        rich_print(
            "[red][bold]ERROR:[/bold] The `secret_key` setting under the webserver config has an insecure "
            "value - Airflow has failed safe and refuses to start. Please change this value to a new, "
            "per-environment, randomly generated string, for example using this command `[cyan]openssl rand "
            "-hex 30[/cyan]`",
            file=sys.stderr,
        )
        sys.exit(1)

    # CLI arguments win over the corresponding [webserver] config entries.
    access_logfile = args.access_logfile or conf.get('webserver', 'access_logfile')
    error_logfile = args.error_logfile or conf.get('webserver', 'error_logfile')
    access_logformat = args.access_logformat or conf.get(
        'webserver', 'access_logformat')
    num_workers = args.workers or conf.get('webserver', 'workers')
    worker_timeout = args.worker_timeout or conf.get(
        'webserver', 'web_server_worker_timeout')
    ssl_cert = args.ssl_cert or conf.get('webserver', 'web_server_ssl_cert')
    ssl_key = args.ssl_key or conf.get('webserver', 'web_server_ssl_key')
    # An SSL cert and key must be provided together (or not at all).
    if not ssl_cert and ssl_key:
        raise AirflowException(
            'An SSL certificate must also be provided for use with ' + ssl_key)
    if ssl_cert and not ssl_key:
        raise AirflowException(
            'An SSL key must also be provided for use with ' + ssl_cert)

    if args.debug:
        # Debug mode: run the Flask development server in-process.
        print(
            f"Starting the web server on port {args.port} and host {args.hostname}."
        )
        app = create_app(testing=conf.getboolean('core', 'unit_test_mode'))
        app.run(
            debug=True,
            use_reloader=not app.config['TESTING'],
            port=args.port,
            host=args.hostname,
            ssl_context=(ssl_cert, ssl_key) if ssl_cert and ssl_key else None,
        )
    else:
        # This pre-warms the cache, and makes possible errors
        # get reported earlier (i.e. before demonization)
        os.environ['SKIP_DAGS_PARSING'] = 'True'
        app = cached_app(None)
        os.environ.pop('SKIP_DAGS_PARSING')

        pid_file, stdout, stderr, log_file = setup_locations(
            "webserver", args.pid, args.stdout, args.stderr, args.log_file)

        # Check if webserver is already running if not, remove old pidfile
        check_if_pidfile_process_is_running(pid_file=pid_file, process_name="webserver")

        print(
            textwrap.dedent('''\
                Running the Gunicorn Server with:
                Workers: {num_workers} {workerclass}
                Host: {hostname}:{port}
                Timeout: {worker_timeout}
                Logfiles: {access_logfile} {error_logfile}
                Access Logformat: {access_logformat}
                =================================================================\
            '''.format(
                num_workers=num_workers,
                workerclass=args.workerclass,
                hostname=args.hostname,
                port=args.port,
                worker_timeout=worker_timeout,
                access_logfile=access_logfile,
                error_logfile=error_logfile,
                access_logformat=access_logformat,
            )))

        run_args = [
            'gunicorn',
            '--workers', str(num_workers),
            '--worker-class', str(args.workerclass),
            '--timeout', str(worker_timeout),
            '--bind', args.hostname + ':' + str(args.port),
            '--name', 'airflow-webserver',
            '--pid', pid_file,
            '--config', 'python:airflow.www.gunicorn_config',
        ]

        if args.access_logfile:
            run_args += ['--access-logfile', str(args.access_logfile)]

        if args.error_logfile:
            run_args += ['--error-logfile', str(args.error_logfile)]

        if args.access_logformat and args.access_logformat.strip():
            run_args += ['--access-logformat', str(args.access_logformat)]

        if args.daemon:
            run_args += ['--daemon']

        if ssl_cert:
            run_args += ['--certfile', ssl_cert, '--keyfile', ssl_key]

        run_args += ["airflow.www.app:cached_app()"]

        gunicorn_master_proc = None

        def kill_proc(signum, _):
            # Graceful shutdown: terminate, wait up to 30s, then hard-kill
            # if the master is still alive, and exit this monitor process.
            log.info("Received signal: %s. Closing gunicorn.", signum)
            gunicorn_master_proc.terminate()
            with suppress(TimeoutError):
                gunicorn_master_proc.wait(timeout=30)
            if gunicorn_master_proc.poll() is not None:
                gunicorn_master_proc.kill()
            sys.exit(0)

        def monitor_gunicorn(gunicorn_master_pid: int):
            # Register signal handlers
            signal.signal(signal.SIGINT, kill_proc)
            signal.signal(signal.SIGTERM, kill_proc)

            # These run forever until SIG{INT, TERM, KILL, ...} signal is sent
            GunicornMonitor(
                gunicorn_master_pid=gunicorn_master_pid,
                num_workers_expected=num_workers,
                master_timeout=conf.getint('webserver', 'web_server_master_timeout'),
                worker_refresh_interval=conf.getint(
                    'webserver', 'worker_refresh_interval', fallback=30),
                worker_refresh_batch_size=conf.getint(
                    'webserver', 'worker_refresh_batch_size', fallback=1),
                reload_on_plugin_change=conf.getboolean(
                    'webserver', 'reload_on_plugin_change', fallback=False),
            ).start()

        if args.daemon:
            # Daemonized path: keep the log handle open across the fork and
            # write a separate "-monitor" pidfile for this process.
            handle = setup_logging(log_file)

            base, ext = os.path.splitext(pid_file)
            with open(stdout, 'w+') as stdout, open(stderr, 'w+') as stderr:
                ctx = daemon.DaemonContext(
                    pidfile=TimeoutPIDLockFile(f"{base}-monitor{ext}", -1),
                    files_preserve=[handle],
                    stdout=stdout,
                    stderr=stderr,
                )
                with ctx:
                    subprocess.Popen(run_args, close_fds=True)

                    # Reading pid of gunicorn master as it will be different that
                    # the one of process spawned above.
                    while True:
                        sleep(0.1)
                        gunicorn_master_proc_pid = read_pid_from_pidfile(pid_file)
                        if gunicorn_master_proc_pid:
                            break

                    # Run Gunicorn monitor
                    gunicorn_master_proc = psutil.Process(gunicorn_master_proc_pid)
                    monitor_gunicorn(gunicorn_master_proc.pid)
        else:
            with subprocess.Popen(run_args, close_fds=True) as gunicorn_master_proc:
                monitor_gunicorn(gunicorn_master_proc.pid)
def setUpClass(cls):
    """Prepare a shared CSRF-free test client for connection-view tests."""
    super(TestConnectionModelView, cls).setUpClass()
    web_app = application.create_app(testing=True)
    web_app.config['WTF_CSRF_METHODS'] = []
    cls.app = web_app.test_client()
def factory():
    """Build the app while asserting the expected deprecation warning fires."""
    # Make sure we don't issue a warning in the test summary about deprecation
    with pytest.deprecated_call():
        created = app.create_app(testing=True)  # type:ignore
    return created
def setUp(self):
    """Prepare a CSRF-free client for the variable-import view."""
    super(TestVarImportView, self).setUp()
    configuration.load_test_config()
    web_app = application.create_app(testing=True)
    web_app.config['WTF_CSRF_METHODS'] = []
    self.app = web_app.test_client()
def app():
    """Fixture returning a freshly created test application."""
    from airflow.www import app as www_app
    return www_app.create_app(testing=True)
def setUp(self):
    """Prepare a CSRF-free client for the variable-import view."""
    super(TestVarImportView, self).setUp()
    web_app = application.create_app(testing=True)
    web_app.config['WTF_CSRF_METHODS'] = []
    self.app = web_app.test_client()