def resetdb(args):
    print("DB: " + repr(settings.engine.url))
    if args.yes or input("This will drop existing tables if they exist. "
                         "Proceed? (y/n)").upper() == "Y":
        logging.basicConfig(level=settings.LOGGING_LEVEL,
                            format=settings.SIMPLE_LOG_FORMAT)
        db_utils.resetdb()
    else:
        print("Bail.")
def initial_db_init():
    if os.environ.get("RUN_AIRFLOW_1_10") == "true":
        print("Attempting to reset the db using airflow command")
        os.system("airflow resetdb -y")
    else:
        from airflow.utils import db
        db.resetdb()
def reset_db():
    """
    Wrapper function that calls Airflow's resetdb function.
    """
    from airflow.utils.db import resetdb
    resetdb()
def resetdb(args):
    """Resets the metadata database."""
    print("DB: " + repr(settings.engine.url))
    if args.yes or input("This will drop existing tables if they exist. "
                         "Proceed? (y/n)").upper() == "Y":
        db.resetdb()
    else:
        print("Cancelled")
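# A minimal sketch of driving a CLI handler like the one above
# non-interactively. The argparse wiring and the "-y/--yes" flag name are
# illustrative assumptions, not the actual Airflow CLI parser.
import argparse

parser = argparse.ArgumentParser(description="Reset the Airflow metadata database")
parser.add_argument("-y", "--yes", action="store_true",
                    help="do not prompt for confirmation")
args = parser.parse_args(["--yes"])
resetdb(args)  # with yes=True the confirmation prompt is skipped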
def breeze_test_helper(request):
    """
    Helper that sets up the Airflow testing environment.
    It does the same thing as the old 'run-tests' script.
    """
    # fixme: this should use some other env variable ex. RUNNING_ON_K8S
    if os.environ.get("SKIP_INIT_DB"):
        print("Skipping db initialization. Tests do not require database")
        return

    print(" AIRFLOW ".center(60, "="))

    # Set up test environment for breeze
    home = os.getcwd()
    airflow_home = os.environ.get("AIRFLOW_HOME") or home

    os.environ["AIRFLOW_SOURCES"] = home
    os.environ["AIRFLOW__CORE__DAGS_FOLDER"] = os.path.join(home, "tests", "dags")
    os.environ["AIRFLOW__CORE__UNIT_TEST_MODE"] = "True"
    os.environ["AWS_DEFAULT_REGION"] = os.environ.get("AWS_DEFAULT_REGION") or "us-east-1"

    print(f"Airflow home {airflow_home}\nHome of the user: {home}")

    # Initialize Airflow db if required
    pid_file = os.path.join(home, ".airflow_db_initialised")
    if request.config.option.db_init:
        print("Initializing the DB - forced with --with-db-init switch.")
        try:
            db.initdb()
        except:  # pylint: disable=bare-except  # noqa
            print("Skipping db initialization because database already exists.")
        db.resetdb()
    elif not os.path.exists(pid_file):
        print(
            "Initializing the DB - first time after entering the container.\n"
            "You can force re-initialization of the database by adding the "
            "--with-db-init switch to run-tests."
        )
        try:
            db.initdb()
        except:  # pylint: disable=bare-except  # noqa
            print("Skipping db initialization because database already exists.")
        db.resetdb()
        # Create pid file
        with open(pid_file, "w+"):
            pass
    else:
        print(
            "Skipping initialization of the DB as it was initialized already.\n"
            "You can re-initialize the database by adding the --with-db-init "
            "flag when running tests."
        )

    # Initialize kerberos
    kerberos = os.environ.get("KRB5_KTNAME")
    if kerberos:
        subprocess.call(["kinit", "-kt", kerberos, "airflow"])
def reset_db():
    """
    Resets Airflow db.
    """
    from airflow.utils import db

    db.resetdb()
    yield
def resetdb(args):
    """Resets the metadata database."""
    print("DB: " + repr(settings.engine.url))
    if not (args.yes or input("This will drop existing tables if they exist. "
                              "Proceed? (y/n)").upper() == "Y"):
        raise SystemExit("Cancelled")
    db.resetdb(skip_init=args.skip_init)
def dagbag():
    os.environ['AIRFLOW__CORE__SQL_ALCHEMY_CONN'] = 'sqlite://'
    os.environ['MARQUEZ_NAMESPACE'] = 'test-marquez'

    from airflow import settings
    import airflow.utils.db as db_utils
    db_utils.resetdb(settings.RBAC)

    from airflow.models import DagBag
    dagbag = DagBag(include_examples=False)
    return dagbag
def reset_db_fixture():
    from airflow.utils.db import resetdb
    try:
        # Airflow 1.10
        resetdb(None)  # pylint: disable=too-many-function-args
    except TypeError:
        # Airflow 2.0
        resetdb()  # pylint: disable=no-value-for-parameter
    yield
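# A minimal sketch of registering the generator above with pytest so every
# test in a module starts from a freshly reset database. The fixture name,
# the autouse choice, and the test below are illustrative assumptions.
import pytest

@pytest.fixture(autouse=True)
def fresh_airflow_db():
    # Delegate to the version-agnostic generator defined above.
    yield from reset_db_fixture()

def test_sees_freshly_reset_db():
    # By the time the test body runs, resetdb() has dropped and
    # recreated the Airflow metadata tables.
    pass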
def setUp(self) -> None:
    """
    We want to avoid random errors while the database gets reset - those
    are apparently triggered by the parser trying to parse DAGs while
    the tables are dropped. We move the dags temporarily out of the dags
    folder and move them back after the reset.
    """
    dag_folder = resolve_dags_folder()
    with empty_dags_directory(dag_folder):
        db.resetdb()
    super().setUp()
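# A hedged sketch of what a helper like empty_dags_directory (used above)
# could look like; this implementation built on tempfile/shutil is an
# assumption, not the project's actual helper.
import contextlib
import os
import shutil
import tempfile

@contextlib.contextmanager
def empty_dags_directory(dag_folder):
    # Move DAG files aside so the parser cannot touch dropped tables,
    # then restore them once the database reset has finished.
    backup = tempfile.mkdtemp()
    try:
        for name in os.listdir(dag_folder):
            shutil.move(os.path.join(dag_folder, name), backup)
        yield dag_folder
    finally:
        for name in os.listdir(backup):
            shutil.move(os.path.join(backup, name), dag_folder)
        shutil.rmtree(backup)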
def setUp(self):
    if self.require_local_executor:
        self._check_local_executor_setup()
    try:
        # We want to avoid random errors while the database gets reset - those
        # are apparently triggered by the parser trying to parse DAGs while
        # the tables are dropped. We move the dags temporarily out of the dags
        # folder and move them back after the reset.
        self._store_dags_to_temporary_directory()
        try:
            db_utils.resetdb(settings.RBAC)
        finally:
            self._restore_dags_from_temporary_directory()
        self._symlink_dag_and_associated_files()
        super(DagGcpSystemTestCase, self).setUp()
    except Exception as e:
        # In case of any error during setup - restore the authentication
        self.gcp_authenticator.gcp_restore_authentication()
        raise e
def setUp(self):
    if self.require_local_executor:
        self._check_local_executor_setup()
    try:
        # We want to avoid random errors while the database gets reset - those
        # are apparently triggered by the parser trying to parse DAGs while
        # the tables are dropped. We move the dags temporarily out of the dags
        # folder and move them back after the reset.
        self._store_dags_to_temporary_directory()
        try:
            db_utils.upgradedb()
            db_utils.resetdb()
        finally:
            self._restore_dags_from_temporary_directory()
        self._symlink_dag_and_associated_files()
        super(DagGcpSystemTestCase, self).setUp()
    except Exception as e:
        # In case of any error during setup - restore the authentication
        self.gcp_authenticator.gcp_restore_authentication()
        raise e
def tearDownClass(cls):
    resetdb()
def reset_db():
    """
    Resets Airflow db.
    """
    db.resetdb(rbac=True)
    yield
def breeze_test_helper(request):
    """
    Helper that sets up the Airflow testing environment.
    It does the same thing as the old 'run-tests' script.
    """
    # fixme: this should use some other env variable ex. RUNNING_ON_K8S
    if os.environ.get("SKIP_INIT_DB"):
        print("Skipping db initialization. Tests do not require database")
        return

    print(" AIRFLOW ".center(60, "="))

    # Set up test environment for breeze
    home = os.path.expanduser("~")
    airflow_home = os.environ.get("AIRFLOW_HOME") or os.path.join(home, "airflow")

    print(f"Home of the user: {home}\nAirflow home {airflow_home}")

    from airflow.utils import db

    # Initialize Airflow db if required
    lock_file = os.path.join(airflow_home, ".airflow_db_initialised")
    if request.config.option.db_init:
        print("Initializing the DB - forced with --with-db-init switch.")
        try:
            db.initdb()
        except:  # pylint: disable=bare-except  # noqa
            print("Skipping db initialization because database already exists.")
        db.resetdb()
    elif not os.path.exists(lock_file):
        print(
            "Initializing the DB - first time after entering the container.\n"
            "You can force re-initialization of the database by adding the "
            "--with-db-init switch to run-tests."
        )
        try:
            db.initdb()
        except:  # pylint: disable=bare-except  # noqa
            print("Skipping db initialization because database already exists.")
        db.resetdb()
        # Create lock file
        with open(lock_file, "w+"):
            pass
    else:
        print(
            "Skipping initialization of the DB as it was initialized already.\n"
            "You can re-initialize the database by adding the --with-db-init "
            "flag when running tests."
        )

    integration_kerberos = os.environ.get("INTEGRATION_KERBEROS")
    if integration_kerberos == "true":
        # Initialize kerberos
        kerberos = os.environ.get("KRB5_KTNAME")
        if kerberos:
            subprocess.check_call(["kinit", "-kt", kerberos, "airflow"])
        else:
            print("Kerberos enabled! Please set up the KRB5_KTNAME environment variable")
            sys.exit(1)
def init_db():
    from airflow.utils import db

    # print("Attempting to reset the db using airflow command")
    # os.system("airflow resetdb -y")
    db.resetdb({})
def reset_db():
    """
    Resets Airflow db.
    """
    db.resetdb()
    yield
def before_tag(context, tag):
    if tag == 'db':
        # make sure the test db is clean
        db_utils.resetdb()
def test_cli_connections_add_delete(self):
    # TODO: We should not delete the entire database, but only reset the
    # contents of the Connection table.
    db.resetdb()

    # Add connections
    uri = 'postgresql://*****:*****@host:5432/airflow'
    with mock.patch('sys.stdout', new_callable=io.StringIO) as mock_stdout:
        connection_command.connections_add(
            self.parser.parse_args(
                ['connections', 'add', 'new1', '--conn_uri=%s' % uri]))
        connection_command.connections_add(
            self.parser.parse_args(
                ['connections', 'add', 'new2', '--conn_uri=%s' % uri]))
        connection_command.connections_add(
            self.parser.parse_args(
                ['connections', 'add', 'new3', '--conn_uri=%s' % uri,
                 '--conn_extra', "{'extra': 'yes'}"]))
        connection_command.connections_add(
            self.parser.parse_args(
                ['connections', 'add', 'new4', '--conn_uri=%s' % uri,
                 '--conn_extra', "{'extra': 'yes'}"]))
        connection_command.connections_add(
            self.parser.parse_args(
                ['connections', 'add', 'new5',
                 '--conn_type=hive_metastore', '--conn_login=airflow',
                 '--conn_password=airflow', '--conn_host=host',
                 '--conn_port=9083', '--conn_schema=airflow']))
        connection_command.connections_add(
            self.parser.parse_args(
                ['connections', 'add', 'new6', '--conn_uri', "",
                 '--conn_type=google_cloud_platform',
                 '--conn_extra', "{'extra': 'yes'}"]))
    stdout = mock_stdout.getvalue()

    # Check addition stdout
    lines = [l for l in stdout.split('\n') if len(l) > 0]
    self.assertListEqual(lines, [
        ("\tSuccessfully added `conn_id`=new1 : "
         "postgresql://*****:*****@host:5432/airflow"),
        ("\tSuccessfully added `conn_id`=new2 : "
         "postgresql://*****:*****@host:5432/airflow"),
        ("\tSuccessfully added `conn_id`=new3 : "
         "postgresql://*****:*****@host:5432/airflow"),
        ("\tSuccessfully added `conn_id`=new4 : "
         "postgresql://*****:*****@host:5432/airflow"),
        ("\tSuccessfully added `conn_id`=new5 : "
         "hive_metastore://airflow:airflow@host:9083/airflow"),
        ("\tSuccessfully added `conn_id`=new6 : "
         "google_cloud_platform://:@:"),
    ])

    # Attempt to add duplicate
    with mock.patch('sys.stdout', new_callable=io.StringIO) as mock_stdout:
        connection_command.connections_add(
            self.parser.parse_args(
                ['connections', 'add', 'new1', '--conn_uri=%s' % uri]))
    stdout = mock_stdout.getvalue()

    # Check stdout for addition attempt
    lines = [l for l in stdout.split('\n') if len(l) > 0]
    self.assertListEqual(lines, [
        "\tA connection with `conn_id`=new1 already exists",
    ])

    # Attempt to add without providing conn_uri
    with self.assertRaises(SystemExit) as exc:
        connection_command.connections_add(
            self.parser.parse_args(['connections', 'add', 'new']))
    self.assertEqual(
        exc.exception.code,
        "The following args are required to add a connection: "
        "['conn_uri or conn_type']")

    # Check the added connections
    session = settings.Session()
    extra = {
        'new1': None,
        'new2': None,
        'new3': "{'extra': 'yes'}",
        'new4': "{'extra': 'yes'}",
    }
    # Note: the original loop used range(1, 6), which left the new6 branch
    # below unreachable; range(1, 7) covers all six connections.
    for index in range(1, 7):
        conn_id = 'new%s' % index
        result = (session.query(Connection)
                  .filter(Connection.conn_id == conn_id).first())
        result = (result.conn_id, result.conn_type, result.host,
                  result.port, result.get_extra())
        if conn_id in ['new1', 'new2', 'new3', 'new4']:
            self.assertEqual(
                result, (conn_id, 'postgres', 'host', 5432, extra[conn_id]))
        elif conn_id == 'new5':
            self.assertEqual(
                result, (conn_id, 'hive_metastore', 'host', 9083, None))
        elif conn_id == 'new6':
            self.assertEqual(
                result, (conn_id, 'google_cloud_platform', None, None,
                         "{'extra': 'yes'}"))

    # Delete connections
    with mock.patch('sys.stdout', new_callable=io.StringIO) as mock_stdout:
        connection_command.connections_delete(
            self.parser.parse_args(['connections', 'delete', 'new1']))
        connection_command.connections_delete(
            self.parser.parse_args(['connections', 'delete', 'new2']))
        connection_command.connections_delete(
            self.parser.parse_args(['connections', 'delete', 'new3']))
        connection_command.connections_delete(
            self.parser.parse_args(['connections', 'delete', 'new4']))
        connection_command.connections_delete(
            self.parser.parse_args(['connections', 'delete', 'new5']))
        connection_command.connections_delete(
            self.parser.parse_args(['connections', 'delete', 'new6']))
    stdout = mock_stdout.getvalue()

    # Check deletion stdout
    lines = [l for l in stdout.split('\n') if len(l) > 0]
    self.assertListEqual(lines, [
        "\tSuccessfully deleted `conn_id`=new1",
        "\tSuccessfully deleted `conn_id`=new2",
        "\tSuccessfully deleted `conn_id`=new3",
        "\tSuccessfully deleted `conn_id`=new4",
        "\tSuccessfully deleted `conn_id`=new5",
        "\tSuccessfully deleted `conn_id`=new6",
    ])

    # Check deletions
    for index in range(1, 7):
        conn_id = 'new%s' % index
        result = (session.query(Connection)
                  .filter(Connection.conn_id == conn_id).first())
        self.assertIsNone(result)

    # Attempt to delete a non-existing connection
    with mock.patch('sys.stdout', new_callable=io.StringIO) as mock_stdout:
        connection_command.connections_delete(
            self.parser.parse_args(['connections', 'delete', 'fake']))
    stdout = mock_stdout.getvalue()

    # Check deletion attempt stdout
    lines = [l for l in stdout.split('\n') if len(l) > 0]
    self.assertListEqual(lines, [
        "\tDid not find a connection with `conn_id`=fake",
    ])

    session.close()
def reset_db():
    from airflow.utils.db import resetdb
    resetdb()