Example #1
from airflow.providers.sqlite.hooks.sqlite import SqliteHook


def insert_sqlite_hook():
    sqlite_hook = SqliteHook()  # no conn id given; uses the default 'sqlite_default'

    # Each tuple maps onto target_fields in order.
    rows = [('James', '11'), ('James', '22'), ('James', '33')]
    target_fields = ['first_name', 'last_name']
    sqlite_hook.insert_rows(table='Customers',
                            rows=rows,
                            target_fields=target_fields)
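Called with no arguments, SqliteHook falls back to the default sqlite_default connection. A minimal usage sketch, assuming that connection is configured, which creates the Customers table the function above expects:

from airflow.providers.sqlite.hooks.sqlite import SqliteHook

hook = SqliteHook()
hook.run("CREATE TABLE IF NOT EXISTS Customers (first_name TEXT, last_name TEXT)")
insert_sqlite_hook()
print(hook.get_records("SELECT * FROM Customers"))  # three ('James', ...) rows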
Example #2
from airflow.providers.sqlite.hooks.sqlite import SqliteHook


def replace_sqlite_hook():
    sqlite_hook = SqliteHook("sqlite_default")

    rows = [('James', '11'), ('James', '22'), ('James', '33')]
    target_fields = ['first_name', 'last_name']
    # replace=True makes insert_rows emit REPLACE INTO rather than INSERT INTO.
    sqlite_hook.insert_rows(table='Customers',
                            rows=rows,
                            target_fields=target_fields,
                            replace=True)
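One caveat worth hedging: in SQLite, REPLACE only deduplicates when the table carries a PRIMARY KEY or UNIQUE constraint; without one, replace=True behaves like a plain insert. A sketch that gives Customers such a constraint before upserting:

from airflow.providers.sqlite.hooks.sqlite import SqliteHook

hook = SqliteHook("sqlite_default")
hook.run("CREATE TABLE IF NOT EXISTS Customers "
         "(first_name TEXT, last_name TEXT, UNIQUE (first_name, last_name))")
replace_sqlite_hook()
replace_sqlite_hook()  # second call replaces the same three rows instead of duplicating them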
Example #3
    def test_env_var_priority(self):
        conn = SqliteHook.get_connection(conn_id='airflow_db')
        self.assertNotEqual('ec2.compute.com', conn.host)

        with mock.patch.dict('os.environ', {
            'AIRFLOW_CONN_AIRFLOW_DB': 'postgres://username:password@ec2.compute.com:5432/the_database',
        }):
            conn = SqliteHook.get_connection(conn_id='airflow_db')
            self.assertEqual('ec2.compute.com', conn.host)
            self.assertEqual('the_database', conn.schema)
            self.assertEqual('username', conn.login)
            self.assertEqual('password', conn.password)
            self.assertEqual(5432, conn.port)
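The test above relies on Airflow resolving connections from AIRFLOW_CONN_<CONN_ID> environment variables before consulting the metadata database. A minimal sketch of the same mechanism outside a test class (the conn id my_db is hypothetical):

import os

from airflow.providers.sqlite.hooks.sqlite import SqliteHook

# The variable name is AIRFLOW_CONN_ plus the upper-cased conn id.
os.environ['AIRFLOW_CONN_MY_DB'] = 'postgres://username:password@ec2.compute.com:5432/the_database'

conn = SqliteHook.get_connection(conn_id='my_db')
print(conn.host, conn.port, conn.schema)  # ec2.compute.com 5432 the_database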
Example #4
    def test_using_unix_socket_env_var(self):
        conn = SqliteHook.get_connection(conn_id='test_uri_no_creds')
        self.assertEqual('ec2.compute.com', conn.host)
        self.assertEqual('the_database', conn.schema)
        self.assertIsNone(conn.login)
        self.assertIsNone(conn.password)
        self.assertIsNone(conn.port)
Example #5
    def test_using_env_var(self):
        conn = SqliteHook.get_connection(conn_id='test_uri')
        self.assertEqual('ec2.compute.com', conn.host)
        self.assertEqual('the_database', conn.schema)
        self.assertEqual('username', conn.login)
        self.assertEqual('password', conn.password)
        self.assertEqual(5432, conn.port)
Example #6
    def test_env_var_priority(self):
        conn = SqliteHook.get_connection(conn_id='airflow_db')
        assert 'ec2.compute.com' != conn.host

        with mock.patch.dict(
            'os.environ',
            {
                'AIRFLOW_CONN_AIRFLOW_DB': 'postgres://username:password@ec2.compute.com:5432/the_database',
            },
        ):
            conn = SqliteHook.get_connection(conn_id='airflow_db')
            assert 'ec2.compute.com' == conn.host
            assert 'the_database' == conn.schema
            assert 'username' == conn.login
            assert 'password' == conn.password
            assert 5432 == conn.port
Example #7
 def test_using_unix_socket_env_var(self):
     conn = SqliteHook.get_connection(conn_id='test_uri_no_creds')
     assert 'ec2.compute.com' == conn.host
     assert 'the_database' == conn.schema
     assert conn.login is None
     assert conn.password is None
     assert conn.port is None
Example #8
 def tearDown(self):
     tables_to_drop = ['test_airflow', 'test_airflow2']
     from airflow.providers.sqlite.hooks.sqlite import SqliteHook

     # As a context manager, the sqlite3 connection commits on success and
     # rolls back on error; it is not closed automatically.
     with SqliteHook().get_conn() as conn:
         cur = conn.cursor()
         for table in tables_to_drop:
             cur.execute(f"DROP TABLE IF EXISTS {table}")
Example #9
 def test_using_env_var(self):
     conn = SqliteHook.get_connection(conn_id='test_uri')
     assert 'ec2.compute.com' == conn.host
     assert 'the_database' == conn.schema
     assert 'username' == conn.login
     assert 'password' == conn.password
     assert 5432 == conn.port
Example #10
 def test_get_connections_env_var(self):
     conns = SqliteHook.get_connections(conn_id='test_uri')
     assert len(conns) == 1
     assert conns[0].host == 'ec2.compute.com'
     assert conns[0].schema == 'the_database'
     assert conns[0].login == 'username'
     assert conns[0].password == 'password'
     assert conns[0].port == 5432
Example #11
File: sqlite.py Project: ypatankar/airflow
 def execute(self, context: Mapping[Any, Any]) -> None:
     self.log.info('Executing: %s', self.sql)
     hook = SqliteHook(sqlite_conn_id=self.sqlite_conn_id)
     hook.run(self.sql, parameters=self.parameters)
Example #12
File: sqlite.py Project: rfreynol/airflow
 def execute(self, context):
     self.log.info('Executing: %s', self.sql)
     hook = SqliteHook(sqlite_conn_id=self.sqlite_conn_id)
     hook.run(self.sql, parameters=self.parameters)
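Both execute() methods above come from Airflow's SqliteOperator, which simply hands self.sql to SqliteHook.run. A sketch of how the operator is typically declared in a DAG (the task id and SQL are illustrative):

from airflow.providers.sqlite.operators.sqlite import SqliteOperator

create_customers = SqliteOperator(
    task_id='create_customers',
    sqlite_conn_id='sqlite_default',
    sql="CREATE TABLE IF NOT EXISTS Customers (first_name TEXT, last_name TEXT)",
)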
Example #13
 def get_hook(self):
     if self.conn_type == 'mysql':
         from airflow.providers.mysql.hooks.mysql import MySqlHook
         return MySqlHook(mysql_conn_id=self.conn_id)
     elif self.conn_type == 'google_cloud_platform':
         from airflow.gcp.hooks.bigquery import BigQueryHook
         return BigQueryHook(bigquery_conn_id=self.conn_id)
     elif self.conn_type == 'postgres':
         from airflow.providers.postgres.hooks.postgres import PostgresHook
         return PostgresHook(postgres_conn_id=self.conn_id)
     elif self.conn_type == 'pig_cli':
         from airflow.providers.apache.pig.hooks.pig import PigCliHook
         return PigCliHook(pig_cli_conn_id=self.conn_id)
     elif self.conn_type == 'hive_cli':
         from airflow.providers.apache.hive.hooks.hive import HiveCliHook
         return HiveCliHook(hive_cli_conn_id=self.conn_id)
     elif self.conn_type == 'presto':
         from airflow.providers.presto.hooks.presto import PrestoHook
         return PrestoHook(presto_conn_id=self.conn_id)
     elif self.conn_type == 'hiveserver2':
         from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
         return HiveServer2Hook(hiveserver2_conn_id=self.conn_id)
     elif self.conn_type == 'sqlite':
         from airflow.providers.sqlite.hooks.sqlite import SqliteHook
         return SqliteHook(sqlite_conn_id=self.conn_id)
     elif self.conn_type == 'jdbc':
         from airflow.providers.jdbc.hooks.jdbc import JdbcHook
         return JdbcHook(jdbc_conn_id=self.conn_id)
     elif self.conn_type == 'mssql':
         from airflow.providers.microsoft.mssql.hooks.mssql import MsSqlHook
         return MsSqlHook(mssql_conn_id=self.conn_id)
     elif self.conn_type == 'odbc':
         from airflow.providers.odbc.hooks.odbc import OdbcHook
         return OdbcHook(odbc_conn_id=self.conn_id)
     elif self.conn_type == 'oracle':
         from airflow.providers.oracle.hooks.oracle import OracleHook
         return OracleHook(oracle_conn_id=self.conn_id)
     elif self.conn_type == 'vertica':
         from airflow.providers.vertica.hooks.vertica import VerticaHook
         return VerticaHook(vertica_conn_id=self.conn_id)
     elif self.conn_type == 'cloudant':
         from airflow.providers.cloudant.hooks.cloudant import CloudantHook
         return CloudantHook(cloudant_conn_id=self.conn_id)
     elif self.conn_type == 'jira':
         from airflow.providers.jira.hooks.jira import JiraHook
         return JiraHook(jira_conn_id=self.conn_id)
     elif self.conn_type == 'redis':
         from airflow.providers.redis.hooks.redis import RedisHook
         return RedisHook(redis_conn_id=self.conn_id)
     elif self.conn_type == 'wasb':
         from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
         return WasbHook(wasb_conn_id=self.conn_id)
     elif self.conn_type == 'docker':
         from airflow.providers.docker.hooks.docker import DockerHook
         return DockerHook(docker_conn_id=self.conn_id)
     elif self.conn_type == 'azure_data_lake':
         from airflow.providers.microsoft.azure.hooks.azure_data_lake import AzureDataLakeHook
         return AzureDataLakeHook(azure_data_lake_conn_id=self.conn_id)
     elif self.conn_type == 'azure_cosmos':
         from airflow.providers.microsoft.azure.hooks.azure_cosmos import AzureCosmosDBHook
         return AzureCosmosDBHook(azure_cosmos_conn_id=self.conn_id)
     elif self.conn_type == 'cassandra':
         from airflow.providers.apache.cassandra.hooks.cassandra import CassandraHook
         return CassandraHook(cassandra_conn_id=self.conn_id)
     elif self.conn_type == 'mongo':
         from airflow.providers.mongo.hooks.mongo import MongoHook
         return MongoHook(conn_id=self.conn_id)
     elif self.conn_type == 'gcpcloudsql':
         from airflow.gcp.hooks.cloud_sql import CloudSQLDatabaseHook
         return CloudSQLDatabaseHook(gcp_cloudsql_conn_id=self.conn_id)
     elif self.conn_type == 'grpc':
         from airflow.providers.grpc.hooks.grpc import GrpcHook
         return GrpcHook(grpc_conn_id=self.conn_id)
     raise AirflowException("Unknown hook type {}".format(self.conn_type))
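This dispatch method belongs to Airflow's Connection model, so it is usually reached through a Connection instance rather than called directly. A sketch (the conn id and path are illustrative):

from airflow.models.connection import Connection

conn = Connection(conn_id='my_sqlite', conn_type='sqlite', host='/tmp/example.db')
hook = conn.get_hook()  # dispatches to SqliteHook(sqlite_conn_id='my_sqlite')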