Code Example #1
 def execute(self, context: 'Context') -> None:
     self.log.info('Executing: %s', self.sql)
     hook = OracleHook(oracle_conn_id=self.oracle_conn_id)
     if self.sql:
         hook.run(self.sql,
                  autocommit=self.autocommit,
                  parameters=self.parameters)
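The hook call shown above can also be driven directly, outside an operator. A minimal sketch, assuming a configured Oracle connection id of 'oracle_default' and a made-up table; hook.run() with autocommit and parameters is the same call used in the operator:

from airflow.providers.oracle.hooks.oracle import OracleHook

hook = OracleHook(oracle_conn_id='oracle_default')  # 'oracle_default' is assumed to exist
# demo_table and the bind values are placeholders for illustration only
hook.run(
    "INSERT INTO demo_table (id, name) VALUES (:1, :2)",
    autocommit=True,
    parameters=(1, 'example'),
)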
Code Example #2
File: test_oracle.py Project: lgov/airflow
    def setUp(self):
        super().setUp()

        self.connection = Connection(login='login', password='password', host='host', port=1521)

        self.db_hook = OracleHook()
        self.db_hook.get_connection = mock.Mock()
        self.db_hook.get_connection.return_value = self.connection
Code Example #3
 def query(self):
     """Queries Oracle and returns a cursor to the results."""
     oracle = OracleHook(oracle_conn_id=self.oracle_conn_id)
     conn = oracle.get_conn()
     cursor = conn.cursor()
     if self.ensure_utc:
         # Ensure TIMESTAMP results are in UTC.
         # ALTER SESSION is the Oracle way to pin the session time zone
         # ("SET time_zone" is MySQL syntax and would fail against Oracle).
         tz_query = "ALTER SESSION SET TIME_ZONE = '+00:00'"
         self.log.info('Executing: %s', tz_query)
         cursor.execute(tz_query)
     self.log.info('Executing: %s', self.sql)
     cursor.execute(self.sql)
     return cursor
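For reference, a minimal sketch of how a caller might consume the cursor returned by query(), assuming standard DB-API cursor behaviour (column names via cursor.description, rows via fetchall); `op` stands in for an instance of the operator above and is an assumption here:

cursor = op.query()  # `op` is assumed to be an instance of the operator shown above
columns = [col[0] for col in cursor.description]  # DB-API: first element of each description tuple is the column name
for row in cursor.fetchall():
    print(dict(zip(columns, row)))
cursor.close()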
Code Example #4
    def execute(self, context: dict) -> None:
        oracle_hook = OracleHook(oracle_conn_id=self.oracle_conn_id)
        azure_data_lake_hook = AzureDataLakeHook(azure_data_lake_conn_id=self.azure_data_lake_conn_id)

        self.log.info("Dumping Oracle query results to local file")
        conn = oracle_hook.get_conn()
        cursor = conn.cursor()  # type: ignore[attr-defined]
        cursor.execute(self.sql, self.sql_params)

        with TemporaryDirectory(prefix='airflow_oracle_to_azure_op_') as temp:
            self._write_temp_file(cursor, os.path.join(temp, self.filename))
            self.log.info("Uploading local file to Azure Data Lake")
            azure_data_lake_hook.upload_file(
                os.path.join(temp, self.filename), os.path.join(self.azure_data_lake_path, self.filename)
            )
        cursor.close()
        conn.close()  # type: ignore[attr-defined]
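For context, a hedged sketch of how this transfer might be declared in a DAG. The class name OracleToAzureDataLakeOperator, its import path, and the constructor arguments are assumptions inferred from the attributes used in execute() above; they are not shown in the excerpt:

# All names below are assumptions: the excerpt only shows the execute() body.
from airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake import (
    OracleToAzureDataLakeOperator,
)

transfer = OracleToAzureDataLakeOperator(
    task_id='oracle_to_adl',                            # made-up task id
    oracle_conn_id='oracle_default',                    # assumed connection ids
    azure_data_lake_conn_id='azure_data_lake_default',
    sql='SELECT * FROM demo_table WHERE ROWNUM <= :max_rows',  # placeholder query with a named bind
    sql_params={'max_rows': 10},
    filename='demo_table.csv',
    azure_data_lake_path='/landing/demo',               # placeholder target path
)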
Code Example #5
 def execute(self, context: 'Context') -> Optional[Union[List, Dict]]:
     self.log.info('Executing: %s', self.procedure)
     hook = OracleHook(oracle_conn_id=self.oracle_conn_id)
     return hook.callproc(self.procedure,
                          autocommit=True,
                          parameters=self.parameters)
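A minimal usage sketch for callproc(), assuming a stored procedure named MY_PROC exists and that `parameters` accepts a positional list (the return annotation above suggests a list or dict is echoed back):

hook = OracleHook(oracle_conn_id='oracle_default')  # assumed connection id
# MY_PROC and its arguments are placeholders for illustration only
result = hook.callproc('MY_PROC', autocommit=True, parameters=[42, 'abc'])
print(result)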
Code Example #6
 def execute(self, context):
     src_hook = OracleHook(oracle_conn_id=self.oracle_source_conn_id)
     dest_hook = OracleHook(oracle_conn_id=self.oracle_destination_conn_id)
     self._execute(src_hook, dest_hook, context)
Code Example #7
File: oracle.py Project: youngyjd/incubator-airflow
 def execute(self, context):
     self.log.info('Executing: %s', self.procedure)
     hook = OracleHook(oracle_conn_id=self.oracle_conn_id)
     return hook.callproc(self.procedure,
                          autocommit=True,
                          parameters=self.parameters)
Code Example #8
File: test_oracle.py Project: ysk24ok/airflow
class TestOracleHookConn(unittest.TestCase):
    def setUp(self):
        super().setUp()

        self.connection = Connection(login='login',
                                     password='password',
                                     host='host',
                                     schema='schema',
                                     port=1521)

        self.db_hook = OracleHook()
        self.db_hook.get_connection = mock.Mock()
        self.db_hook.get_connection.return_value = self.connection

    @mock.patch('airflow.providers.oracle.hooks.oracle.cx_Oracle.connect')
    def test_get_conn_host(self, mock_connect):
        self.db_hook.get_conn()
        assert mock_connect.call_count == 1
        args, kwargs = mock_connect.call_args
        assert args == ()
        assert kwargs['user'] == 'login'
        assert kwargs['password'] == 'password'
        assert kwargs['dsn'] == 'host:1521/schema'

    @mock.patch('airflow.providers.oracle.hooks.oracle.cx_Oracle.connect')
    def test_get_conn_host_alternative_port(self, mock_connect):
        self.connection.port = 1522
        self.db_hook.get_conn()
        assert mock_connect.call_count == 1
        args, kwargs = mock_connect.call_args
        assert args == ()
        assert kwargs['user'] == 'login'
        assert kwargs['password'] == 'password'
        assert kwargs['dsn'] == 'host:1522/schema'

    @mock.patch('airflow.providers.oracle.hooks.oracle.cx_Oracle.connect')
    def test_get_conn_sid(self, mock_connect):
        dsn_sid = {'dsn': 'ignored', 'sid': 'sid'}
        self.connection.extra = json.dumps(dsn_sid)
        self.db_hook.get_conn()
        assert mock_connect.call_count == 1
        args, kwargs = mock_connect.call_args
        assert args == ()
        assert kwargs['dsn'] == cx_Oracle.makedsn("host", self.connection.port,
                                                  dsn_sid['sid'])

    @mock.patch('airflow.providers.oracle.hooks.oracle.cx_Oracle.connect')
    def test_get_conn_service_name(self, mock_connect):
        dsn_service_name = {'dsn': 'ignored', 'service_name': 'service_name'}
        self.connection.extra = json.dumps(dsn_service_name)
        self.db_hook.get_conn()
        assert mock_connect.call_count == 1
        args, kwargs = mock_connect.call_args
        assert args == ()
        assert kwargs['dsn'] == cx_Oracle.makedsn(
            "host",
            self.connection.port,
            service_name=dsn_service_name['service_name'])

    @mock.patch('airflow.providers.oracle.hooks.oracle.cx_Oracle.connect')
    def test_get_conn_encoding_without_nencoding(self, mock_connect):
        self.connection.extra = json.dumps({'encoding': 'UTF-8'})
        self.db_hook.get_conn()
        assert mock_connect.call_count == 1
        args, kwargs = mock_connect.call_args
        assert args == ()
        assert kwargs['encoding'] == 'UTF-8'
        assert kwargs['nencoding'] == 'UTF-8'

    @mock.patch('airflow.providers.oracle.hooks.oracle.cx_Oracle.connect')
    def test_get_conn_encoding_with_nencoding(self, mock_connect):
        self.connection.extra = json.dumps({
            'encoding': 'UTF-8',
            'nencoding': 'gb2312'
        })
        self.db_hook.get_conn()
        assert mock_connect.call_count == 1
        args, kwargs = mock_connect.call_args
        assert args == ()
        assert kwargs['encoding'] == 'UTF-8'
        assert kwargs['nencoding'] == 'gb2312'

    @mock.patch('airflow.providers.oracle.hooks.oracle.cx_Oracle.connect')
    def test_get_conn_nencoding(self, mock_connect):
        self.connection.extra = json.dumps({'nencoding': 'UTF-8'})
        self.db_hook.get_conn()
        assert mock_connect.call_count == 1
        args, kwargs = mock_connect.call_args
        assert args == ()
        assert 'encoding' not in kwargs
        assert kwargs['nencoding'] == 'UTF-8'

    @mock.patch('airflow.providers.oracle.hooks.oracle.cx_Oracle.connect')
    def test_get_conn_mode(self, mock_connect):
        mode = {
            'sysdba': cx_Oracle.SYSDBA,
            'sysasm': cx_Oracle.SYSASM,
            'sysoper': cx_Oracle.SYSOPER,
            'sysbkp': cx_Oracle.SYSBKP,
            'sysdgd': cx_Oracle.SYSDGD,
            'syskmt': cx_Oracle.SYSKMT,
        }
        first = True
        for mod in mode:
            self.connection.extra = json.dumps({'mode': mod})
            self.db_hook.get_conn()
            if first:
                assert mock_connect.call_count == 1
                first = False
            args, kwargs = mock_connect.call_args
            assert args == ()
            assert kwargs['mode'] == mode.get(mod)

    @mock.patch('airflow.providers.oracle.hooks.oracle.cx_Oracle.connect')
    def test_get_conn_threaded(self, mock_connect):
        self.connection.extra = json.dumps({'threaded': True})
        self.db_hook.get_conn()
        assert mock_connect.call_count == 1
        args, kwargs = mock_connect.call_args
        assert args == ()
        assert kwargs['threaded'] is True

    @mock.patch('airflow.providers.oracle.hooks.oracle.cx_Oracle.connect')
    def test_get_conn_events(self, mock_connect):
        self.connection.extra = json.dumps({'events': True})
        self.db_hook.get_conn()
        assert mock_connect.call_count == 1
        args, kwargs = mock_connect.call_args
        assert args == ()
        assert kwargs['events'] is True

    @mock.patch('airflow.providers.oracle.hooks.oracle.cx_Oracle.connect')
    def test_get_conn_purity(self, mock_connect):
        purity = {
            'new': cx_Oracle.ATTR_PURITY_NEW,
            'self': cx_Oracle.ATTR_PURITY_SELF,
            'default': cx_Oracle.ATTR_PURITY_DEFAULT,
        }
        first = True
        for pur in purity:
            self.connection.extra = json.dumps({'purity': pur})
            self.db_hook.get_conn()
            if first:
                assert mock_connect.call_count == 1
                first = False
            args, kwargs = mock_connect.call_args
            assert args == ()
            assert kwargs['purity'] == purity.get(pur)
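As the tests above show, DSN construction and session options are driven by the connection's extra JSON. A minimal sketch of defining such a connection in code; the conn id, credentials, host, and extra values are made up, and only keys exercised by the tests (service_name, mode, threaded) are used:

import json
from airflow.models import Connection

oracle_conn = Connection(
    conn_id='oracle_example',            # hypothetical connection id
    conn_type='oracle',
    login='scott', password='tiger',     # placeholder credentials
    host='db.example.com', port=1521,    # placeholder host
    extra=json.dumps({
        'service_name': 'ORCLPDB1',      # DSN built via cx_Oracle.makedsn(..., service_name=...)
        'mode': 'sysdba',                # mapped to cx_Oracle.SYSDBA by the hook
        'threaded': True,                # passed through to cx_Oracle.connect
    }),
)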
Code Example #9
 def get_hook(self):
     if self.conn_type == 'mysql':
         from airflow.providers.mysql.hooks.mysql import MySqlHook
         return MySqlHook(mysql_conn_id=self.conn_id)
     elif self.conn_type == 'google_cloud_platform':
         from airflow.gcp.hooks.bigquery import BigQueryHook
         return BigQueryHook(bigquery_conn_id=self.conn_id)
     elif self.conn_type == 'postgres':
         from airflow.providers.postgres.hooks.postgres import PostgresHook
         return PostgresHook(postgres_conn_id=self.conn_id)
     elif self.conn_type == 'pig_cli':
         from airflow.providers.apache.pig.hooks.pig import PigCliHook
         return PigCliHook(pig_cli_conn_id=self.conn_id)
     elif self.conn_type == 'hive_cli':
         from airflow.providers.apache.hive.hooks.hive import HiveCliHook
         return HiveCliHook(hive_cli_conn_id=self.conn_id)
     elif self.conn_type == 'presto':
         from airflow.providers.presto.hooks.presto import PrestoHook
         return PrestoHook(presto_conn_id=self.conn_id)
     elif self.conn_type == 'hiveserver2':
         from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
         return HiveServer2Hook(hiveserver2_conn_id=self.conn_id)
     elif self.conn_type == 'sqlite':
         from airflow.providers.sqlite.hooks.sqlite import SqliteHook
         return SqliteHook(sqlite_conn_id=self.conn_id)
     elif self.conn_type == 'jdbc':
         from airflow.providers.jdbc.hooks.jdbc import JdbcHook
         return JdbcHook(jdbc_conn_id=self.conn_id)
     elif self.conn_type == 'mssql':
         from airflow.providers.microsoft.mssql.hooks.mssql import MsSqlHook
         return MsSqlHook(mssql_conn_id=self.conn_id)
     elif self.conn_type == 'odbc':
         from airflow.providers.odbc.hooks.odbc import OdbcHook
         return OdbcHook(odbc_conn_id=self.conn_id)
     elif self.conn_type == 'oracle':
         from airflow.providers.oracle.hooks.oracle import OracleHook
         return OracleHook(oracle_conn_id=self.conn_id)
     elif self.conn_type == 'vertica':
         from airflow.providers.vertica.hooks.vertica import VerticaHook
         return VerticaHook(vertica_conn_id=self.conn_id)
     elif self.conn_type == 'cloudant':
         from airflow.providers.cloudant.hooks.cloudant import CloudantHook
         return CloudantHook(cloudant_conn_id=self.conn_id)
     elif self.conn_type == 'jira':
         from airflow.providers.jira.hooks.jira import JiraHook
         return JiraHook(jira_conn_id=self.conn_id)
     elif self.conn_type == 'redis':
         from airflow.providers.redis.hooks.redis import RedisHook
         return RedisHook(redis_conn_id=self.conn_id)
     elif self.conn_type == 'wasb':
         from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
         return WasbHook(wasb_conn_id=self.conn_id)
     elif self.conn_type == 'docker':
         from airflow.providers.docker.hooks.docker import DockerHook
         return DockerHook(docker_conn_id=self.conn_id)
     elif self.conn_type == 'azure_data_lake':
         from airflow.providers.microsoft.azure.hooks.azure_data_lake import AzureDataLakeHook
         return AzureDataLakeHook(azure_data_lake_conn_id=self.conn_id)
     elif self.conn_type == 'azure_cosmos':
         from airflow.providers.microsoft.azure.hooks.azure_cosmos import AzureCosmosDBHook
         return AzureCosmosDBHook(azure_cosmos_conn_id=self.conn_id)
     elif self.conn_type == 'cassandra':
         from airflow.providers.apache.cassandra.hooks.cassandra import CassandraHook
         return CassandraHook(cassandra_conn_id=self.conn_id)
     elif self.conn_type == 'mongo':
         from airflow.providers.mongo.hooks.mongo import MongoHook
         return MongoHook(conn_id=self.conn_id)
     elif self.conn_type == 'gcpcloudsql':
         from airflow.gcp.hooks.cloud_sql import CloudSQLDatabaseHook
         return CloudSQLDatabaseHook(gcp_cloudsql_conn_id=self.conn_id)
     elif self.conn_type == 'grpc':
         from airflow.providers.grpc.hooks.grpc import GrpcHook
         return GrpcHook(grpc_conn_id=self.conn_id)
     raise AirflowException("Unknown hook type {}".format(self.conn_type))
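A short usage sketch of this dispatch, assuming get_hook() above is a method of the Connection model (as the self.conn_type / self.conn_id attributes suggest) and reusing the connection defined in the previous sketch; for the query to actually run, the conn id would also need to be registered in Airflow's metadata database:

hook = oracle_conn.get_hook()                    # returns an OracleHook for conn_type == 'oracle'
rows = hook.get_records('SELECT 1 FROM dual')    # get_records is the common DbApiHook helper (assumed available here)
print(rows)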