def execute(self, context: dict) -> Any:
    hook = AzureDataLakeHook(azure_data_lake_conn_id=self.azure_data_lake_conn_id)
    return hook.remove(path=self.path, recursive=self.recursive, ignore_not_found=self.ignore_not_found)
def execute(self, context: dict) -> list:
    hook = AzureDataLakeHook(azure_data_lake_conn_id=self.azure_data_lake_conn_id)
    self.log.info('Getting list of ADLS files in path: %s', self.path)
    return hook.list(path=self.path)
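# Hedged usage sketch (not part of the provider source): the two execute()
# methods above are thin wrappers around AzureDataLakeHook.remove and
# AzureDataLakeHook.list, so the same operations can be driven from the hook
# directly. The connection id 'my_adls_conn' and the paths are illustrative
# assumptions; hook.list accepts glob-style paths as shown in execute().
from airflow.providers.microsoft.azure.hooks.azure_data_lake import AzureDataLakeHook


def adls_cleanup_sketch():
    hook = AzureDataLakeHook(azure_data_lake_conn_id='my_adls_conn')  # assumed conn id
    # List every CSV under an assumed landing folder.
    files = hook.list(path='raw/2020-01-01/*.csv')
    for path in files:
        # Delete each matched file; recursive=False because these are files, not folders.
        hook.remove(path=path, recursive=False, ignore_not_found=True)
    return files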
def test_list_walk(self, mock_lib, mock_fs):
    from airflow.providers.microsoft.azure.hooks.azure_data_lake import AzureDataLakeHook

    hook = AzureDataLakeHook(azure_data_lake_conn_id='adl_test_key')
    hook.list('file_path/some_folder/')
    mock_fs.return_value.walk.assert_called_once_with('file_path/some_folder/')
def test_remove(self, mock_lib, mock_fs):
    from airflow.providers.microsoft.azure.hooks.azure_data_lake import AzureDataLakeHook

    hook = AzureDataLakeHook(azure_data_lake_conn_id='adl_test_key')
    hook.remove('filepath', True)
    mock_fs.return_value.remove.assert_called_once_with('filepath', recursive=True)
def test_conn(self, mock_lib):
    from airflow.providers.microsoft.azure.hooks.azure_data_lake import AzureDataLakeHook
    from azure.datalake.store import core

    hook = AzureDataLakeHook(azure_data_lake_conn_id='adl_test_key')
    self.assertIsNone(hook._conn)
    self.assertEqual(hook.conn_id, 'adl_test_key')
    self.assertIsInstance(hook.get_conn(), core.AzureDLFileSystem)
    assert mock_lib.auth.called
def test_download_file(self, mock_lib, mock_downloader):
    from airflow.providers.microsoft.azure.hooks.azure_data_lake import AzureDataLakeHook

    hook = AzureDataLakeHook(azure_data_lake_conn_id='adl_test_key')
    hook.download_file(
        local_path='test_adl_hook.py',
        remote_path='/test_adl_hook.py',
        nthreads=64,
        overwrite=True,
        buffersize=4194304,
        blocksize=4194304,
    )
    mock_downloader.assert_called_once_with(
        hook.get_conn(),
        lpath='test_adl_hook.py',
        rpath='/test_adl_hook.py',
        nthreads=64,
        overwrite=True,
        buffersize=4194304,
        blocksize=4194304,
    )
def execute(self, context: dict) -> None:
    oracle_hook = OracleHook(oracle_conn_id=self.oracle_conn_id)
    azure_data_lake_hook = AzureDataLakeHook(azure_data_lake_conn_id=self.azure_data_lake_conn_id)

    self.log.info("Dumping Oracle query results to local file")
    conn = oracle_hook.get_conn()
    cursor = conn.cursor()  # type: ignore[attr-defined]
    cursor.execute(self.sql, self.sql_params)

    with TemporaryDirectory(prefix='airflow_oracle_to_azure_op_') as temp:
        self._write_temp_file(cursor, os.path.join(temp, self.filename))
        self.log.info("Uploading local file to Azure Data Lake")
        azure_data_lake_hook.upload_file(
            os.path.join(temp, self.filename), os.path.join(self.azure_data_lake_path, self.filename)
        )
    cursor.close()
    conn.close()  # type: ignore[attr-defined]
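# Hedged sketch (not the provider's actual helper): execute() above calls a
# _write_temp_file(cursor, path) method that is not shown in this section. A
# minimal stdlib-only equivalent might look like the function below; the
# delimiter and encoding defaults are illustrative assumptions, whereas the
# real operator takes such options from its constructor.
import csv


def write_cursor_to_csv(cursor, path, delimiter=',', encoding='utf-8'):
    """Dump an open DB-API cursor to a CSV file, header row first."""
    with open(path, 'w', newline='', encoding=encoding) as csvfile:
        writer = csv.writer(csvfile, delimiter=delimiter)
        writer.writerow(col[0] for col in cursor.description)  # column names
        writer.writerows(cursor)  # DB-API cursors yield one row per iteration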
def execute(self, context: dict) -> None:
    if '**' in self.local_path:
        raise AirflowException("Recursive glob patterns using `**` are not supported")
    if not self.extra_upload_options:
        self.extra_upload_options = {}
    hook = AzureDataLakeHook(azure_data_lake_conn_id=self.azure_data_lake_conn_id)
    self.log.info('Uploading %s to %s', self.local_path, self.remote_path)
    return hook.upload_file(
        local_path=self.local_path,
        remote_path=self.remote_path,
        nthreads=self.nthreads,
        overwrite=self.overwrite,
        buffersize=self.buffersize,
        blocksize=self.blocksize,
        **self.extra_upload_options,
    )
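# Hedged usage sketch (illustrative, not provider source): calling the hook's
# upload_file directly with the same keyword arguments execute() forwards. The
# connection id and both paths are assumptions; the 4 MiB buffer and block
# sizes mirror the values used in the hook tests above.
from airflow.providers.microsoft.azure.hooks.azure_data_lake import AzureDataLakeHook


def upload_report_sketch():
    hook = AzureDataLakeHook(azure_data_lake_conn_id='my_adls_conn')  # assumed conn id
    hook.upload_file(
        local_path='/tmp/report.csv',            # assumed local file
        remote_path='reports/2020/report.csv',   # assumed ADLS destination
        nthreads=64,
        overwrite=True,
        buffersize=4194304,
        blocksize=4194304,
    )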
def execute(self, context):
    # use the super method to list all files in an Azure Data Lake path
    files = super().execute(context)
    g_hook = GCSHook(
        gcp_conn_id=self.gcp_conn_id,
        delegate_to=self.delegate_to,
        impersonation_chain=self.google_impersonation_chain,
    )

    if not self.replace:
        # if we are not replacing -> list all files in the ADLS path
        # and only keep those files which are present in
        # ADLS and not in Google Cloud Storage
        bucket_name, prefix = _parse_gcs_url(self.dest_gcs)
        existing_files = g_hook.list(bucket_name=bucket_name, prefix=prefix)
        files = set(files) - set(existing_files)

    if files:
        hook = AzureDataLakeHook(azure_data_lake_conn_id=self.azure_data_lake_conn_id)

        for obj in files:
            with NamedTemporaryFile(mode='wb', delete=True) as f:
                hook.download_file(local_path=f.name, remote_path=obj)
                f.flush()
                dest_gcs_bucket, dest_gcs_prefix = _parse_gcs_url(self.dest_gcs)
                dest_path = os.path.join(dest_gcs_prefix, obj)
                self.log.info("Saving file to %s", dest_path)

                g_hook.upload(
                    bucket_name=dest_gcs_bucket, object_name=dest_path, filename=f.name, gzip=self.gzip
                )

        self.log.info("All done, uploaded %d files to GCS", len(files))
    else:
        self.log.info("In sync, no files needed to be uploaded to GCS")

    return files
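# Hedged illustration (standalone, not provider source): the replace=False
# branch above reduces to a set difference between the objects listed in ADLS
# and the objects already present in GCS, so only missing files get copied.
# The file names below are made up.
def files_to_copy(adls_files, gcs_existing):
    """Return only the ADLS objects not yet present in GCS."""
    return set(adls_files) - set(gcs_existing)


# e.g. files_to_copy(['a.csv', 'b.csv', 'c.csv'], ['b.csv']) -> {'a.csv', 'c.csv'}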
def test_check_for_blob(self, mock_lib, mock_filesystem):
    from airflow.providers.microsoft.azure.hooks.azure_data_lake import AzureDataLakeHook

    hook = AzureDataLakeHook(azure_data_lake_conn_id='adl_test_key')
    hook.check_for_file('file_path')
    assert mock_filesystem.glob.called
def get_hook(self):
    if self.conn_type == 'mysql':
        from airflow.providers.mysql.hooks.mysql import MySqlHook
        return MySqlHook(mysql_conn_id=self.conn_id)
    elif self.conn_type == 'google_cloud_platform':
        from airflow.gcp.hooks.bigquery import BigQueryHook
        return BigQueryHook(bigquery_conn_id=self.conn_id)
    elif self.conn_type == 'postgres':
        from airflow.providers.postgres.hooks.postgres import PostgresHook
        return PostgresHook(postgres_conn_id=self.conn_id)
    elif self.conn_type == 'pig_cli':
        from airflow.providers.apache.pig.hooks.pig import PigCliHook
        return PigCliHook(pig_cli_conn_id=self.conn_id)
    elif self.conn_type == 'hive_cli':
        from airflow.providers.apache.hive.hooks.hive import HiveCliHook
        return HiveCliHook(hive_cli_conn_id=self.conn_id)
    elif self.conn_type == 'presto':
        from airflow.providers.presto.hooks.presto import PrestoHook
        return PrestoHook(presto_conn_id=self.conn_id)
    elif self.conn_type == 'hiveserver2':
        from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
        return HiveServer2Hook(hiveserver2_conn_id=self.conn_id)
    elif self.conn_type == 'sqlite':
        from airflow.providers.sqlite.hooks.sqlite import SqliteHook
        return SqliteHook(sqlite_conn_id=self.conn_id)
    elif self.conn_type == 'jdbc':
        from airflow.providers.jdbc.hooks.jdbc import JdbcHook
        return JdbcHook(jdbc_conn_id=self.conn_id)
    elif self.conn_type == 'mssql':
        from airflow.providers.microsoft.mssql.hooks.mssql import MsSqlHook
        return MsSqlHook(mssql_conn_id=self.conn_id)
    elif self.conn_type == 'odbc':
        from airflow.providers.odbc.hooks.odbc import OdbcHook
        return OdbcHook(odbc_conn_id=self.conn_id)
    elif self.conn_type == 'oracle':
        from airflow.providers.oracle.hooks.oracle import OracleHook
        return OracleHook(oracle_conn_id=self.conn_id)
    elif self.conn_type == 'vertica':
        from airflow.providers.vertica.hooks.vertica import VerticaHook
        return VerticaHook(vertica_conn_id=self.conn_id)
    elif self.conn_type == 'cloudant':
        from airflow.providers.cloudant.hooks.cloudant import CloudantHook
        return CloudantHook(cloudant_conn_id=self.conn_id)
    elif self.conn_type == 'jira':
        from airflow.providers.jira.hooks.jira import JiraHook
        return JiraHook(jira_conn_id=self.conn_id)
    elif self.conn_type == 'redis':
        from airflow.providers.redis.hooks.redis import RedisHook
        return RedisHook(redis_conn_id=self.conn_id)
    elif self.conn_type == 'wasb':
        from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
        return WasbHook(wasb_conn_id=self.conn_id)
    elif self.conn_type == 'docker':
        from airflow.providers.docker.hooks.docker import DockerHook
        return DockerHook(docker_conn_id=self.conn_id)
    elif self.conn_type == 'azure_data_lake':
        from airflow.providers.microsoft.azure.hooks.azure_data_lake import AzureDataLakeHook
        return AzureDataLakeHook(azure_data_lake_conn_id=self.conn_id)
    elif self.conn_type == 'azure_cosmos':
        from airflow.providers.microsoft.azure.hooks.azure_cosmos import AzureCosmosDBHook
        return AzureCosmosDBHook(azure_cosmos_conn_id=self.conn_id)
    elif self.conn_type == 'cassandra':
        from airflow.providers.apache.cassandra.hooks.cassandra import CassandraHook
        return CassandraHook(cassandra_conn_id=self.conn_id)
    elif self.conn_type == 'mongo':
        from airflow.providers.mongo.hooks.mongo import MongoHook
        return MongoHook(conn_id=self.conn_id)
    elif self.conn_type == 'gcpcloudsql':
        from airflow.gcp.hooks.cloud_sql import CloudSQLDatabaseHook
        return CloudSQLDatabaseHook(gcp_cloudsql_conn_id=self.conn_id)
    elif self.conn_type == 'grpc':
        from airflow.providers.grpc.hooks.grpc import GrpcHook
        return GrpcHook(grpc_conn_id=self.conn_id)
    raise AirflowException("Unknown hook type {}".format(self.conn_type))
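# Hedged usage sketch (illustrative, not provider source): get_hook() maps a
# connection's conn_type to the matching provider hook, so calling code can
# stay provider-agnostic. The connection id 'my_postgres' is an assumption and
# must be registered with Airflow for the returned hook to actually connect;
# get_records() assumes the resolved hook is a SQL (DbApiHook-style) hook.
from airflow.hooks.base_hook import BaseHook


def run_adhoc_query(sql):
    conn = BaseHook.get_connection('my_postgres')  # assumed conn id
    hook = conn.get_hook()  # resolves to PostgresHook because conn_type == 'postgres'
    return hook.get_records(sql)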