# Contrib operators are not imported by default. They should be accessed # directly: from airflow.contrib.operators.operator_module import Operator import sys # ------------------------------------------------------------------------ # # #TODO #FIXME Airflow 2.0 # # Old import machinary below. # # This is deprecated but should be kept until Airflow 2.0 # for compatibility. # # ------------------------------------------------------------------------ _operators = { 'ssh_operator': ['SSHOperator'], 'vertica_operator': ['VerticaOperator'], 'vertica_to_hive': ['VerticaToHiveTransfer'], 'qubole_operator': ['QuboleOperator'], 'spark_submit_operator': ['SparkSubmitOperator'], 'file_to_wasb': ['FileToWasbOperator'], 'fs_operator': ['FileSensor'] } import os as _os if not _os.environ.get('AIRFLOW_USE_NEW_IMPORTS', False): from airflow.utils.helpers import AirflowImporter airflow_importer = AirflowImporter(sys.modules[__name__], _operators)
# NOTE(review): this chunk starts mid-file -- the opening of the `_hooks`
# mapping (the `_hooks = {` line and any earlier entries) lies outside this
# view. Entries map a hook module name to the public names it exports.
    'samba_hook': ['SambaHook'],
    'sqlite_hook': ['SqliteHook'],
    'S3_hook': ['S3Hook'],
    'zendesk_hook': ['ZendeskHook'],
    'http_hook': ['HttpHook'],
    'druid_hook': ['DruidHook'],
    'jdbc_hook': ['JdbcHook'],
    'dbapi_hook': ['DbApiHook'],
    'mssql_hook': ['MsSqlHook'],
    'oracle_hook': ['OracleHook'],
}

import os as _os
# Legacy lazy-import path: active unless AIRFLOW_USE_NEW_IMPORTS is set to
# any non-empty string (environ.get returns a string, so "False" is truthy).
if not _os.environ.get('AIRFLOW_USE_NEW_IMPORTS', False):
    from airflow.utils.helpers import AirflowImporter
    airflow_importer = AirflowImporter(sys.modules[__name__], _hooks)


def _integrate_plugins():
    """Integrate plugins to the context"""
    # Register each plugin-provided hooks module both in sys.modules (so it
    # is importable by name) and in this module's globals (so it is reachable
    # as an attribute of this package module).
    from airflow.plugins_manager import hooks_modules
    for hooks_module in hooks_modules:
        sys.modules[hooks_module.__name__] = hooks_module
        globals()[hooks_module._name] = hooks_module


##########################################################
# TODO FIXME Remove in Airflow 2.0

if not _os.environ.get('AIRFLOW_USE_NEW_IMPORTS', False):
    from zope.deprecation import deprecated as _deprecated
    # NOTE(review): `hooks_module` is a function-local of _integrate_plugins
    # above and is NOT defined at module scope -- executing this line as
    # written would raise NameError; it likely should iterate the `_hooks`
    # mapping instead. Confirm against the original file.
    for _hook in hooks_module._objects:
# NOTE(review): chunk truncated here -- the loop body continues past this view.
import sys
import os

# Mapping of deprecated sensor module name -> names exported from it.
# Fed to AirflowImporter so old-style sensor imports keep resolving.
_sensors = {
    'base_sensor_operator': ['BaseSensorOperator'],
    'external_task_sensor': ['ExternalTaskSensor'],
    'hdfs_sensor': ['HdfsSensor'],
    'hive_partition_sensor': ['HivePartitionSensor'],
    'http_sensor': ['HttpSensor'],
    'metastore_partition_sensor': ['MetastorePartitionSensor'],
    'named_hive_partition_sensor': ['NamedHivePartitionSensor'],
    's3_key_sensor': ['S3KeySensor'],
    's3_prefix_sensor': ['S3PrefixSensor'],
    'sql_sensor': ['SqlSensor'],
    'time_delta_sensor': ['TimeDeltaSensor'],
    'time_sensor': ['TimeSensor'],
    'web_hdfs_sensor': ['WebHdfsSensor']
}

# Legacy import path stays active unless AIRFLOW_USE_NEW_IMPORTS is set
# (to any non-empty string -- environ.get hands back the raw string).
if not os.environ.get('AIRFLOW_USE_NEW_IMPORTS', False):
    from airflow.utils.helpers import AirflowImporter
    airflow_importer = AirflowImporter(sys.modules[__name__], _sensors)


def _integrate_plugins():
    """Integrate plugins to the context"""
    from airflow.plugins_manager import sensors_modules
    # Expose every plugin-provided sensors module twice: once in
    # sys.modules so it is importable by its dotted name, and once as an
    # attribute of this module under the plugin's declared name.
    for _plugin_module in sensors_modules:
        sys.modules[_plugin_module.__name__] = _plugin_module
        globals()[_plugin_module._name] = _plugin_module