def execute_command(command):
    """Execute a shell command on a Celery worker.

    :param command: the command line to run, as a single string passed
        to the shell.
    :raises AirflowException: if the command exits with a non-zero status.
    """
    log = LoggingMixin().logger
    log.info("Executing command in Celery: %s", command)
    try:
        # NOTE(review): shell=True runs an arbitrary command string. This is
        # acceptable only because the command originates from the Airflow
        # scheduler, never from untrusted user input — confirm upstream.
        subprocess.check_call(command, shell=True)
    except subprocess.CalledProcessError as e:
        # Use log.exception (not log.error) so the full traceback and the
        # non-zero exit status are recorded before the error is re-raised.
        log.exception(e)
        raise AirflowException('Celery command failed')
log.debug('Importing plugin module %s', filepath) # normalize root path as namespace namespace = '_'.join([re.sub(norm_pattern, '__', root), mod_name]) m = imp.load_source(namespace, filepath) for obj in list(m.__dict__.values()): if (inspect.isclass(obj) and issubclass(obj, AirflowPlugin) and obj is not AirflowPlugin): obj.validate() if obj not in plugins: plugins.append(obj) except Exception as e: log.exception(e) log.error('Failed to import plugin %s', filepath) def make_module(name, objects): log.debug('Creating module %s', name) name = name.lower() module = imp.new_module(name) module._name = name.split('.')[-1] module._objects = objects module.__dict__.update((o.__name__, o) for o in objects) return module # Plugin components to integrate as modules operators_modules = [] hooks_modules = []
# limitations under the License. from airflow.hooks.base_hook import BaseHook from airflow import configuration from hdfs import InsecureClient, HdfsError from airflow.utils.log.LoggingMixin import LoggingMixin _kerberos_security_mode = configuration.get("core", "security") == "kerberos" if _kerberos_security_mode: try: from hdfs.ext.kerberos import KerberosClient except ImportError: log = LoggingMixin().logger log.error("Could not load the Kerberos extension for the WebHDFSHook.") raise from airflow.exceptions import AirflowException class AirflowWebHDFSHookException(AirflowException): pass class WebHDFSHook(BaseHook): """ Interact with HDFS. This class is a wrapper around the hdfscli library. """ def __init__(self, webhdfs_conn_id='webhdfs_default', proxy_user=None): self.webhdfs_conn_id = webhdfs_conn_id self.proxy_user = proxy_user