def _get_airflow_executor(self):
    """Creates a new instance of the configured executor if none exists and returns it"""
    if self.task_executor_type == AirflowTaskExecutorType.airflow_inprocess:
        return InProcessExecutor()

    if self.task_executor_type == AirflowTaskExecutorType.airflow_multiprocess_local:
        # Local multiprocess mode: use Airflow's LocalExecutor for parallel runs,
        # otherwise fall back to the SequentialExecutor.
        if self.run.context.settings.run.parallel:
            return LocalExecutor()
        else:
            return SequentialExecutor()

    if self.task_executor_type == AirflowTaskExecutorType.airflow_kubernetes:
        # Kubernetes mode requires the dbnd-docker plugin; import lazily so the
        # dependency is only needed when this executor is actually selected.
        assert_plugin_enabled("dbnd-docker")

        from dbnd_airflow.executors.kubernetes_executor.kubernetes_executor import (
            DbndKubernetesExecutor,
        )
        from dbnd_docker.kubernetes.kubernetes_engine_config import (
            KubernetesEngineConfig,
        )

        if not isinstance(self.target_engine, KubernetesEngineConfig):
            raise friendly_error.executor_k8s.kubernetes_with_non_compatible_engine(
                self.target_engine
            )

        kube_dbnd = self.target_engine.build_kube_dbnd()
        if kube_dbnd.engine_config.debug:
            logging.getLogger("airflow.contrib.kubernetes").setLevel(logging.DEBUG)

        return DbndKubernetesExecutor(kube_dbnd=kube_dbnd)
def __init__(self, task_run):
    super(DatabricksCtrl, self).__init__(task_run=task_run)
    self.databricks_config = task_run.task.spark_engine  # type: DatabricksConfig

    self.local_dbfs_mount = None
    if self.databricks_config.cloud_type == DatabricksCloud.azure:
        assert_plugin_enabled(
            "dbnd-azure", "Databricks on Azure requires the dbnd-azure module."
        )
        self.local_dbfs_mount = DatabricksAzureConfig().local_dbfs_mount

    self.current_run_id = None
    self.hook = None
def get_cloud_sync(config, task, job):
    assert_plugin_enabled("dbnd-aws", "Qubole on AWS requires the dbnd-aws module.")
def get_spark_session():
    assert_plugin_enabled("dbnd-spark")

    import dbnd_spark

    return dbnd_spark.get_spark_session()
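# Usage sketch (not part of the source): one way the helper above could be called
# from user code. Assumes pyspark and the dbnd-spark plugin are installed and a
# Spark-enabled dbnd run is active; `word_count` and `text_path` are hypothetical.
from dbnd import task


@task
def word_count(text_path):
    # get_spark_session() raises if the dbnd-spark plugin is not enabled,
    # otherwise it returns the SparkSession of the current Spark engine.
    spark = get_spark_session()
    return spark.read.text(text_path).count()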