def dbnd_setup_plugin():
    """Plugin entry point: wire Azure Blob storage into dbnd.

    Registers the Azure Blob filesystem client, a custom path resolver
    for azure blob URLs, and the Azure cloud configuration class.
    """
    register_file_system(AZURE_BLOB_FS_NAME, build_azure_blob_fs_client)
    register_file_system_name_custom_resolver(match_azure_blob_path)

    # Imported lazily so the azure dependency is only pulled in when
    # the plugin is actually activated.
    from dbnd_azure.env import AzureCloudConfig

    register_config_cls(AzureCloudConfig)
def dbnd_setup_plugin():
    """Plugin entry point: apply Airflow settings and register its config.

    Sets the additional Airflow SQLAlchemy connection configuration,
    then registers ``AirflowConfig`` with dbnd.
    """
    # Set additional airflow configuration
    configure_airflow_sql_alchemy_conn()

    # Local imports keep plugin activation cheap when unused.
    from dbnd import register_config_cls
    from dbnd_airflow.config import AirflowConfig

    register_config_cls(AirflowConfig)
def dbnd_setup_plugin():
    """Plugin entry point: register Spark engine configs and bootstrap Spark.

    Registers the local Spark engine and Livy configurations, then runs
    the Spark bootstrap routine.
    """
    from dbnd_spark.local.local_spark_config import SparkLocalEngineConfig
    from dbnd_spark.spark_bootstrap import dbnd_spark_bootstrap

    register_config_cls(SparkLocalEngineConfig)
    register_config_cls(LivySparkConfig)

    dbnd_spark_bootstrap()
def dbnd_setup_plugin():
    """Plugin entry point: register HDFS client configs and the hdfs filesystem.

    Registers both HDFS client implementations (hdfscli and pyox) as
    configuration classes, then registers the ``hdfs`` filesystem backed
    by ``create_hdfs_client``.
    """
    from dbnd_hdfs.fs.hdfs_hdfscli import HdfsCli
    from dbnd_hdfs.fs.hdfs_pyox import HdfsPyox

    register_config_cls(HdfsCli)
    register_config_cls(HdfsPyox)

    from dbnd_hdfs.fs.hdfs import create_hdfs_client

    register_file_system("hdfs", create_hdfs_client)
def pytest_configure(config):
    """Pytest hook: exclude default-off markers unless explicitly selected.

    Extends the user's ``-m`` mark expression with ``not <mark>`` for every
    mark in ``markers_to_exlude_by_default`` that the expression does not
    already mention, then registers the test task/config classes.
    """
    current_expr = getattr(config.option, "markexpr", "")

    parts = [current_expr] if current_expr else []
    # NOTE(review): this is a plain substring check, so a mark name that is
    # a substring of another (e.g. "slow" in "slower") would be skipped —
    # preserved as-is; confirm mark names never overlap.
    parts.extend(
        "not %s" % mark
        for mark in markers_to_exlude_by_default
        if mark not in current_expr
    )

    setattr(config.option, "markexpr", " and ".join(parts))

    register_task(TConfig)
    register_config_cls(FooConfig)
def dbnd_setup_plugin():
    """Plugin entry point: register Spark configs and optional Spark targets.

    Registers the local Spark engine and Livy configurations; Spark target
    registration is best-effort and silently skipped when its optional
    dependencies are missing.
    """
    from dbnd_spark.local.local_spark_config import SparkLocalEngineConfig

    register_config_cls(SparkLocalEngineConfig)
    register_config_cls(LivySparkConfig)

    # Deliberate best-effort: targets require optional dependencies.
    try:
        from dbnd_spark.targets import register_targets
    except ImportError:
        pass
    else:
        register_targets()
def dbnd_setup_plugin():
    """Plugin entry point: register Spark configs, bootstrap, and load spark env config.

    After registering the Spark engine/Livy configs and bootstrapping,
    loads tracking configuration from the Spark environment into the
    ``system`` layer — but only when pyspark is imported and Spark
    tracking is enabled.
    """
    from dbnd_spark.local.local_spark_config import SparkLocalEngineConfig
    from dbnd_spark.spark_bootstrap import dbnd_spark_bootstrap

    register_config_cls(SparkLocalEngineConfig)
    register_config_cls(LivySparkConfig)

    dbnd_spark_bootstrap()

    if has_pyspark_imported() and spark_tracking_enabled():
        spark_config_store = read_spark_environ_config()
        dbnd_config.set_values(spark_config_store, "system")
    else:
        _debug_init_print(
            "spark conf is not loaded since pyspark is not imported or DBND__ENABLE__SPARK_CONTEXT_ENV is not set"
        )
def dbnd_setup_plugin():
    """Plugin entry point: register Docker engine/task configs, plus Kubernetes when Airflow is enabled."""
    from dbnd_docker.docker.docker_engine_config import DockerEngineConfig
    from dbnd_docker.docker.docker_task import DockerRunTask

    register_config_cls(DockerEngineConfig)
    register_config_cls(DockerRunTask)

    # Kubernetes support rides on top of the Airflow integration.
    if is_airflow_enabled():
        from dbnd_docker.kubernetes.kubernetes_engine_config import (
            KubernetesEngineConfig,
        )

        register_config_cls(KubernetesEngineConfig)
        logger.debug("Registered kubernetes plugin")
def dbnd_setup_plugin():
    """Plugin entry point: register GCP configs and the GCS filesystem.

    Registers the GCP environment and Dataflow configurations, adds the
    Dataproc configuration when the Spark plugin is enabled, and finally
    registers the GCS filesystem client.
    """
    from dbnd_gcp.dataflow.dataflow_config import DataflowConfig
    from dbnd_gcp.env import GcpEnvConfig

    register_config_cls(GcpEnvConfig)
    register_config_cls(DataflowConfig)

    # Dataproc is a Spark runtime, so only register it when Spark is on.
    if is_plugin_enabled("dbnd-spark"):
        from dbnd_gcp.dataproc.dataproc_config import DataprocConfig

        register_config_cls(DataprocConfig)

    register_file_system(FileSystems.gcs, build_gcs_client)
def dbnd_setup_plugin():
    """Plugin entry point: register AWS configs and the S3 filesystem.

    Registers the EMR and AWS environment configurations, adds the AWS
    Batch configuration when the Docker plugin is enabled, and finally
    registers the S3 filesystem client.
    """
    # register configs
    from dbnd_aws.emr.emr_config import EmrConfig
    from dbnd_aws.env import AwsEnvConfig

    register_config_cls(EmrConfig)
    register_config_cls(AwsEnvConfig)

    # AWS Batch execution depends on the docker integration.
    if is_plugin_enabled("dbnd-docker"):
        from dbnd_aws.batch.aws_batch_ctrl import AwsBatchConfig

        register_config_cls(AwsBatchConfig)

    from dbnd_aws.fs import build_s3_fs_client

    register_file_system(FileSystems.s3, build_s3_fs_client)
def dbnd_setup_plugin():
    """Plugin entry point: register the Postgres configuration class."""
    register_config_cls(PostgresConfig)
def dbnd_setup_plugin():
    """Plugin entry point: register the MLflow tracking configuration class."""
    from dbnd_mlflow.mlflow_config import MLFlowTrackingConfig

    register_config_cls(MLFlowTrackingConfig)
def dbnd_setup_plugin():
    """Plugin entry point: register the Snowflake configuration class."""
    # Local imports keep plugin activation cheap when unused.
    from dbnd import register_config_cls
    from dbnd_snowflake.snowflake_config import SnowflakeConfig

    register_config_cls(SnowflakeConfig)
def dbnd_setup_plugin():
    """Plugin entry point: register the Qubole configuration class."""
    from dbnd_qubole.qubole_config import QuboleConfig

    register_config_cls(QuboleConfig)
def dbnd_setup_plugin():
    """Plugin entry point: register the Databricks configuration class."""
    from dbnd_databricks.databricks_config import DatabricksConfig

    register_config_cls(DatabricksConfig)
def dbnd_setup_plugin():
    """Plugin entry point: register the Airflow configuration class."""
    # Local imports keep plugin activation cheap when unused.
    from dbnd import register_config_cls
    from dbnd_airflow.config import AirflowConfig

    register_config_cls(AirflowConfig)