Example #1
def track_databricks_submit_run_operator(operator):
    """Inject dbnd tracking context into a DatabricksSubmitRunOperator run payload."""
    config = operator.json
    # Passing env variables is only supported in new clusters,
    # not in runs submitted against an existing cluster.
    if "new_cluster" in config:
        cluster = config["new_cluster"]
        cluster.setdefault("spark_env_vars", {})
        # Propagate the Airflow run context (dag_id, task_id, execution
        # date, ...) to the Spark job as environment variables.
        cluster["spark_env_vars"].update(get_airflow_conf())

        if "spark_jar_task" in config:
            # JVM tasks also need the dbnd Java agent wired in through
            # spark_conf; skip it if no agent configuration is available.
            cluster.setdefault("spark_conf", {})
            agent_conf = get_databricks_java_agent_conf()
            if agent_conf is not None:
                cluster["spark_conf"].update(agent_conf)
Example #2
def get_dbnd_tracking_spark_flat_conf(**kwargs):
    # Wrap each Airflow context field as a Spark conf entry and return it
    # in a flat, command-line friendly form (see the sketch below).
    return flat_conf(add_spark_env_fields(get_airflow_conf(**kwargs)))
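
To make the data shape concrete, here is a hypothetical reconstruction of the two helpers, assuming flat_conf emits spark-submit style --conf pairs and add_spark_env_fields prefixes each field with spark.env. (both are assumptions about dbnd internals, not verbatim library code):

def add_spark_env_fields(fields):
    # Assumed prefix: turn AIRFLOW_CTX_DAG_ID into spark.env.AIRFLOW_CTX_DAG_ID.
    return {"spark.env.{}".format(name): value for name, value in fields.items()}

def flat_conf(conf):
    # Assumed layout: flatten the mapping into spark-submit --conf arguments.
    flat = []
    for key, value in conf.items():
        flat.extend(["--conf", "{}={}".format(key, value)])
    return flat

# Under these assumptions, get_dbnd_tracking_spark_flat_conf() yields e.g.:
# ["--conf", "spark.env.AIRFLOW_CTX_DAG_ID=my_dag",
#  "--conf", "spark.env.AIRFLOW_CTX_TASK_ID=my_task", ...]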
Example #3
def get_dbnd_tracking_spark_conf_dict(**kwargs):
    # Same tracking fields as the flat variant above, but kept as a
    # mapping so they can be merged into an existing spark_conf dict.
    return dict(add_spark_env_fields(get_airflow_conf(**kwargs)))
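
A short sketch of how the dict form might be consumed; the existing spark_conf entry is hypothetical, and the payload shape follows the Databricks Runs Submit API:

cluster = {"spark_conf": {"spark.speculation": "true"}}  # hypothetical existing conf
cluster["spark_conf"].update(get_dbnd_tracking_spark_conf_dict())
# spark_conf now holds both the original entries and the spark.env.*
# tracking fields for the current Airflow run.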