def get_databricks_java_agent_conf():
    """Build a Spark conf that attaches the dbnd java agent on Databricks.

    Reads the jar path from AirflowTrackingConfig; when no jar is
    configured, logs a warning and returns None instead of a conf.
    """
    tracking_config = AirflowTrackingConfig.from_databand_context()
    jar_path = tracking_config.databricks_dbnd_java_agent
    logger.debug("found agent_jar %s", jar_path)
    if jar_path is None:
        logger.warning("You are not using the dbnd java agent")
        return None
    return create_spark_java_agent_conf(jar_path)
def get_databricks_java_agent_conf():
    """Build a Spark conf that attaches the dbnd java agent on Databricks.

    NOTE(review): this redefines an identically-named function earlier in
    the file; being defined later, this variant (reading TrackingConfig)
    is the one bound at import time — confirm which is intended.
    """
    conf = TrackingConfig()
    jar = conf.databricks_dbnd_java_agent
    logger.debug("found agent_jar %s", jar)
    if jar is None:
        logger.warning("No agent jar found")
        return None
    return create_spark_java_agent_conf(jar)
def get_local_spark_java_agent_conf():
    """Build a Spark conf that attaches the local dbnd java agent.

    Reads the jar path from TrackingConfig and verifies the file exists.

    Returns:
        The result of create_spark_java_agent_conf(agent_jar), or None
        (with a warning) when the jar path is unset or missing on disk.
    """
    config = TrackingConfig()
    agent_jar = config.local_dbnd_java_agent
    logger.debug("found agent_jar %s", agent_jar)
    if agent_jar is None or not os.path.exists(agent_jar):
        # Fixed the broken warning text ("The wanted agents jar doesn't
        # exists") to proper English.
        logger.warning("The requested agent jar does not exist: %s", agent_jar)
        return None
    return create_spark_java_agent_conf(agent_jar)
def get_spark_submit_java_agent_conf():
    """Build a Spark conf that attaches the dbnd java agent for spark-submit.

    Reads the jar path from AirflowTrackingConfig and verifies it exists.

    Returns:
        The result of create_spark_java_agent_conf(agent_jar), or None
        (with a warning) when no jar is configured or the path is missing.
    """
    config = AirflowTrackingConfig.from_databand_context()
    agent_jar = config.spark_submit_dbnd_java_agent
    if agent_jar is None:
        logger.warning("You are not using the dbnd java agent")
        return None
    logger.debug("found agent_jar %s", agent_jar)
    if not os.path.exists(agent_jar):
        # Use lazy %-style logging args (consistent with every other log
        # call in this module) instead of eagerly building the message
        # with str.format; rendered message text is unchanged.
        logger.warning("The wanted dbnd java agent is not found: %s", agent_jar)
        return None
    return create_spark_java_agent_conf(agent_jar)
def get_databricks_python_script_name(raw_script_path):
    # type: (str) -> Dict[str, str]
    """Derive the script-name environment entry from a Databricks script path.

    Returns {ENV_DBND_SCRIPT_NAME: <basename>} when a basename can be
    extracted, otherwise {}. Never raises: any failure is logged and an
    empty dict is returned.
    """
    try:
        script_name = os.path.basename(raw_script_path)
        if not script_name:
            # Guard clause: path ended in a separator, so no basename.
            logger.warning(
                "Unable to determine script name from path %s", raw_script_path
            )
            return {}
        return {ENV_DBND_SCRIPT_NAME: script_name}
    except Exception as exc:
        logger.error(
            "Unable to determine script name from path %s, exception: %s",
            raw_script_path,
            exc,
        )
        return {}