def _check_driver_class_path(self, job_configs, param_dict, wf_dir):
    overridden = edp.spark_driver_classpath(
        job_configs.get('configs', {}))
    if overridden:
        # an explicit driver classpath from the job configs wins
        param_dict['driver-class-path'] = (
            " --driver-class-path " + overridden)
        return

    if not param_dict.get('wrapper_jar'):
        # no need for a driver classpath if Swift is not used as a data source
        param_dict['driver-class-path'] = ""
        return

    cp = param_dict['driver-class-path'] or ""
    if param_dict['deploy-mode'] == 'client' and not (
            cp.startswith(":") or cp.endswith(":")):
        # in client mode, add the job's working directory to the classpath
        cp += ":" + wf_dir
    param_dict['driver-class-path'] = " --driver-class-path " + cp
def _check_driver_class_path(self, job_configs, param_dict):
    overridden = edp.spark_driver_classpath(
        job_configs.get('configs', {}))
    if overridden:
        # an explicit driver classpath from the job configs wins
        param_dict['driver-class-path'] = (
            " --driver-class-path " + overridden)
        return

    if not param_dict.get('wrapper_jar'):
        # no need for a driver classpath if Swift is not used as a data source
        param_dict['driver-class-path'] = ""
        return

    cp = param_dict['driver-class-path'] or ""
    if param_dict['deploy-mode'] == 'client' and not (
            cp.startswith(":") or cp.endswith(":")):
        # a trailing ":" adds an empty classpath entry, which the JVM
        # resolves to the current working directory in client mode
        cp += ":"
    param_dict['driver-class-path'] = " --driver-class-path " + cp
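
# --- Hedged illustration (not part of the original module) -------------------
# A minimal, self-contained sketch of the classpath handling above, using
# made-up values. The dict lookup below stands in for edp.spark_driver_classpath();
# the config key, function name, and sample paths are assumptions for this
# example only, not Sahara API.

def _build_driver_class_path_sketch(configs, param_dict, wf_dir=None):
    overridden = configs.get('driver-classpath-override')  # hypothetical key
    if overridden:
        return " --driver-class-path " + overridden
    if not param_dict.get('wrapper_jar'):
        return ""  # Swift not used as a data source, nothing to add
    cp = param_dict.get('driver-class-path') or ""
    if param_dict['deploy-mode'] == 'client' and not (
            cp.startswith(":") or cp.endswith(":")):
        # append the working dir if known, otherwise an empty entry (cwd)
        cp += (":" + wf_dir) if wf_dir else ":"
    return " --driver-class-path " + cp

# Example with hypothetical values:
#   _build_driver_class_path_sketch(
#       {}, {'wrapper_jar': 'wrapper.jar',
#            'deploy-mode': 'client',
#            'driver-class-path': '/opt/jars/extra.jar'},
#       wf_dir='/tmp/job-wf')
#   -> " --driver-class-path /opt/jars/extra.jar:/tmp/job-wf"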