def default_spark_home_path():
    """Request the default SPARK_HOME path from the service."""
    response = _service.request(service_pb2.SingleStringRequest(),
                                "default_spark_home_path")
    import google.protobuf.json_format as json_format
    res = json_format.Parse(response, service_pb2.SingleStringResponse())
    return res.response.encode("utf8"), res.status
def default_tmp_hdfs_path(unique_id):
    """Request a temporary HDFS path for the given unique_id."""
    request = service_pb2.SingleStringRequest()
    request.request = unique_id
    response = _service.request(request, "tmp_hdfs_path")
    import google.protobuf.json_format as json_format
    res = json_format.Parse(response, service_pb2.SingleStringResponse())
    return res.response.encode("utf8"), res.status
def default_hadoop_client_path():
    """Request the default hadoop client path from the service."""
    response = _service.request(service_pb2.SingleStringRequest(),
                                "default_hadoop_client_path")
    import google.protobuf.json_format as json_format
    res = json_format.Parse(response, service_pb2.SingleStringResponse())
    #logger.info(res)
    return res.response.encode("utf8"), res.status
def default_hadoop_job_ugi(hadoop_config_path=None):
    """Request default hadoop.job.ugi from hadoop config."""
    if hadoop_config_path is None:
        hadoop_config_path = default_hadoop_config_path()
    request = service_pb2.SingleStringRequest()
    request.request = hadoop_config_path
    response = _service.request(request, "default_hadoop_job_ugi")
    import google.protobuf.json_format as json_format
    res = json_format.Parse(response, service_pb2.SingleStringResponse())
    return res.response.encode("utf8"), res.status
def default_fs_defaultfs(hadoop_config_path=None):
    """Request default fs.defaultFS from hadoop config."""
    if hadoop_config_path is None:
        hadoop_config_path = default_hadoop_config_path()
    request = service_pb2.SingleStringRequest()
    request.request = hadoop_config_path
    response = _service.request(request, "default_fs_defaultfs")
    import google.protobuf.json_format as json_format
    res = json_format.Parse(response, service_pb2.SingleStringResponse())
    #logger.info(res)
    return res.response.encode("utf8"), res.status
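# Illustrative usage sketch, not part of the service API: shows how the helpers
# above are typically called together. The function name `_example_fetch_defaults`
# is hypothetical, and the sketch assumes the enclosing module has already
# initialized `_service` and `service_pb2` and that a status of 0 means success;
# both are assumptions about the surrounding code, not guarantees made here.
def _example_fetch_defaults():
    # Ask the service for the SPARK_HOME it was configured with.
    spark_home, status = default_spark_home_path()
    if status != 0:
        return None
    # Resolve hadoop defaults; both helpers fall back to the default
    # hadoop config path when none is supplied.
    ugi, ugi_status = default_hadoop_job_ugi()
    fs, fs_status = default_fs_defaultfs()
    if ugi_status != 0 or fs_status != 0:
        return None
    return {
        "spark_home": spark_home,
        "hadoop.job.ugi": ugi,
        "fs.defaultFS": fs,
    }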