def get_package_commands(package_name):
    """List the real path(s) to executables for a specific dcos subcommand

    :param package_name: package name
    :type package_name: str
    :returns: list of all the dcos program paths in package
    :rtype: [str]
    """

    bin_dir = os.path.join(_package_dir(package_name),
                           constants.DCOS_SUBCOMMAND_ENV_SUBDIR,
                           "bin")

    # Windows virtualenvs keep executables under "Scripts" instead of "bin".
    if not os.path.exists(bin_dir) and util.is_windows_platform():
        bin_dir = os.path.join(_package_dir(package_name),
                               constants.DCOS_SUBCOMMAND_ENV_SUBDIR,
                               "Scripts")

    # A package with no executables directory provides no subcommands;
    # previously os.listdir raised FileNotFoundError here.
    if not os.path.isdir(bin_dir):
        return []

    executables = []
    for filename in os.listdir(bin_dir):
        path = os.path.join(bin_dir, filename)
        # Only report files following the "dcos-<name>" convention that
        # are actually executable.
        if (filename.startswith(constants.DCOS_COMMAND_PREFIX) and
                _is_executable(path)):
            executables.append(path)

    return executables
def _page(output, pager_command=None):
    """Conditionally pipes the supplied output through a pager.

    :param output: object to display; converted to text before paging
    :type output: object
    :param pager_command: pager command line (defaults to 'less -R')
    :type pager_command: str
    """

    output = six.text_type(output)

    # Paging is pointless without an interactive POSIX terminal.
    if not sys.stdout.isatty() or util.is_windows_platform():
        print(output)
        return

    num_lines = output.count('\n')
    exceeds_tty_height = pager.getheight() - 1 < num_lines

    if pager_command is None:
        pager_command = 'less -R'

    # Pagination defaults to on when the property is unset, but an
    # explicit False must disable it. The previous expression
    # `config.get_config_val("core.pagination") or True` evaluated to
    # True even when the user set the property to False.
    pagination = config.get_config_val("core.pagination")
    paginate = True if pagination is None else pagination

    if exceeds_tty_height and paginate and \
            spawn.find_executable(pager_command.split(' ')[0]) is not None:
        pydoc.pipepager(output, cmd=pager_command)
    else:
        print(output)
def _page(output, pager_command=None):
    """Pipe ``output`` through a pager when it would overflow the terminal.

    :param output: value to display
    :type output: object
    :param pager_command: pager command line to run
    :type pager_command: str
    """

    text = six.text_type(output)

    if pager_command is None:
        pager_command = 'less -R'

    # Never page on Windows or when stdout is not an interactive terminal.
    if util.is_windows_platform() or not sys.stdout.isatty():
        print(text)
        return

    # Page only when the output is taller than the terminal and the
    # user has not disabled pagination.
    overflows = text.count('\n') > pager.getheight() - 1
    use_pager = util.get_config().get("core.pagination", True)

    if overflows and use_pager:
        pydoc.pipepager(text, cmd=pager_command)
    else:
        print(text)
def _get_command(master, args):
    """Build the spark-submit argument vector for the given Mesos master."""
    if util.is_windows_platform():
        executable = 'spark-submit.cmd'
    else:
        executable = 'spark-submit'

    submit_file = spark_file(os.path.join('bin', executable))

    base = [submit_file, "--deploy-mode", "cluster",
            "--master", "mesos://" + master]
    return base + args
def _page(output, pager_command=None):
    """Conditionally pipes the supplied output through a pager.

    :param output: value to display
    :type output: object
    :param pager_command: pager command line to run
    :type pager_command: str
    """

    text = str(output)

    if pager_command is None:
        pager_command = 'less -R'

    # Paging only makes sense on an interactive, non-Windows terminal.
    if util.is_windows_platform() or not sys.stdout.isatty():
        print(text)
        return

    # Use the pager only when the output is taller than the terminal.
    if text.count('\n') > pager.getheight() - 1:
        pydoc.pipepager(text, cmd=pager_command)
    else:
        print(text)
def _process_json(event, pager_command):
    """Serialize ``event`` to JSON, optionally syntax-highlighted.

    :param event: event to emit to stdout
    :type event: str, dict, list, or dcos.errors.Error
    :param pager_command: pager command line, if one is configured
    :type pager_command: str
    :returns: String representation of the supplied JSON value,
              possibly syntax-highlighted.
    :rtype: str
    """

    force_colors = False  # TODO(CD): Introduce a --colors flag

    rendered = json.dumps(event, sort_keys=True, indent=2)
    # Strip trailing whitespace from every line.
    rendered = re.sub(r'\s+$', '', rendered, 0, re.M)

    if not sys.stdout.isatty():
        return _highlight_json(rendered) if force_colors else rendered

    # Highlight when forced, or when the platform supports ANSI colors
    # and no pager will consume the output.
    highlight = force_colors or (
        not util.is_windows_platform() and pager_command is None)

    return _highlight_json(rendered) if highlight else rendered
def list_paths():
    """List the real path to executable dcos subcommand programs.

    :returns: list of all the dcos program paths
    :rtype: [str]
    """

    subcommands = []
    for package in distributions():
        bin_dir = os.path.join(_package_dir(package),
                               constants.DCOS_SUBCOMMAND_ENV_SUBDIR,
                               "bin")

        # Windows virtualenvs keep executables under "Scripts".
        if not os.path.exists(bin_dir) and util.is_windows_platform():
            bin_dir = os.path.join(_package_dir(package),
                                   constants.DCOS_SUBCOMMAND_ENV_SUBDIR,
                                   "Scripts")

        # Skip packages that ship no executables directory; os.listdir
        # would otherwise raise for the missing path and abort listing
        # every other package's commands.
        if not os.path.isdir(bin_dir):
            continue

        for filename in os.listdir(bin_dir):
            path = os.path.join(bin_dir, filename)
            if (filename.startswith(constants.DCOS_COMMAND_PREFIX) and
                    _is_executable(path)):
                subcommands.append(path)

    return subcommands
def get_package_commands(package_name):
    """List the subcommands of a specific package.

    :param package_name: package name
    :type package_name: str
    :returns: list of all the dcos program paths in package
    :rtype: [str]
    """

    bin_dir = os.path.join(_package_dir(package_name),
                           constants.DCOS_SUBCOMMAND_ENV_SUBDIR,
                           "bin")

    # Windows virtualenvs keep executables under "Scripts" instead of "bin".
    if not os.path.exists(bin_dir) and util.is_windows_platform():
        bin_dir = os.path.join(_package_dir(package_name),
                               constants.DCOS_SUBCOMMAND_ENV_SUBDIR,
                               "Scripts")

    executables = []

    # Prefer the plugin.toml manifest (one level above bin/) when present;
    # it declares the package's command names explicitly.
    plugin_toml = os.path.join(os.path.join(bin_dir, os.pardir),
                               "plugin.toml")
    if os.path.exists(plugin_toml):
        with open(plugin_toml, "r", encoding="utf-8") as fp:
            plugin = toml.load(fp)
            for command in plugin["commands"]:
                executables.append(command["name"])
    else:
        # Fall back to scanning bin/ for executable "dcos-<name>" programs
        # and report only the <name> part.
        for filename in os.listdir(bin_dir):
            path = os.path.join(bin_dir, filename)
            if (filename.startswith(constants.DCOS_COMMAND_PREFIX) and
                    _is_executable(path)):
                subcommand = filename[len(constants.DCOS_COMMAND_PREFIX):]
                executables.append(subcommand)

    return executables
def validate(self):
    """Validates a package registry.

    :returns: Validation errors
    :rtype: [str]
    """

    # TODO(CD): implement these checks in pure Python?
    scripts_dir = os.path.join(self._base_path, 'scripts')
    if util.is_windows_platform():
        validate_script = os.path.join(scripts_dir,
                                       '1-validate-packages.ps1')
        cmd = [
            'powershell',
            '-ExecutionPolicy',
            'ByPass',
            '-File',
            validate_script
        ]
        result = subprocess.call(cmd)
    else:
        validate_script = os.path.join(scripts_dir,
                                       '1-validate-packages.sh')
        result = subprocess.call(validate_script)

    # Compare the exit status by value: `is not 0` tests object identity
    # and only works by accident via CPython's small-int caching.
    if result != 0:
        return ["Source tree is not valid [{}]".format(self._base_path)]
    else:
        return []
def _is_executable(path):
    """
    :param path: the path to a program
    :type path: str
    :returns: True if the path is an executable; False otherwise
    :rtype: bool
    """

    if not os.access(path, os.X_OK):
        return False
    # Windows has no execute permission bit, so additionally require the
    # conventional ".exe" suffix there.
    if util.is_windows_platform():
        return path.endswith(".exe")
    return True
def _get_command(master, args):
    """Assemble the spark-submit invocation for ``master``."""
    suffix = '.cmd' if util.is_windows_platform() else ''
    submit_file = spark_file(os.path.join('bin', 'spark-submit' + suffix))

    return [
        submit_file,
        "--deploy-mode", "cluster",
        "--master", "mesos://" + master,
    ] + args
def _is_executable(path):
    """
    :param path: the path to a program
    :type path: str
    :returns: True if the path is an executable; False otherwise
    :rtype: bool
    """

    executable = os.access(path, os.X_OK)
    if util.is_windows_platform():
        # No execute bit on Windows; also require the '.exe' suffix.
        return executable and path.endswith('.exe')
    return executable
def run(self): """Run the helper threads in this class which enable streaming of STDIN/STDOUT/STDERR between the CLI and the Mesos Agent API. If a tty is requested, we take over the current terminal and put it into raw mode. We make sure to reset the terminal back to its original settings before exiting. """ # Without a TTY. if not self.tty: try: self._start_threads() self.exit_event.wait() except Exception as e: self.exception = e if self.exception: raise self.exception return # With a TTY. if util.is_windows_platform(): raise DCOSException( "Running with the '--tty' flag is not supported on windows.") if not sys.stdin.isatty(): raise DCOSException( "Must be running in a tty to pass the '--tty flag'.") fd = sys.stdin.fileno() oldtermios = termios.tcgetattr(fd) try: tty.setraw(fd, when=termios.TCSANOW) if self.interactive: self._window_resize(signal.SIGWINCH, None) signal.signal(signal.SIGWINCH, self._window_resize) self._start_threads() self.exit_event.wait() except Exception as e: self.exception = e termios.tcsetattr( sys.stdin.fileno(), termios.TCSAFLUSH, oldtermios) if self.exception: raise self.exception
def run(self): """Run the helper threads in this class which enable streaming of STDIN/STDOUT/STDERR between the CLI and the Mesos Agent API. If a tty is requested, we take over the current terminal and put it into raw mode. We make sure to reset the terminal back to its original settings before exiting. """ # Without a TTY. if not self.tty: try: self._start_threads() self.exit_event.wait() except Exception as e: self.exception = e if self.exception: raise self.exception return # With a TTY. if util.is_windows_platform(): raise DCOSException( "Running with the '--tty' flag is not supported on windows.") if not sys.stdin.isatty(): raise DCOSException( "Must be running in a tty to pass the '--tty flag'.") fd = sys.stdin.fileno() oldtermios = termios.tcgetattr(fd) try: if self.interactive: tty.setraw(fd, when=termios.TCSANOW) self._window_resize(signal.SIGWINCH, None) signal.signal(signal.SIGWINCH, self._window_resize) self._start_threads() self.exit_event.wait() except Exception as e: self.exception = e termios.tcsetattr( sys.stdin.fileno(), termios.TCSAFLUSH, oldtermios) if self.exception: raise self.exception
def check_java():
    """Locate a Java runtime and verify its version.

    Searches the PATH first, then $JAVA_HOME/bin. Prints an error and
    returns False when no runtime can be found.
    """
    if util.is_windows_platform():
        java_executable = 'java.exe'
    else:
        java_executable = 'java'

    # Check if JAVA is in the PATH
    if which(java_executable) is not None:
        return check_java_version(java_executable)

    # Check if JAVA_HOME is set and find java
    java_home = os.environ.get('JAVA_HOME')
    if java_home is not None:
        candidate = os.path.join(java_home, "bin", java_executable)
        if os.path.isfile(candidate):
            return check_java_version(candidate)

    print("DCOS Spark requires Java 1.7.x to be installed, "
          "please install JRE")
    return False
def _get_command(dispatcher, args):
    """Build the spark-submit command line for ``dispatcher``.

    The dispatcher's http(s) scheme is translated into the
    mesos:// / mesos-ssl:// scheme spark-submit expects.
    """
    if util.is_windows_platform():
        spark_executable = 'spark-submit.cmd'
    else:
        spark_executable = 'spark-submit'
    submit_file = spark_file(os.path.join('bin', spark_executable))

    if dispatcher.startswith("https://"):
        master = "mesos-ssl://" + dispatcher[8:]
    else:
        master = "mesos://" + dispatcher[7:]

    # Disable certificate verification only when requested.
    ssl_ops = [] if _cert_verification() else \
        ["--conf", "spark.ssl.noCertVerification=true"]

    return [submit_file, "--deploy-mode", "cluster",
            "--master", master] + ssl_ops + args
def _get_command(dispatcher, args):
    """Assemble the full spark-submit argument vector."""
    executable = 'spark-submit.cmd' if util.is_windows_platform() \
        else 'spark-submit'
    submit_file = spark_file(os.path.join('bin', executable))

    # Map the HTTP(S) dispatcher URL onto the scheme spark-submit
    # understands.
    if dispatcher.startswith("https://"):
        dispatcher = "mesos-ssl://" + dispatcher[8:]
    else:
        dispatcher = "mesos://" + dispatcher[7:]

    command = [submit_file, "--deploy-mode", "cluster",
               "--master", dispatcher]
    if not _cert_verification():
        command += ["--conf", "spark.ssl.noCertVerification=true"]

    return command + args
def validate(self):
    """Validates a package registry.

    :returns: Validation errors
    :rtype: [Error]
    """

    # TODO(CD): implement these checks in pure Python?
    scripts_dir = os.path.join(self._base_path, "scripts")
    if util.is_windows_platform():
        validate_script = os.path.join(scripts_dir,
                                       "1-validate-packages.ps1")
        cmd = ["powershell",
               "-ExecutionPolicy",
               "ByPass",
               "-File",
               validate_script]
        result = subprocess.call(cmd)
    else:
        validate_script = os.path.join(scripts_dir,
                                       "1-validate-packages.sh")
        result = subprocess.call(validate_script)

    # Compare the exit status by value: `is not 0` tests object identity
    # and only works by accident via CPython's small-int caching.
    if result != 0:
        return [Error(
            "Source tree is not valid [{}]".format(self._base_path))]
    else:
        return []
def find_java():
    """Locate the Java executable.

    Looks in $JAVA_HOME/bin first, then in each directory on PATH.

    :returns: path to an executable java binary
    :rtype: str
    :raises CliError: if no executable java binary can be found
    """

    def executable(file_path):
        # A usable binary must both exist and carry execute permission.
        return os.path.isfile(file_path) and os.access(file_path, os.X_OK)

    java_binary = 'java.exe' if util.is_windows_platform() else 'java'

    # Prefer an explicitly configured JAVA_HOME. Build the candidate path
    # with os.path.join (platform-correct separators) instead of
    # hard-coded '/' concatenation, and compute it only once.
    java_home = os.environ.get('JAVA_HOME')
    if java_home is not None:
        candidate = os.path.join(java_home, 'bin', java_binary)
        if executable(candidate):
            return candidate

    if 'PATH' in os.environ:
        for path in os.environ['PATH'].split(os.pathsep):
            # Strip quoting that sometimes wraps PATH entries on Windows.
            path = path.strip('"')
            java_file = os.path.join(path, java_binary)
            if executable(java_file):
                return java_file

    raise CliError("This command requires Java to be installed. "
                   "Please install JRE")
def _process_json(event):
    """Conditionally highlights the supplied JSON value.

    :param event: event to emit to stdout
    :type event: str, dict, list, or dcos.errors.Error
    :returns: String representation of the supplied JSON value,
              possibly syntax-highlighted.
    :rtype: str
    """

    rendered = json.dumps(event, sort_keys=True, indent=2)
    # Strip trailing whitespace from every line.
    rendered = re.sub(r'\s+$', '', rendered, 0, re.M)

    # Colorize only for interactive, non-Windows terminals.
    if sys.stdout.isatty() and not util.is_windows_platform():
        return _highlight_json(rendered)

    return rendered
def deployment_watch(self, deployment_id, max_count, interval):
    """Poll a deployment until it finishes, clearing and redrawing the
    terminal between polls.

    :param deployment_id: the application id
    :type deployment_id: str
    :param max_count: maximum number of polling calls
    :type max_count: str
    :param interval: wait interval in seconds between polling calls
    :type interval: str
    :returns: process return code
    :rtype: int
    """

    if max_count is not None:
        max_count = util.parse_int(max_count)

    # Default to polling once per second.
    interval = 1 if interval is None else util.parse_int(interval)

    client = self._create_marathon_client()

    count = 0
    while max_count is None or count < max_count:
        deployment = client.get_deployment(deployment_id)
        # A missing deployment means it has completed.
        if deployment is None:
            return 0

        # Clear the screen before redrawing the status.
        if util.is_windows_platform():
            os.system('cls')
        elif 'TERM' in os.environ:
            os.system('clear')

        timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime())
        emitter.publish('Deployment update time: '
                        '{} \n'.format(timestamp))
        emitter.publish(deployment)

        time.sleep(interval)
        count += 1

    return 0
def _deployment_watch(deployment_id, max_count, interval):
    """Poll a Marathon deployment, redrawing its status until it
    completes or the polling budget is exhausted.

    :param deployment_id: the application id
    :type deployment_id: str
    :param max_count: maximum number of polling calls
    :type max_count: str
    :param interval: wait interval in seconds between polling calls
    :type interval: str
    :returns: process return code
    :rtype: int
    """

    if max_count is not None:
        max_count = util.parse_int(max_count)

    # Default to polling once per second.
    interval = 1 if interval is None else util.parse_int(interval)

    client = marathon.create_client()

    count = 0
    while max_count is None or count < max_count:
        deployment = client.get_deployment(deployment_id)
        # The deployment disappears from Marathon once it has finished.
        if deployment is None:
            return 0

        # Clear the terminal before redrawing the status.
        if util.is_windows_platform():
            os.system('cls')
        else:
            if 'TERM' in os.environ:
                os.system('clear')

        emitter.publish('Deployment update time: '
                        '{} \n'.format(time.strftime("%Y-%m-%d %H:%M:%S",
                                                     time.gmtime())))
        emitter.publish(deployment)
        time.sleep(interval)
        count += 1

    return 0
def validate(self):
    """Validates a package registry.

    :returns: Validation errors
    :rtype: [str]
    """

    # TODO(CD): implement these checks in pure Python?
    scripts_dir = os.path.join(self._base_path, 'scripts')
    if util.is_windows_platform():
        validate_script = os.path.join(scripts_dir,
                                       '1-validate-packages.ps1')
        cmd = ['powershell', '-ExecutionPolicy', 'ByPass', '-File',
               validate_script]
        result = subprocess.call(cmd)
    else:
        validate_script = os.path.join(scripts_dir,
                                       '1-validate-packages.sh')
        result = subprocess.call(validate_script)

    # Compare the exit status by value: `is not 0` tests object identity
    # and only works by accident via CPython's small-int caching.
    if result != 0:
        return ["Source tree is not valid [{}]".format(self._base_path)]
    else:
        return []
:param package_name: the name of the package :type package_name: str :returns: True if the subcommand was uninstalled :rtype: bool """ pkg_dir = package_dir(package_name) if os.path.isdir(pkg_dir): shutil.rmtree(pkg_dir) return True return False BIN_DIRECTORY = 'Scripts' if util.is_windows_platform() else 'bin' def _install_with_pip( package_name, env_directory, requirements): """ :param package_name: the name of the package :type package_name: str :param env_directory: the path to the directory in which to install the package's virtual env :type env_directory: str :param requirements: the list of pip requirements :type requirements: list of str :rtype: None
def _install_with_binary(
        package_name,
        env_directory,
        binary_cli):
    """Download, verify, and install a package's binary CLI subcommand.

    :param package_name: the name of the package
    :type package_name: str
    :param env_directory: the path to the directory in which to install the
                          package's binary_cli
    :type env_directory: str
    :param binary_cli: binary cli to install
    :type binary_cli: dict
    :rtype: None
    """

    binary_url, kind = binary_cli.get("url"), binary_cli.get("kind")

    # Point the download at the configured cluster URL.
    binary_url = _rewrite_binary_url(
        binary_url, config.get_config_val("core.dcos_url"))

    try:
        env_bin_dir = os.path.join(env_directory, BIN_DIRECTORY)

        if kind in ["executable", "zip"]:
            with util.temptext() as file_tmp:
                _, binary_tmp = file_tmp
                _download_and_store(binary_url, binary_tmp)
                # Refuse to install a download whose hash does not match
                # the advertised contentHash.
                _check_hash(binary_tmp, binary_cli.get("contentHash"))

                if kind == "executable":
                    util.ensure_dir_exists(env_bin_dir)

                    binary_name = "dcos-{}".format(package_name)
                    if util.is_windows_platform():
                        binary_name += '.exe'

                    binary_file = os.path.join(env_bin_dir, binary_name)

                    # copy to avoid windows error of moving open file
                    # binary_tmp will be removed by context manager
                    shutil.copy(binary_tmp, binary_file)
                else:
                    # kind == "zip"
                    with zipfile.ZipFile(binary_tmp) as zf:
                        zf.extractall(env_directory)

            # check contents for package_name/env/bin folder structure
            if not os.path.exists(env_bin_dir):
                msg = (
                    "CLI subcommand for [{}] has an unexpected format. "
                    "Please contact the package maintainer".format(
                        package_name))
                raise DCOSException(msg)
        else:
            msg = ("CLI subcommand for [{}] is an unsupported type: {}"
                   "Please contact the package maintainer".format(
                       package_name, kind))
            raise DCOSException(msg)

        # Ensure the installed binaries carry the execute bit.
        for f in os.listdir(env_bin_dir):
            binary = os.path.join(env_bin_dir, f)
            if (f.startswith(constants.DCOS_COMMAND_PREFIX)):
                st = os.stat(binary)
                os.chmod(binary, st.st_mode | stat.S_IEXEC)
    except DCOSException:
        raise
    except Exception as e:
        logger.exception(e)
        # BaseException has no `.message` attribute on Python 3; use
        # str(e) to obtain the error text portably.
        raise _generic_error(package_name, str(e))

    return None
def run(dispatcher, args, verbose):
    """
    Runs spark-submit.

    :param dispatcher: Spark Dispatcher URL. Used to construct --master.
    :type dispatcher: string
    :param args: Extra arguments to spark-submit
    :type args: list[string]
    :param verbose: If true, prints verbose information to stdout.
    :type verbose: boolean
    """
    if not check_java():
        return (None, 1)

    # When authentication is required, route the submission through a
    # local proxy thread that injects the token.
    proxying = _should_proxy(dispatcher)
    proxy_thread = ProxyThread(_get_token() if proxying else None,
                               dispatcher)
    if proxying:
        proxy_thread.start()
        dispatcher = 'http://localhost:{}'.format(proxy_thread.port())

    command = _get_command(dispatcher, args)

    # On Windows, python 2 complains about unicode in env.
    env = dict([str(key), str(value)]
               for key, value in os.environ.iteritems()) \
        if util.is_windows_platform() and sys.version_info[0] < 3 \
        else os.environ

    process = subprocess.Popen(command,
                               env=env,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    stdout, stderr = process.communicate()

    # The submission has finished; tear down the proxy before examining
    # the result.
    if proxying:
        proxy_thread.proxy.shutdown()
        proxy_thread.join()

    if verbose:
        print("Ran command: " + " ".join(command))
        print("Stdout:")
        print(stdout)
        print("Stderr:")
        print(stderr)

    err = stderr.decode("utf-8")
    if process.returncode != 0:
        # Translate common gateway HTTP errors into friendlier messages.
        if "502 Bad Gateway" in err:
            print("Spark service is not found in your DCOS cluster.")
            return (None, process.returncode)
        if "500 Internal Server Error" in err:
            print("Error reaching Spark cluster endpoint. Please make sure "
                  "Spark service is in running state in Marathon.")
            return (None, process.returncode)
        print("Spark submit failed:")
        print(stderr)
        return (None, process.returncode)
    else:
        # spark-submit writes a JSON blob describing the submission to
        # stderr; scan for the first top-level {...} block and parse it.
        if "{" in err:
            lines = err.splitlines()
            jsonStr = ""
            startScan = False
            for l in lines:
                if l.startswith("}") and startScan:
                    jsonStr += l + os.linesep
                    startScan = False
                elif startScan:
                    jsonStr += l + os.linesep
                elif l.startswith("{"):
                    startScan = True
                    jsonStr += l + os.linesep
            response = json.loads(jsonStr)
            return (response, process.returncode)

        return (None, process.returncode)
:param package_name: the name of the package :type package_name: str :returns: True if the subcommand was uninstalled :rtype: bool """ pkg_dir = _package_dir(package_name) if os.path.isdir(pkg_dir): shutil.rmtree(pkg_dir) return True return False BIN_DIRECTORY = 'Scripts' if util.is_windows_platform() else 'bin' def _find_virtualenv(bin_directory): """ :param bin_directory: directory to first use to find virtualenv :type bin_directory: str :returns: Absolute path to virutalenv program :rtype: str """ virtualenv_path = os.path.join(bin_directory, 'virtualenv') if not os.path.exists(virtualenv_path): virtualenv_path = util.which('virtualenv') if virtualenv_path is None:
def _install_with_binary(
        package_name,
        env_directory,
        binary_cli):
    """Download, verify, and install a package's binary CLI subcommand.

    :param package_name: the name of the package
    :type package_name: str
    :param env_directory: the path to the directory in which to install the
                          package's binary_cli
    :type env_directory: str
    :param binary_cli: binary cli to install
    :type binary_cli: dict
    :rtype: None
    """

    binary_url, kind = binary_cli.get("url"), binary_cli.get("kind")

    try:
        env_bin_dir = os.path.join(env_directory, BIN_DIRECTORY)

        if kind in ["executable", "zip"]:
            with util.temptext() as file_tmp:
                _, binary_tmp = file_tmp
                _download_and_store(binary_url, binary_tmp)
                # Refuse to install a download whose hash does not match
                # the advertised contentHash.
                _check_hash(binary_tmp, binary_cli.get("contentHash"))

                if kind == "executable":
                    util.ensure_dir_exists(env_bin_dir)

                    binary_name = "dcos-{}".format(package_name)
                    if util.is_windows_platform():
                        binary_name += '.exe'

                    binary_file = os.path.join(env_bin_dir, binary_name)

                    # copy to avoid windows error of moving open file
                    # binary_tmp will be removed by context manager
                    shutil.copy(binary_tmp, binary_file)
                else:
                    # kind == "zip"
                    with zipfile.ZipFile(binary_tmp) as zf:
                        zf.extractall(env_directory)

            # check contents for package_name/env/bin folder structure
            if not os.path.exists(env_bin_dir):
                msg = (
                    "CLI subcommand for [{}] has an unexpected format. "
                    "Please contact the package maintainer".format(
                        package_name))
                raise DCOSException(msg)
        else:
            msg = ("CLI subcommand for [{}] is an unsupported type: {}"
                   "Please contact the package maintainer".format(
                       package_name, kind))
            raise DCOSException(msg)

        # make binar(ies) executable
        for f in os.listdir(env_bin_dir):
            binary = os.path.join(env_bin_dir, f)
            if (f.startswith(constants.DCOS_COMMAND_PREFIX)):
                st = os.stat(binary)
                os.chmod(binary, st.st_mode | stat.S_IEXEC)
    except DCOSException:
        raise
    except Exception as e:
        logger.exception(e)
        raise _generic_error(package_name)

    return None
import sys
import time

import pytest

import dcos.util as util
from dcos.util import create_schema, tempdir

from dcoscli.test.common import (assert_command, assert_lines,
                                 assert_lines_range, exec_command,
                                 popen_tty)
from dcoscli.test.marathon import (add_app, app, remove_app,
                                   watch_all_deployments)
from ..fixtures.task import task_fixture

# termios/tty are POSIX-only modules; they must not be imported on
# Windows.
if not util.is_windows_platform():
    import termios
    import tty

# Paths to the JSON app/task fixtures exercised by these tests.
SLEEP_COMPLETED = 'tests/data/marathon/apps/sleep-completed.json'
SLEEP_COMPLETED1 = 'tests/data/marathon/apps/sleep-completed1.json'
SLEEP1 = 'tests/data/marathon/apps/sleep1.json'
SLEEP2 = 'tests/data/marathon/apps/sleep2.json'
FOLLOW = 'tests/data/file/follow.json'
TWO_TASKS = 'tests/data/file/two_tasks.json'
TWO_TASKS_FOLLOW = 'tests/data/file/two_tasks_follow.json'
LS = 'tests/data/tasks/ls-app.json'
DOWNLOAD = 'tests/data/tasks/download-app.json'
SH = 'tests/data/tasks/sh-app.json'
CAT = 'tests/data/tasks/cat-app.json'
HELLO_STDERR = 'tests/data/marathon/apps/hello-stderr.json'
import signal import sys import threading import time import uuid from functools import partial from queue import Queue from six.moves import urllib from dcos import config, http, recordio, util from dcos.errors import DCOSException, DCOSHTTPException if not util.is_windows_platform(): import termios import tty logger = util.get_logger(__name__) def get_master(dcos_client=None): """Create a Master object using the url stored in the 'core.mesos_master_url' property if it exists. Otherwise, we use the `core.dcos_url` property :param dcos_client: DCOSClient :type dcos_client: DCOSClient | None :returns: master state object :rtype: Master
:param package_name: the name of the package :type package_name: str :returns: True if the subcommand was uninstalled :rtype: bool """ pkg_dir = package_dir(package_name) if os.path.isdir(pkg_dir): shutil.rmtree(pkg_dir) return True return False BIN_DIRECTORY = "Scripts" if util.is_windows_platform() else "bin" def _install_with_pip(package_name, env_directory, requirements): """ :param package_name: the name of the package :type package_name: str :param env_directory: the path to the directory in which to install the package's virtual env :type env_directory: str :param requirements: the list of pip requirements :type requirements: list of str :rtype: None """ bin_directory = os.path.join(util.dcos_path(), BIN_DIRECTORY)
def run(dispatcher, args, verbose, props=[]):
    """Run spark-submit with the passed-in parameters.

    e.g.: ./bin/spark-submit --deploy-mode cluster
    --class org.apache.spark.examples.SparkPi
    --master mesos://10.127.131.174:8077 --executor-memory 1G
    --total-executor-cores 100 --driver-memory 1G
    http://10.127.131.174:8000/spark-examples_2.10-1.3.0-SNAPSHOT.jar 30

    NOTE(review): `props=[]` is a mutable default argument; it is only
    read here (never mutated), so it is currently safe, but a None
    default would be the conventional form.
    """
    if not check_java():
        return (None, 1)

    # When authentication is required, route the submission through a
    # local proxy thread that injects the token.
    proxying = _should_proxy(dispatcher)
    proxy_thread = ProxyThread(_get_token() if proxying else None,
                               dispatcher)
    if proxying:
        proxy_thread.start()
        dispatcher = 'http://localhost:{}'.format(proxy_thread.port())

    command = _get_command(dispatcher, args)

    # Forward any extra driver properties through SPARK_JAVA_OPTS.
    extra_env = {"SPARK_JAVA_OPTS": ' '.join(props)}
    env = dict(os.environ, **extra_env)

    # On Windows python 2 complains about unicode in env
    if util.is_windows_platform() and sys.version_info[0] < 3:
        env = dict([str(key), str(value)]
                   for key, value in env.iteritems())

    process = subprocess.Popen(command,
                               env=env,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    stdout, stderr = process.communicate()

    # The submission has finished; tear down the proxy before examining
    # the result.
    if proxying:
        proxy_thread.proxy.shutdown()
        proxy_thread.join()

    if verbose is True:
        print("Ran command: " + " ".join(command))
        print("With added env vars: {0}".format(extra_env))
        print("Stdout:")
        print(stdout)
        print("Stderr:")
        print(stderr)

    err = stderr.decode("utf-8")
    if process.returncode != 0:
        # Translate common gateway HTTP errors into friendlier messages.
        if "502 Bad Gateway" in err:
            print("Spark service is not found in your DCOS cluster.")
            return (None, process.returncode)
        if "500 Internal Server Error" in err:
            print("Error reaching Spark cluster endpoint. Please make sure "
                  "Spark service is in running state in Marathon.")
            return (None, process.returncode)
        print("Spark submit failed:")
        print(stderr)
        return (None, process.returncode)
    else:
        # spark-submit writes a JSON blob describing the submission to
        # stderr; scan for the first top-level {...} block and parse it.
        if "{" in err:
            lines = err.splitlines()
            jsonStr = ""
            startScan = False
            for l in lines:
                if l.startswith("}") and startScan:
                    jsonStr += l + os.linesep
                    startScan = False
                elif startScan:
                    jsonStr += l + os.linesep
                elif l.startswith("{"):
                    startScan = True
                    jsonStr += l + os.linesep
            response = json.loads(jsonStr)
            return (response, process.returncode)

        return (None, process.returncode)
def run(dispatcher, args, verbose, props=[]):
    """Run spark-submit with the passed-in parameters.

    e.g.: ./bin/spark-submit --deploy-mode cluster
    --class org.apache.spark.examples.SparkPi
    --master mesos://10.127.131.174:8077 --executor-memory 1G
    --total-executor-cores 100 --driver-memory 1G
    http://10.127.131.174:8000/spark-examples_2.10-1.3.0-SNAPSHOT.jar 30

    NOTE(review): `props=[]` is a mutable default argument; it is only
    read here (never mutated), so it is currently safe, but a None
    default would be the conventional form.
    """
    if not check_java():
        return (None, 1)

    # When authentication is required, route the submission through a
    # local proxy thread that injects the token.
    proxying = _should_proxy(dispatcher)
    proxy_thread = ProxyThread(_get_token() if proxying else None,
                               dispatcher)
    if proxying:
        proxy_thread.start()
        dispatcher = 'http://localhost:{}'.format(proxy_thread.port())

    command = _get_command(dispatcher, args)

    # Forward any extra driver properties through SPARK_JAVA_OPTS.
    extra_env = {"SPARK_JAVA_OPTS": ' '.join(props)}
    env = dict(os.environ, **extra_env)

    # On Windows python 2 complains about unicode in env
    if util.is_windows_platform() and sys.version_info[0] < 3:
        env = dict([str(key), str(value)]
                   for key, value in env.iteritems())

    process = subprocess.Popen(
        command,
        env=env,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE)
    stdout, stderr = process.communicate()

    # The submission has finished; tear down the proxy before examining
    # the result.
    if proxying:
        proxy_thread.proxy.shutdown()
        proxy_thread.join()

    if verbose is True:
        print("Ran command: " + " ".join(command))
        print("With added env vars: {0}".format(extra_env))
        print("Stdout:")
        print(stdout)
        print("Stderr:")
        print(stderr)

    err = stderr.decode("utf-8")
    if process.returncode != 0:
        # Translate common gateway HTTP errors into friendlier messages.
        if "502 Bad Gateway" in err:
            print("Spark service is not found in your DCOS cluster.")
            return (None, process.returncode)
        if "500 Internal Server Error" in err:
            print("Error reaching Spark cluster endpoint. Please make sure "
                  "Spark service is in running state in Marathon.")
            return (None, process.returncode)
        print("Spark submit failed:")
        print(stderr)
        return (None, process.returncode)
    else:
        # spark-submit writes a JSON blob describing the submission to
        # stderr; scan for the first top-level {...} block and parse it.
        if "{" in err:
            lines = err.splitlines()
            jsonStr = ""
            startScan = False
            for l in lines:
                if l.startswith("}") and startScan:
                    jsonStr += l + os.linesep
                    startScan = False
                elif startScan:
                    jsonStr += l + os.linesep
                elif l.startswith("{"):
                    startScan = True
                    jsonStr += l + os.linesep
            response = json.loads(jsonStr)
            return (response, process.returncode)

        return (None, process.returncode)