def check_kibana_plugin_installed(plugin_name, service_name=SERVICE_NAME):
    task_sandbox = sdk_cmd.get_task_sandbox_path(service_name)
    # Environment variables aren't available on DC/OS 1.9 so we manually inject
    # MESOS_SANDBOX (and can't use ELASTIC_VERSION).
    #
    # TODO(mpereira): improve this by making task environment variables
    # available in task_exec commands on 1.9.
    # Ticket: https://jira.mesosphere.com/browse/INFINITY-3360
    cmd = "bash -c 'KIBANA_DIRECTORY=$(ls -d {}/kibana-*-linux-x86_64); $KIBANA_DIRECTORY/bin/kibana-plugin list'".format(
        task_sandbox
    )
    _, stdout, _ = sdk_cmd.marathon_task_exec(service_name, cmd)
    return plugin_name in stdout
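# A minimal usage sketch, not part of the original suite: polling until the
# plugin shows up, assuming the `retrying` library is available. The helper
# name `wait_for_kibana_plugin` and the timeout values are illustrative
# assumptions; only check_kibana_plugin_installed() comes from the code above.
import retrying


@retrying.retry(
    wait_fixed=5 * 1000,
    stop_max_delay=5 * 60 * 1000,
    retry_on_result=lambda installed: not installed,
)
def wait_for_kibana_plugin(plugin_name, service_name=SERVICE_NAME):
    # Retries until check_kibana_plugin_installed() reports the plugin as present.
    return check_kibana_plugin_installed(plugin_name, service_name=service_name)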
def test_dispatcher_task_stdout(setup_spark):
    task_id = service_name.lstrip("/").replace("/", "_")
    task = sdk_cmd._get_task_info(task_id)
    if not task:
        raise Exception("Failed to get '{}' task".format(task_id))

    task_sandbox_path = sdk_cmd.get_task_sandbox_path(task_id)
    if not task_sandbox_path:
        raise Exception("Failed to get '{}' sandbox path".format(task_id))

    # Browse the task sandbox via the agent's files API and verify that the
    # dispatcher's stdout file is non-empty.
    agent_id = task["slave_id"]
    task_sandbox = sdk_cmd.cluster_request(
        "GET", "/slave/{}/files/browse?path={}".format(agent_id, task_sandbox_path)
    ).json()
    stdout_file = [f for f in task_sandbox if f["path"].endswith("/stdout")][0]
    assert stdout_file["size"] > 0, "stdout file should have content"
def test_task_stdout():
    service_name = utils.FOLDERED_SPARK_SERVICE_NAME
    try:
        task_id = service_name.lstrip("/").replace("/", "_")
        utils.require_spark(service_name=service_name)

        task = sdk_cmd._get_task_info(task_id)
        if not task:
            raise Exception("Failed to get '{}' task".format(task_id))

        task_sandbox_path = sdk_cmd.get_task_sandbox_path(task_id)
        if not task_sandbox_path:
            raise Exception("Failed to get '{}' sandbox path".format(task_id))

        # Browse the task sandbox via the agent's files API and verify that the
        # foldered dispatcher's stdout file is non-empty.
        agent_id = task["slave_id"]
        task_sandbox = sdk_cmd.cluster_request(
            "GET", "/slave/{}/files/browse?path={}".format(agent_id, task_sandbox_path)
        ).json()
        stdout_file = [f for f in task_sandbox if f["path"].endswith("/stdout")][0]
        assert stdout_file["size"] > 0, "stdout file should have content"
    finally:
        sdk_install.uninstall(utils.SPARK_PACKAGE_NAME, service_name)
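# A possible refactor sketch, not part of the original tests: both stdout tests
# above share the same sandbox-browsing steps, which could be factored into a
# helper like this. `assert_stdout_not_empty` is a hypothetical name; the
# sdk_cmd calls and the /slave/.../files/browse endpoint are the ones already
# used in the tests above.
def assert_stdout_not_empty(task_id):
    task = sdk_cmd._get_task_info(task_id)
    if not task:
        raise Exception("Failed to get '{}' task".format(task_id))

    task_sandbox_path = sdk_cmd.get_task_sandbox_path(task_id)
    if not task_sandbox_path:
        raise Exception("Failed to get '{}' sandbox path".format(task_id))

    # List the sandbox via the agent's files API and check the stdout entry.
    agent_id = task["slave_id"]
    task_sandbox = sdk_cmd.cluster_request(
        "GET", "/slave/{}/files/browse?path={}".format(agent_id, task_sandbox_path)
    ).json()
    stdout_file = [f for f in task_sandbox if f["path"].endswith("/stdout")][0]
    assert stdout_file["size"] > 0, "stdout file should have content"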