def api_url():
    client = marathon.create_client()
    tasks = client.get_tasks("kafka")

    if len(tasks) == 0:
        raise CliError("Kafka is not running")

    base_url = util.get_config().get('kafka.url')
    if base_url is not None:
        base_url = base_url.rstrip("/")
    else:
        base_url = util.get_config().get('core.dcos_url').rstrip("/")
        base_url += '/service/kafka'

    return base_url
def wait_and_track(subproc):
    """ Run a command and report it to analytics services.

    :param subproc: Subprocess to capture
    :type subproc: Popen
    :returns: exit code of subproc
    :rtype: int
    """

    rollbar.init(ROLLBAR_SERVER_POST_KEY, 'prod')

    conf = util.get_config()
    report = conf.get('core.reporting', True)
    with ThreadPoolExecutor(max_workers=2) as pool:
        if report:
            _segment_track_cli(pool, conf)

        exit_code, err = wait_and_capture(subproc)

        # We only want to catch exceptions, not other stderr messages
        # (such as "task does not exist"), so we look for the 'Traceback'
        # string. This only works for python, so we'll need to revisit
        # this in the future when we support subcommands written in other
        # languages.
        if report and 'Traceback' in err:
            _track_err(pool, exit_code, err, conf)

    return exit_code
def get_arangodb_framework(name):
    url = util.get_config().get('core.dcos_url') + ":5050/master/state.json"
    try:
        response = requests.get(url, timeout=15)
    except requests.exceptions.ConnectionError:
        print("cannot connect to '" + url + "', please check your config")
        sys.exit(1)

    if response.status_code >= 200 and response.status_code < 300:
        json = response.json()
        if 'frameworks' not in json:
            print(json)
            sys.exit(1)
        frameworks = json['frameworks']
        for framework in frameworks:
            if name == framework['name']:
                return framework
        print("ArangoDB framework '" + name + "' is not running yet.")
        sys.exit(1)
    else:
        print("Bad response getting master state. Status code: " +
              str(response.status_code))
        sys.exit(1)
def _base_properties(conf=None):
    """
    These properties are sent with every analytics event.

    :param conf: dcos config file
    :type conf: Toml
    :rtype: dict
    """

    if not conf:
        conf = util.get_config()

    if len(sys.argv) > 1:
        cmd = 'dcos ' + _command()
        full_cmd = 'dcos ' + ' '.join(sys.argv[1:])
    else:
        cmd = 'dcos'
        full_cmd = 'dcos'

    try:
        dcos_hostname = six.moves.urllib.parse.urlparse(
            conf.get('core.dcos_url')).hostname
    except:
        logger.exception('Unable to find the hostname of the cluster.')
        dcos_hostname = None

    return {
        'cmd': cmd,
        'full_cmd': full_cmd,
        'dcoscli.version': dcoscli.version,
        'python_version': str(sys.version_info),
        'config': json.dumps(list(conf.property_items())),
        'DCOS_HOSTNAME': dcos_hostname,
    }
def _search(json_, query):
    """Search for matching packages.

    :param json_: output json if True
    :type json_: bool
    :param query: The search term
    :type query: str
    :returns: Process status
    :rtype: int
    """

    if not query:
        query = ''

    config = util.get_config()
    results = [index_entry.as_dict()
               for index_entry in package.search(query, config)]

    if any(result['packages'] for result in results) or json_:
        emitting.publish_table(emitter,
                               results,
                               tables.package_search_table,
                               json_)
    else:
        raise DCOSException('No packages found.')

    return 0
def get_spark_dispatcher():
    dcos_spark_url = os.getenv("DCOS_SPARK_URL")
    if dcos_spark_url is not None:
        return dcos_spark_url

    base_url = util.get_config().get('core.dcos_url')
    return urllib.parse.urljoin(base_url, '/service/' + service.app_id() + '/')
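# Illustrative only: a quick check of how urllib.parse.urljoin composes the
# dispatcher URL above. The hostname and service path used here are made up.
import urllib.parse

base = 'http://dcos.example.com/some/path'
print(urllib.parse.urljoin(base, '/service/' + 'spark' + '/'))
# -> http://dcos.example.com/service/spark/  (a leading slash in the second
#    argument replaces any existing path on the base URL)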
def api_url(slash_command):
    base_config_url = util.get_config().get('cassandra.url')
    if base_config_url is not None:
        base_config_url = base_config_url.rstrip("/")
    else:
        base_config_url = "%s/service/%s" % (base_url(), get_fwk_name())
    return "%s/v1%s" % (base_config_url, slash_command)
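# Illustrative only: what api_url() above would assemble for a hypothetical
# cluster, using plain strings in place of util.get_config(), base_url() and
# get_fwk_name(). The URL, framework name and command are assumptions.
base = "http://dcos.example.com"   # stand-in for base_url()
fwk = "cassandra"                  # stand-in for get_fwk_name()
base_config_url = "%s/service/%s" % (base, fwk)
print("%s/v1%s" % (base_config_url, "/nodes"))
# -> http://dcos.example.com/service/cassandra/v1/nodes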
def _page(output, pager_command=None):
    """Conditionally pipes the supplied output through a pager.

    :param output: output to display
    :type output: object
    :param pager_command: pager command to pipe the output through
    :type pager_command: str
    """

    output = six.text_type(output)

    if pager_command is None:
        pager_command = 'less -R'

    if not sys.stdout.isatty() or util.is_windows_platform():
        print(output)
        return

    num_lines = output.count('\n')
    exceeds_tty_height = pager.getheight() - 1 < num_lines

    paginate = util.get_config().get("core.pagination", True)
    if exceeds_tty_height and paginate:
        pydoc.pipepager(output, cmd=pager_command)
    else:
        print(output)
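# A minimal, self-contained sketch of the same "page only when the output is
# taller than the terminal" decision, using only the standard library (the
# original relies on dcos-specific pager/util helpers).
import pydoc
import shutil
import sys


def page_if_needed(text, pager_command='less -R'):
    if not sys.stdout.isatty():
        print(text)
        return
    tty_height = shutil.get_terminal_size().lines
    if text.count('\n') > tty_height - 1:
        pydoc.pipepager(text, cmd=pager_command)
    else:
        print(text)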
def _get_dcos_acs_auth(username, password, hostname):
    """Get authentication flow for dcos acs auth

    :param username: username for authentication
    :type username: str
    :param password: password for authentication
    :type password: str
    :param hostname: hostname for credentials
    :type hostname: str
    :returns: DCOSAcsAuth
    :rtype: AuthBase
    """

    toml_config = util.get_config()
    token = toml_config.get("core.dcos_acs_token")
    if token is None:
        dcos_url = toml_config.get("core.dcos_url")
        url = urllib.parse.urljoin(dcos_url, 'acs/api/v1/auth/login')
        if password is None:
            username, password = _get_auth_credentials(username, hostname)
        creds = {"uid": username, "password": password}

        # using private method here, so we don't retry on this request;
        # an error here will be bubbled up to _request_with_auth
        response = _request('post', url, json=creds)
        if response.status_code == 200:
            token = response.json()['token']
            config.set_val("core.dcos_acs_token", token)

    return DCOSAcsAuth(token)
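# A sketch of what a token-based requests auth class along the lines of
# DCOSAcsAuth might look like; the class name and the exact header format
# are assumptions here, not the library's verified implementation.
import requests


class TokenAuth(requests.auth.AuthBase):
    """Attach an ACS-style token to every outgoing request."""

    def __init__(self, token):
        self.token = token

    def __call__(self, request):
        request.headers['Authorization'] = 'token={}'.format(self.token)
        return request


# usage sketch: requests.get(url, auth=TokenAuth(token))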
def _base_properties(conf=None):
    """
    These properties are sent with every analytics event.

    :param conf: dcos config file
    :type conf: Toml
    :rtype: dict
    """

    if not conf:
        conf = util.get_config()

    if len(sys.argv) > 1:
        cmd = 'dcos ' + _command()
        full_cmd = 'dcos ' + ' '.join(sys.argv[1:])
    else:
        cmd = 'dcos'
        full_cmd = 'dcos'

    return {
        'cmd': cmd,
        'full_cmd': full_cmd,
        'dcoscli.version': dcoscli.version,
        'python_version': str(sys.version_info),
        'config': json.dumps(list(conf.property_items()))
    }
def _login():
    """
    :returns: process status
    :rtype: int
    """

    # every call to login will generate a new token if applicable
    _logout()

    conf = util.get_config()
    dcos_url = conf.get("core.dcos_url")
    if dcos_url is None:
        msg = ("Please provide the url to your DCOS cluster: "
               "`dcos config set core.dcos_url`")
        raise DCOSException(msg)

    # hit protected endpoint which will prompt for auth if cluster has auth
    try:
        url = urllib.parse.urljoin(dcos_url, 'exhibitor')
        http.get(url)
    # if the user is authenticated, they have effectively "logged in" even if
    # they are not authorized for this endpoint
    except DCOSAuthorizationException:
        pass

    emitter.publish("Login successful!")
    return 0
def _base_properties(conf=None):
    """
    These properties are sent with every analytics event.

    :param conf: dcos config file
    :type conf: Toml
    :rtype: dict
    """

    if not conf:
        conf = util.get_config()

    if len(sys.argv) > 1:
        cmd = "dcos " + _command()
        full_cmd = "dcos " + " ".join(sys.argv[1:])
    else:
        cmd = "dcos"
        full_cmd = "dcos"

    return {
        "cmd": cmd,
        "full_cmd": full_cmd,
        "dcoscli.version": dcoscli.version,
        "python_version": str(sys.version_info),
        "config": json.dumps(list(conf.property_items())),
    }
def _main():
    signal.signal(signal.SIGINT, signal_handler)

    http.silence_requests_warnings()

    config = util.get_config()
    set_ssl_info_env_vars(config)

    args = docopt.docopt(default_doc("dcos"), options_first=True)

    log_level = args['--log-level']
    if log_level and not _config_log_level_environ(log_level):
        return 1

    if args['--debug']:
        os.environ[constants.DCOS_DEBUG_ENV] = 'true'

    util.configure_process_from_environ()

    if args['--version']:
        return _get_versions(config.get("core.dcos_url"))

    command = args['<command>']

    if not command:
        command = "help"

    if command in subcommand.default_subcommands():
        sc = SubcommandMain(command, args['<args>'])
    else:
        executable = subcommand.command_executables(command)
        sc = subcommand.SubcommandProcess(
            executable, command, args['<args>'])

    exitcode, _ = sc.run_and_capture()
    return exitcode
def _request_with_auth(response,
                       method,
                       url,
                       is_success=_default_is_success,
                       timeout=None,
                       verify=None,
                       **kwargs):
    """Try request (3 times) with credentials if 401 returned from server

    :param response: requests.response
    :type response: requests.Response
    :param method: method for the new Request object
    :type method: str
    :param url: URL for the new Request object
    :type url: str
    :param is_success: Defines successful status codes for the request
    :type is_success: Function from int to bool
    :param timeout: request timeout
    :type timeout: int
    :param verify: whether to verify SSL certs or path to cert(s)
    :type verify: bool | str
    :param kwargs: Additional arguments to requests.request
        (see http://docs.python-requests.org/en/latest/api/#requests.request)
    :type kwargs: dict
    :rtype: requests.Response
    """

    i = 0
    while i < 3 and response.status_code == 401:
        parsed_url = urlparse(url)
        hostname = parsed_url.hostname
        auth_scheme, realm = get_auth_scheme(response)
        creds = (hostname, auth_scheme, realm)

        with lock:
            if creds not in AUTH_CREDS:
                auth = _get_http_auth(response, parsed_url, auth_scheme)
            else:
                auth = AUTH_CREDS[creds]

        # try request again, with auth
        response = _request(method, url, is_success, timeout, auth,
                            verify, **kwargs)

        # only store credentials if they're valid
        with lock:
            if creds not in AUTH_CREDS and response.status_code == 200:
                AUTH_CREDS[creds] = auth

            # acs invalid token
            elif response.status_code == 401 and auth_scheme == "acsjwt":
                if util.get_config().get("core.dcos_acs_token") is not None:
                    config.unset("core.dcos_acs_token")

        i += 1

    if response.status_code == 401:
        raise DCOSAuthenticationException(response)

    return response
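# A minimal, generic sketch of the same retry-on-401 idea using requests
# directly (no shared credential cache or dcos-specific auth schemes).
# The function and prompt helper names are illustrative only.
import requests


def get_with_retry(url, prompt_for_credentials, max_attempts=3):
    response = requests.get(url)
    for _ in range(max_attempts):
        if response.status_code != 401:
            break
        # ask the caller for credentials and retry with basic auth
        username, password = prompt_for_credentials()
        response = requests.get(url, auth=(username, password))
    return response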
def main():
    if len(sys.argv) > 1 and sys.argv[1] == "kubectl":
        args = sys.argv[2:]
    else:
        args = sys.argv[1:]

    # special --info case
    if len(args) == 1 and args[0] == "--info":
        print("Deploy and manage pods on Kubernetes")
        sys.exit(0)

    # get api url
    config = util.get_config()
    dcos_url = config.get('core.dcos_url', None)
    if dcos_url is None or dcos_url == "":
        print("Error: dcos core.dcos_url is not set")
        sys.exit(2)

    # check certificates?
    core_verify_ssl = config.get('core.ssl_verify', 'true')
    verify_certs = str(core_verify_ssl).lower() in ['true', 'yes', '1']

    # silence warnings from requests.packages.urllib3. See DCOS-1007.
    if not verify_certs:
        import requests.packages.urllib3
        requests.packages.urllib3.disable_warnings()

    # check whether kubectl binary exists and download if not
    try:
        from urlparse import urljoin  # python 2
    except ImportError:
        from urllib.parse import urljoin  # python 3
    master = urljoin(dcos_url, "service/kubernetes")
    try:
        kubectl_path, kubectl_url = \
            kubectl_binary_path_and_url(master, verify=verify_certs)
    except Exception as err:
        print("Error: " + str(err))
        return 2

    if not os.path.exists(os.path.dirname(kubectl_path)):
        os.makedirs(os.path.dirname(kubectl_path))
    if not os.path.exists(kubectl_path):
        download_kubectl(kubectl_url, kubectl_path)

    # call kubectl with parameters
    from subprocess import call
    env = os.environ.copy()
    if 'KUBERNETES_MASTER' in env:
        del env['KUBERNETES_MASTER']
    ret_code = call([
        kubectl_path,
        "--server=" + master,
        "--insecure-skip-tls-verify=" + str(not verify_certs).lower(),
        "--context=dcos-kubectl",  # to nil current context settings
        "--username=dcos-kubectl"  # to avoid username prompt
    ] + args, env=env)
    sys.exit(ret_code)
def get_spark_dispatcher():
    dcos_spark_url = os.getenv("DCOS_SPARK_URL")
    if dcos_spark_url is not None:
        return dcos_spark_url

    base_url = util.get_config().get('core.dcos_url')
    # Remove http:// prefix.
    return base_url[7:] + '/service/sparkcli/'
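# The slice above assumes the configured URL always starts with exactly
# "http://". Shown here only as an illustration, parsing the URL first is a
# more defensive way to drop the scheme; the hostname is made up.
import urllib.parse

parsed = urllib.parse.urlparse('https://dcos.example.com')
print(parsed.netloc + '/service/sparkcli/')
# -> dcos.example.com/service/sparkcli/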
def __init__(self):
    config = util.get_config()
    self._dcos_url = config.get("core.dcos_url")
    if self._dcos_url is None:
        raise util.missing_config_exception(['core.dcos_url'])
    self._mesos_master_url = config.get('core.mesos_master_url')
    self._timeout = config.get('core.timeout')
def check_if_user_authenticated():
    """Check if user is authenticated already

    :returns: user auth status
    :rtype: boolean
    """

    dcos_config = util.get_config()
    return dcos_config.get('core.email', '') != ''
def __init__(self):
    config = util.get_config()
    self._dcos_url = None
    self._mesos_master_url = None

    mesos_master_url = config.get('core.mesos_master_url')
    if mesos_master_url is None:
        self._dcos_url = util.get_config_vals(config, ['core.dcos_url'])[0]
    else:
        self._mesos_master_url = mesos_master_url
def _get_cosmos_url():
    """
    :returns: cosmos base url
    :rtype: str
    """

    config = util.get_config()
    cosmos_url = config.get("package.cosmos_url")
    if cosmos_url is None:
        cosmos_url = util.get_config_vals(['core.dcos_url'], config)[0]
    return cosmos_url
def get_arangodb_webui(name, internal):
    if name is None:
        name = "arangodb"
    if internal:
        arangodb_framework = get_arangodb_framework(name)
        return arangodb_framework['webui_url'].rstrip("/")
    else:
        base_url = util.get_config().get('core.dcos_url').rstrip("/")
        return base_url + "/service/" + name
def _logout():
    """
    Logout the user from dcos acs auth or oauth

    :returns: process status
    :rtype: int
    """

    if util.get_config().get("core.dcos_acs_token") is not None:
        config.unset("core.dcos_acs_token")
    return 0
def _main():
    signal.signal(signal.SIGINT, signal_handler)

    args = docopt.docopt(
        _doc(),
        version='dcos version {}'.format(dcoscli.version),
        options_first=True)

    log_level = args['--log-level']
    if log_level and not _config_log_level_environ(log_level):
        return 1

    if args['--debug']:
        os.environ[constants.DCOS_DEBUG_ENV] = 'true'

    util.configure_process_from_environ()

    if args['<command>'] != 'config' and \
            not auth.check_if_user_authenticated():
        auth.force_auth()

    config = util.get_config()
    set_ssl_info_env_vars(config)

    command = args['<command>']
    http.silence_requests_warnings()

    if not command:
        command = "help"

    executable = subcommand.command_executables(command)

    cluster_id = None
    if dcoscli.version != 'SNAPSHOT' and command and \
            command not in ["config", "help"]:
        try:
            cluster_id = mesos.DCOSClient().metadata().get('CLUSTER_ID')
        except DCOSAuthenticationException:
            raise
        except:
            msg = 'Unable to get the cluster_id of the cluster.'
            logger.exception(msg)

    # the call to retrieve cluster_id must happen before we run the subcommand
    # so that if you have auth enabled we don't ask for user/pass multiple
    # times (with the text being out of order) before we can cache the auth
    # token
    subproc = Popen([executable, command] + args['<args>'], stderr=PIPE)
    if dcoscli.version != 'SNAPSHOT':
        return analytics.wait_and_track(subproc, cluster_id)
    else:
        return analytics.wait_and_capture(subproc)[0]
def __init__(self):
    config = util.get_config()
    self._dcos_url = None
    self._mesos_master_url = None

    mesos_master_url = config.get('core.mesos_master_url')
    if mesos_master_url is None:
        self._dcos_url = util.get_config_vals(['core.dcos_url'], config)[0]
    else:
        self._mesos_master_url = mesos_master_url

    self._timeout = config.get('core.timeout')
def print_env():
    url = util.get_config().get('core.dcos_url')
    if url.startswith("http://"):
        url = url[7:]
    else:
        return 1
    if url.endswith("/"):
        url = url[:-1]
    print("export DOCKER_HOST=" + url + ":80/service/swarm")
    return 0
def api_url(framework):
    client = marathon.create_client()
    tasks = client.get_tasks(framework)

    if len(tasks) == 0:
        usage()
        print("\nTry running the following to verify that " + framework +
              " is the name \nof your service instance:\n")
        print(" dcos service\n")
        raise CliError("Riak is not running, try with "
                       "--framework <framework-name>.")

    base_url = util.get_config().get('core.dcos_url').rstrip("/")
    return base_url + "/service/" + framework + "/"
def api_url(app_id="kafka"):
    client = marathon.create_client()
    tasks = client.get_tasks(app_id)

    if len(tasks) == 0:
        raise CliError("Kafka is not running")

    base_url = util.get_config().get('kafka.url')
    if base_url is not None:
        base_url = base_url.rstrip("/")

    dcos_url = util.get_config().get('core.dcos_url')
    if dcos_url is not None:
        base_url = util.get_config().get('core.dcos_url').rstrip("/")
        base_url += '/service/kafka'

    cell_url = util.get_config().get('core.cell_url')
    if cell_url is not None:
        base_url = cell_url.format(service=app_id.replace("/", "_"))

    return base_url
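# Illustrative only: how the cell_url template branch above would behave for
# a hypothetical template and a nested app id (both values are made up).
cell_url = "https://{service}.cells.example.com"
app_id = "prod/kafka"
print(cell_url.format(service=app_id.replace("/", "_")))
# -> https://prod_kafka.cells.example.com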
def __init__(self):
    config = util.get_config()
    self._dcos_url = None
    self._mesos_master_url = None

    mesos_master_url = config.get("core.mesos_master_url")
    if mesos_master_url is None:
        self._dcos_url = util.get_config_vals(["core.dcos_url"], config)[0]
    else:
        self._mesos_master_url = mesos_master_url

    self._timeout = config.get("core.timeout")
def _main():
    signal.signal(signal.SIGINT, signal_handler)

    args = docopt.docopt(_doc(),
                         version='dcos version {}'.format(dcoscli.version),
                         options_first=True)

    log_level = args['--log-level']
    if log_level and not _config_log_level_environ(log_level):
        return 1

    if args['--debug']:
        os.environ[constants.DCOS_DEBUG_ENV] = 'true'

    util.configure_process_from_environ()

    if args['<command>'] != 'config' and \
            not auth.check_if_user_authenticated():
        auth.force_auth()

    config = util.get_config()
    set_ssl_info_env_vars(config)

    command = args['<command>']
    http.silence_requests_warnings()

    if not command:
        command = "help"

    executable = subcommand.command_executables(command)

    cluster_id = None
    if dcoscli.version != 'SNAPSHOT' and command and \
            command not in ["config", "help"]:
        try:
            cluster_id = mesos.DCOSClient().metadata().get('CLUSTER_ID')
        except DCOSAuthenticationException:
            raise
        except:
            msg = 'Unable to get the cluster_id of the cluster.'
            logger.exception(msg)

    # the call to retrieve cluster_id must happen before we run the subcommand
    # so that if you have auth enabled we don't ask for user/pass multiple
    # times (with the text being out of order) before we can cache the auth
    # token
    subproc = Popen([executable, command] + args['<args>'], stderr=PIPE)
    if dcoscli.version != 'SNAPSHOT':
        return analytics.wait_and_track(subproc, cluster_id)
    else:
        return analytics.wait_and_capture(subproc)[0]
def _update(validate):
    """Update local package definitions from sources.

    :param validate: Whether to validate package content when updating
        sources.
    :type validate: bool
    :returns: Process status
    :rtype: int
    """

    config = util.get_config()
    package.update_sources(config, validate)
    return 0
def _request(self):
    self.server._dcos_auth_token
    dcos_url = util.get_config().get('core.dcos_url')
    url = dcos_url + '/service/sparkcli' + self.path
    if self.headers.getheader('content-length'):
        body = self.rfile.read(
            int(self.headers.getheader('content-length')))
        req = urllib.request.Request(url, body)
    else:
        body = ''
        req = urllib.request.Request(url)

    logger.debug('=== BEGIN REQUEST ===')
    logger.debug(url)
    logger.debug('\n')

    for line in self.headers.headers:
        key, value = line.strip().split(':', 1)
        logger.debug('{0}:{1}'.format(key, value))
        req.add_header(key, value)
    req.add_header(
        'Authorization',
        'token={}'.format(self.server._dcos_auth_token))

    logger.debug('\n')
    logger.debug(body)

    try:
        resp = urllib.request.urlopen(req)
    except urllib.error.HTTPError as e:
        resp = e

    self.send_response(resp.getcode())

    logger.debug('=== BEGIN RESPONSE ===')
    logger.debug(resp.getcode())

    for header in resp.info().headers:
        key, value = header.strip().split(':', 1)
        self.send_header(key, value)
        logger.debug('{0}:{1}'.format(key, value))
    self.end_headers()

    body = resp.read()
    self.wfile.write(body)

    logger.debug('\n')
    logger.debug(body)
def get_master_client(config=None):
    """Create a Mesos master client using the URLs stored in the user's
    configuration.

    :param config: config
    :type config: Toml
    :returns: mesos master client
    :rtype: MasterClient
    """

    if config is None:
        config = util.get_config()

    mesos_url = _get_mesos_url(config)
    return MasterClient(mesos_url)
def _list_sources():
    """List configured package sources.

    :returns: Process status
    :rtype: int
    """

    config = util.get_config()
    sources = package.list_sources(config)

    for source in sources:
        emitter.publish("{} {}".format(source.hash(), source.url))

    return 0
def _request(self):
    self.server._dcos_auth_token
    dcos_url = util.get_config().get('core.dcos_url')
    url = dcos_url + '/service/sparkcli' + self.path
    if self.headers.getheader('content-length'):
        body = self.rfile.read(
            int(self.headers.getheader('content-length')))
        req = urllib.request.Request(url, body)
    else:
        body = ''
        req = urllib.request.Request(url)

    logger.debug('=== BEGIN REQUEST ===')
    logger.debug(url)
    logger.debug('\n')

    for line in self.headers.headers:
        key, value = line.strip().split(':', 1)
        logger.debug('{0}:{1}'.format(key, value))
        req.add_header(key, value)
    req.add_header('Authorization',
                   'token={}'.format(self.server._dcos_auth_token))

    logger.debug('\n')
    logger.debug(body)

    try:
        resp = urllib.request.urlopen(req)
    except urllib.error.HTTPError as e:
        resp = e

    self.send_response(resp.getcode())

    logger.debug('=== BEGIN RESPONSE ===')
    logger.debug(resp.getcode())

    for header in resp.info().headers:
        key, value = header.strip().split(':', 1)
        self.send_header(key, value)
        logger.debug('{0}:{1}'.format(key, value))
    self.end_headers()

    body = resp.read()
    self.wfile.write(body)

    logger.debug('\n')
    logger.debug(body)
def set_val(name, value):
    """
    :param name: name of parameter
    :type name: str
    :param value: value to set to parameter `name`
    :type value: str
    :returns: Toml config
    :rtype: Toml
    """

    toml_config = util.get_config(True)

    section, subkey = split_key(name)

    config_schema = get_config_schema(section)

    new_value = jsonitem.parse_json_value(subkey, value, config_schema)

    toml_config_pre = copy.deepcopy(toml_config)
    if section not in toml_config_pre._dictionary:
        toml_config_pre._dictionary[section] = {}

    value_exists = name in toml_config
    old_value = toml_config.get(name)

    toml_config[name] = new_value

    check_config(toml_config_pre, toml_config)
    save(toml_config)

    msg = "[{}]: ".format(name)
    if name == "core.dcos_acs_token":
        if not value_exists:
            msg += "set"
        elif old_value == new_value:
            msg += "already set to that value"
        else:
            msg += "changed"
    elif not value_exists:
        msg += "set to '{}'".format(new_value)
    elif old_value == new_value:
        msg += "already set to '{}'".format(old_value)
    else:
        msg += "changed from '{}' to '{}'".format(old_value, new_value)

    emitter.publish(DefaultError(msg))

    return toml_config
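# A self-contained sketch of the message selection above, using a plain dict
# in place of the Toml config so the set/changed/already-set logic (and the
# token-masking special case) can be exercised in isolation. Names here are
# illustrative only.
def describe_change(store, name, new_value):
    value_exists = name in store
    old_value = store.get(name)
    store[name] = new_value

    msg = "[{}]: ".format(name)
    if name == "core.dcos_acs_token":
        # never echo the secret value back
        if not value_exists:
            return msg + "set"
        return msg + ("already set to that value" if old_value == new_value
                      else "changed")
    if not value_exists:
        return msg + "set to '{}'".format(new_value)
    if old_value == new_value:
        return msg + "already set to '{}'".format(old_value)
    return msg + "changed from '{}' to '{}'".format(old_value, new_value)


print(describe_change({}, "core.dcos_url", "http://dcos.example.com"))
# -> [core.dcos_url]: set to 'http://dcos.example.com'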
def _validate():
    """
    :returns: process status
    :rtype: int
    """

    toml_config = util.get_config(True)

    errs = util.validate_json(toml_config._dictionary,
                              config.generate_root_schema(toml_config))
    if len(errs) != 0:
        emitter.publish(util.list_to_err(errs))
        return 1

    emitter.publish("Congratulations, your configuration is valid!")
    return 0
def create_client(config=None):
    """Creates a Marathon client with the supplied configuration.

    :param config: configuration dictionary
    :type config: config.Toml
    :returns: Marathon client
    :rtype: dcos.marathon.Client
    """

    if config is None:
        config = util.get_config()

    marathon_url = _get_marathon_url(config)

    logger.info('Creating marathon client with: %r', marathon_url)
    return Client(marathon_url)
def create_client(config=None):
    """Creates a Marathon client with the supplied configuration.

    :param config: configuration dictionary
    :type config: config.Toml
    :returns: Marathon client
    :rtype: dcos.marathon.Client
    """

    if config is None:
        config = util.get_config()

    marathon_url = _get_marathon_url(config)
    timeout = config.get('core.timeout', http.DEFAULT_TIMEOUT)

    logger.info('Creating marathon client with: %r', marathon_url)
    return Client(marathon_url, timeout=timeout)
def _unset(name, index):
    """
    :returns: process status
    :rtype: int
    """

    toml_config = util.get_config(True)
    toml_config_pre = copy.deepcopy(toml_config)
    section = name.split(".", 1)[0]
    if section not in toml_config_pre._dictionary:
        toml_config_pre._dictionary[section] = {}
    value = toml_config.pop(name, None)

    if value is None:
        raise DCOSException("Property {!r} doesn't exist".format(name))
    elif isinstance(value, collections.Mapping):
        raise DCOSException(_generate_choice_msg(name, value))
    elif ((isinstance(value, collections.Sequence) and
           not isinstance(value, six.string_types)) and index is not None):
        index = util.parse_int(index)

        if not value:
            raise DCOSException(
                'Index ({}) is out of bounds - [{}] is empty'.format(
                    index, name))
        if index < 0 or index >= len(value):
            raise DCOSException(
                'Index ({}) is out of bounds - possible values are '
                'between {} and {}'.format(index, 0, len(value) - 1))

        popped_value = value.pop(index)
        emitter.publish("[{}]: removed element '{}' at index '{}'".format(
            name, popped_value, index))

        toml_config[name] = value
        config.save(toml_config)
        return 0
    elif index is not None:
        raise DCOSException(
            'Unsetting based on an index is only supported for lists')
    else:
        emitter.publish("Removed [{}]".format(name))
        config.save(toml_config)
        return 0
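# Illustrative only: the index bounds check above, reduced to a plain list so
# the error cases are easy to see. The helper name is made up.
def pop_at(values, index):
    if not values:
        raise IndexError('list is empty')
    if index < 0 or index >= len(values):
        raise IndexError(
            'possible values are between 0 and {}'.format(len(values) - 1))
    return values.pop(index)


print(pop_at(['a', 'b', 'c'], 1))  # -> b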
def _prepend(name, value):
    """
    :returns: process status
    :rtype: int
    """

    toml_config = util.get_config(True)

    python_value = _parse_array_item(name, value)

    toml_config_pre = copy.deepcopy(toml_config)
    section = name.split(".", 1)[0]
    if section not in toml_config_pre._dictionary:
        toml_config_pre._dictionary[section] = {}

    toml_config[name] = python_value + toml_config.get(name, [])

    _check_config(toml_config_pre, toml_config)
    config.save(toml_config)
    return 0
def _save_auth_keys(key_dict):
    """
    :param key_dict: auth parameters dict
    :type key_dict: dict
    :rtype: None
    """

    toml_config = util.get_config(True)

    section = 'core'
    config_schema = json.loads(
        pkg_resources.resource_string(
            'dcoscli',
            'data/config-schema/core.json').decode('utf-8'))
    for k, v in iteritems(key_dict):
        python_value = jsonitem.parse_json_value(k, v, config_schema)
        name = '{}.{}'.format(section, k)
        toml_config[name] = python_value

    config.save(toml_config)
    return None
def _list(json_, endpoints, app_id, package_name):
    """List installed apps

    :param json_: output json if True
    :type json_: bool
    :param endpoints: Whether to include a list of endpoints as port-host
        pairs
    :type endpoints: boolean
    :param app_id: App ID of app to show
    :type app_id: str
    :param package_name: The package to show
    :type package_name: str
    :returns: process return code
    :rtype: int
    """

    config = util.get_config()
    init_client = marathon.create_client(config)
    installed = package.installed_packages(init_client, endpoints)

    # only emit those packages that match the provided package_name and app_id
    results = []
    for pkg in installed:
        pkg_info = pkg.dict()
        if (_matches_package_name(package_name, pkg_info) and
                _matches_app_id(app_id, pkg_info)):
            if app_id:
                # if the user is asking for a specific id then only show
                # that id
                pkg_info['apps'] = [
                    app for app in pkg_info['apps'] if app == app_id
                ]
            results.append(pkg_info)

    if results or json_:
        emitting.publish_table(emitter, results, tables.package_table, json_)
    else:
        msg = ("There are currently no installed packages. "
               "Please use `dcos package install` to install a package.")
        raise DCOSException(msg)

    return 0
def resolve_package(package_name, config=None):
    """Returns the first package with the supplied name found by looking at
    the configured sources in the order they are defined.

    :param package_name: The name of the package to resolve
    :type package_name: str
    :param config: dcos config
    :type config: dcos.config.Toml | None
    :returns: The named package, if found
    :rtype: Package
    """

    if not config:
        config = util.get_config()

    for registry in registries(config):
        package = registry.get_package(package_name)
        if package:
            return package

    return None
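# A minimal sketch of the same first-match resolution using plain dicts in
# place of registry objects; the package names and versions are made up.
registries_ = [{"cassandra": "0.1.0"}, {"cassandra": "0.2.0", "spark": "1.0"}]
resolved = next((r["spark"] for r in registries_ if "spark" in r), None)
print(resolved)  # -> 1.0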
def set_val(name, value):
    """
    :param name: name of parameter
    :type name: str
    :param value: value to set to parameter `name`
    :type value: str
    :returns: Toml config
    :rtype: Toml
    """

    toml_config = util.get_config(True)

    section, subkey = split_key(name)

    config_schema = get_config_schema(section)

    new_value = jsonitem.parse_json_value(subkey, value, config_schema)

    toml_config_pre = copy.deepcopy(toml_config)
    if section not in toml_config_pre._dictionary:
        toml_config_pre._dictionary[section] = {}

    value_exists = name in toml_config
    old_value = toml_config.get(name)

    toml_config[name] = new_value

    check_config(toml_config_pre, toml_config)
    save(toml_config)

    if not value_exists:
        emitter.publish("[{}]: set to '{}'".format(name, new_value))
    elif old_value == new_value:
        emitter.publish("[{}]: already set to '{}'".format(name, old_value))
    else:
        emitter.publish("[{}]: changed from '{}' to '{}'".format(
            name, old_value, new_value))

    return toml_config