def _install(self, args):
    '''
    Install a package from a configured repo.

    args[1] is the package name. The package file is downloaded (or
    copied, for ``file://`` repos) into ``spm_cache_dir/<repo>`` and
    handed to self._local_install(). Returns False on bad invocation.
    '''
    if len(args) < 2:
        log.error('A package must be specified')
        return False

    package = args[1]
    log.debug('Installing package %s', package)
    repo_metadata = self._get_repo_metadata()
    for repo in repo_metadata:
        repo_info = repo_metadata[repo]
        if package not in repo_info['packages']:
            continue

        cache_path = '{0}/{1}'.format(self.opts['spm_cache_dir'], repo)
        filename = repo_info['packages'][package]['filename']
        dl_path = '{0}/{1}'.format(repo_info['info']['url'], filename)
        out_file = '{0}/{1}'.format(cache_path, filename)
        if not os.path.exists(cache_path):
            os.makedirs(cache_path)

        if dl_path.startswith('file://'):
            # Local filesystem repo: copy instead of downloading.
            dl_path = dl_path.replace('file://', '')
            shutil.copyfile(dl_path, out_file)
        else:
            http.query(dl_path, text_out=out_file)

        self._local_install(out_file)
        return

    # BUGFIX: previously fell through silently when no repo provided the
    # package; report the failure instead of a silent no-op.
    log.error('Cannot install package %s, no source package', package)
    return False
def _search(prefix="computeMetadata/v1/"):
    """
    Recursively look up all grains in the GCE metadata server.

    Walks the metadata tree rooted at *prefix*: directory entries (lines
    ending in "/") recurse, leaf entries are fetched and JSON-decoded
    when possible. Returns a (possibly nested) dict of decoded values.
    """
    ret = {}
    # GCE requires this header on every metadata request.
    heads = ["Metadata-Flavor: Google"]
    linedata = http.query(os.path.join(HOST, prefix), headers=True, header_list=heads)
    if "body" not in linedata:
        # Request failed; nothing at this level.
        return ret
    body = salt.utils.stringutils.to_unicode(linedata["body"])
    if (linedata["headers"].get("Content-Type", "text/plain") ==
            "application/octet-stream"):
        # Binary endpoint: return the raw body instead of recursing.
        return body
    for line in body.split("\n"):
        # Block list: null bytes cause oddities, and "project/" contains
        # ssh keys used to log in to the system, so keep both out of the
        # grains.
        if line in ["", "project/"]:
            continue
        if line.endswith("/"):
            # Directory entry: recurse one level deeper.
            ret[line[:-1]] = _search(prefix=os.path.join(prefix, line))
        else:
            retdata = http.query(os.path.join(HOST, prefix, line),
                                 header_list=heads).get("body", None)
            if isinstance(retdata, bytes):
                # Prefer JSON decoding; fall back to a plain unicode string.
                try:
                    ret[line] = salt.utils.json.loads(
                        salt.utils.stringutils.to_unicode(retdata))
                except ValueError:
                    ret[line] = salt.utils.stringutils.to_unicode(retdata)
            else:
                ret[line] = retdata
    return salt.utils.data.decode(ret)
def _search(prefix='latest/'):
    '''
    Recursively look up all grains in the EC2 metadata server.

    Directory entries (trailing "/") recurse; first-level entries are
    treated as directories; "key=value" lines recurse under the key but
    store under the value; leaves are fetched and JSON-decoded when
    possible.
    '''
    ret = {}
    linedata = http.query(os.path.join(HOST, prefix), headers=True)
    if 'body' not in linedata:
        return ret
    if linedata['headers'].get('Content-Type', 'text/plain') == 'application/octet-stream':
        # Binary data (e.g. user-data): return it verbatim.
        return linedata['body']
    for line in linedata['body'].split('\n'):
        if line.endswith('/'):
            ret[line[:-1]] = _search(prefix=os.path.join(prefix, line))
        elif prefix == 'latest/':
            # (gtmanfred) The first level should have a forward slash since
            # they have stuff underneath. This will not be doubled up though,
            # because lines ending with a slash are checked first.
            ret[line] = _search(prefix=os.path.join(prefix, line + '/'))
        elif '=' in line:
            # BUGFIX: split only on the first '=' so values that themselves
            # contain '=' do not raise ValueError on unpacking.
            key, value = line.split('=', 1)
            ret[value] = _search(prefix=os.path.join(prefix, key))
        else:
            retdata = http.query(os.path.join(HOST, prefix, line)).get('body', None)
            # (gtmanfred) This try except block is slightly faster than
            # checking if the string starts with a curly brace
            # BUGFIX: also catch TypeError, which json.loads raises when the
            # request failed and retdata is None.
            try:
                ret[line] = json.loads(retdata)
            except (ValueError, TypeError):
                ret[line] = retdata
    return ret
def _install(self, args):
    '''
    Install a single named package from a configured repo.
    '''
    if len(args) < 2:
        raise SPMInvocationError('A package must be specified')

    package = args[1]
    self._verbose('Installing package {0}'.format(package), log.debug)

    repo_metadata = self._get_repo_metadata()
    for repo_name in repo_metadata:
        repo_info = repo_metadata[repo_name]
        if package not in repo_info['packages']:
            continue

        # Cache the package file under spm_cache_dir/<repo>.
        cache_path = '{0}/{1}'.format(self.opts['spm_cache_dir'], repo_name)
        pkg_file = repo_info['packages'][package]['filename']
        dl_path = '{0}/{1}'.format(repo_info['info']['url'], pkg_file)
        out_file = '{0}/{1}'.format(cache_path, pkg_file)
        if not os.path.exists(cache_path):
            os.makedirs(cache_path)

        if dl_path.startswith('file://'):
            # Local filesystem repo; just copy the package file.
            shutil.copyfile(dl_path.replace('file://', ''), out_file)
        else:
            http.query(dl_path, text_out=out_file)

        self._local_install((None, out_file), package)
        return

    raise SPMPackageError('Cannot install package {0}, no source package'.format(package))
def test_query_error_handling(self):
    # Both a refused connection and an unresolvable host must yield a
    # dict carrying an "error" string rather than raising.
    for bad_url in ("http://127.0.0.1:0", "http://myfoobardomainthatnotexist"):
        ret = http.query(bad_url)
        self.assertTrue(isinstance(ret, dict))
        self.assertTrue(isinstance(ret.get("error", None), str))
def __virtual__():
    '''
    Load only when the minion is running in a public cloud.

    Probes the Amazon/Google/Azure metadata endpoints in parallel and,
    on a match, caches the instance id in the module-global INSTANCE_ID.
    '''
    global INSTANCE_ID
    log.debug("Checking if minion is running in the public cloud")

    # Cheap TCP probe first: skip all HTTP checks when the metadata IP
    # is unreachable.
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.settimeout(0.1)
    result = sock.connect_ex((INTERNAL_API_IP, 80))
    if result != 0:
        return False

    def _do_api_request(data):
        '''Query one (name, url, headers) endpoint; never let an error propagate.'''
        opts = {
            'http_connect_timeout': 0.1,
            'http_request_timeout': 0.1,
        }
        try:
            ret = {
                data[0]: http.query(data[1], status=True, header_dict=data[2],
                                    raise_error=False, opts=opts)
            }
        # BUGFIX: a bare `except:` also swallows SystemExit and
        # KeyboardInterrupt; catch Exception instead.
        except Exception:
            ret = {data[0]: dict()}
        return ret

    api_check_dict = [
        ('amazon', os.path.join(HOST, AMAZON_URL_PATH), None),
        ('google', os.path.join(HOST, GOOGLE_URL_PATH), {"Metadata-Flavor": "Google"}),
        ('azure', os.path.join(HOST, AZURE_URL_PATH) + AZURE_API_ARGS, {"Metadata": "true"}),
    ]
    api_ret = {}
    results = []
    try:
        pool = ThreadPool(3)
        results = pool.map(_do_api_request, api_check_dict)
        pool.close()
        pool.join()
    except Exception as exc:
        # log.exception already records the traceback, replacing the
        # previous manual `import traceback` + format_exc() dance.
        log.exception(
            "Exception while creating a ThreadPool for accessing metadata API: %s", exc)

    for i in results:
        api_ret.update(i)

    if _is_valid_endpoint(api_ret['amazon'], 'instance-id'):
        INSTANCE_ID = http.query(os.path.join(HOST, AMAZON_URL_PATH, 'instance-id'),
                                 raise_error=False)['body']
        return True
    elif _is_valid_endpoint(api_ret['azure'], 'vmId'):
        INSTANCE_ID = http.query(os.path.join(HOST, AZURE_URL_PATH, 'vmId') + AZURE_API_ARGS,
                                 header_dict={"Metadata": "true"}, raise_error=False)['body']
        return True
    elif _is_valid_endpoint(api_ret['google'], 'id'):
        INSTANCE_ID = http.query(os.path.join(HOST, GOOGLE_URL_PATH, 'id'),
                                 header_dict={"Metadata-Flavor": "Google"}, raise_error=False)['body']
        return True
    return False
def __virtual__():
    '''
    Detect whether the minion runs on a public cloud by probing the
    well-known metadata endpoints; cache the instance id on success.
    '''
    global INSTANCE_ID

    # Quick TCP probe first: skip the HTTP checks entirely when the
    # metadata IP is unreachable.
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.settimeout(0.1)
    if sock.connect_ex((INTERNAL_API_IP, 80)) != 0:
        return False

    def _do_api_request(data):
        # data is a (provider_name, url, headers) tuple.
        return {data[0]: http.query(data[1], status=True, header_dict=data[2])}

    endpoints = [
        ('amazon', os.path.join(HOST, AMAZON_URL_PATH), None),
        ('google', os.path.join(HOST, GOOGLE_URL_PATH), {
            "Metadata-Flavor": "Google"
        }),
        ('azure', os.path.join(HOST, AZURE_URL_PATH) + AZURE_API_ARGS, {
            "Metadata": "true"
        }),
    ]

    api_ret = {}
    results = []
    try:
        pool = ThreadPool(3)
        results = pool.map(_do_api_request, endpoints)
        pool.close()
        pool.join()
    except Exception as exc:
        log.error(
            "Exception while creating a ThreadPool for accessing metadata API: %s",
            exc)

    for item in results:
        api_ret.update(item)

    amazon = api_ret['amazon']
    if amazon.get('status') == 200 and "instance-id" in amazon['body']:
        INSTANCE_ID = http.query(
            os.path.join(HOST, AMAZON_URL_PATH, 'instance-id'))['body']
        return True

    azure = api_ret['azure']
    if azure.get('status') == 200 and "vmId" in azure['body']:
        INSTANCE_ID = http.query(
            os.path.join(HOST, AZURE_URL_PATH, 'vmId') + AZURE_API_ARGS,
            header_dict={"Metadata": "true"})['body']
        return True

    google = api_ret['google']
    if google.get('status') == 200 and "id" in google['body']:
        INSTANCE_ID = http.query(
            os.path.join(HOST, GOOGLE_URL_PATH, 'id'),
            header_dict={"Metadata-Flavor": "Google"})['body']
        return True

    return False
def _search(prefix='latest/'):
    '''
    Recursively look up all grains in the metadata server.

    Directory entries (trailing "/") recurse; "key=value" lines recurse
    under the key but store under the value; leaves are fetched and
    returned verbatim.
    '''
    ret = {}
    for line in http.query(os.path.join(HOST, prefix))['body'].split('\n'):
        if line.endswith('/'):
            ret[line[:-1]] = _search(prefix=os.path.join(prefix, line))
        elif '=' in line:
            # BUGFIX: split only on the first '=' so values that contain
            # '=' themselves do not raise ValueError on unpacking.
            key, value = line.split('=', 1)
            ret[value] = _search(prefix=os.path.join(prefix, key))
        else:
            ret[line] = http.query(os.path.join(HOST, prefix, line))['body']
    return ret
def get(key, service=None, profile=None):  # pylint: disable=W0613
    '''
    Get a decrypted secret from the tISMd API.
    '''
    if not profile.get('url') or not profile.get('token'):
        raise SaltConfigurationError(
            "url and/or token missing from the tism sdb profile")

    payload = salt.utils.json.dumps({"token": profile['token'], "encsecret": key})
    result = http.query(profile['url'], method='POST', data=payload)

    decrypted = result.get('body')
    if decrypted:
        return decrypted

    # Lazy %-args: the message is only built when the warning is emitted.
    log.warning('tism.get sdb decryption request failed with error %s',
                result.get('error', 'unknown'))
    return 'ERROR' + six.text_type(result.get('status', 'unknown'))
def query(key, value=None, service=None, profile=None):  # pylint: disable=W0613
    """
    Get a value from the REST interface.

    The key may carry "?a=b&c=d" style pairs which become template
    variables when expanding the profile's URL template.
    """
    comps = key.split("?")
    key = comps[0]
    # Pairs after the '?' become template variables for the renderer.
    key_vars = dict(pair.split("=") for pair in comps[1].split("&"))

    renderer = __opts__.get("renderer", "jinja|yaml")
    rend = salt.loader.render(__opts__, {})
    blacklist = __opts__.get("renderer_blacklist")
    whitelist = __opts__.get("renderer_whitelist")
    url = compile_template(
        ":string:",
        rend,
        renderer,
        blacklist,
        whitelist,
        input_data=profile[key]["url"],
        **key_vars
    )

    # Everything else in the profile (other than backend/url) is passed
    # through to http.query as extra options.
    extras = {
        item: profile[key][item]
        for item in profile[key]
        if item not in ("backend", "url")
    }

    result = http.query(url, decode=True, **extras)
    return result["dict"]
def query(key, value=None, service=None, profile=None):  # pylint: disable=W0613
    '''
    Get a value from the REST interface.

    The key may carry "?a=b&c=d" style pairs which become template
    variables when expanding the profile's URL template.
    '''
    comps = key.split('?')
    key = comps[0]
    key_vars = {}
    for pair in comps[1].split('&'):
        pair_key, pair_val = pair.split('=')
        key_vars[pair_key] = pair_val

    renderer = __opts__.get('renderer', 'yaml_jinja')
    rend = salt.loader.render(__opts__, {})
    blacklist = __opts__.get('renderer_blacklist')
    whitelist = __opts__.get('renderer_whitelist')
    url = compile_template(
        ':string:',
        rend,
        renderer,
        blacklist,
        whitelist,
        input_data=profile[key]['url'],
        **key_vars
    )

    # BUGFIX: key_vars are template variables for the renderer, not
    # http.query() arguments. Forward the profile's extra settings
    # (everything except backend/url) to http.query instead.
    extras = {}
    for item in profile[key]:
        if item not in ('backend', 'url'):
            extras[item] = profile[key][item]

    result = http.query(
        url,
        decode=True,
        **extras
    )
    return result['dict']
def query(key, value=None, service=None, profile=None):  # pylint: disable=W0613
    '''
    Get a value from the REST interface.

    "?a=b&c=d" pairs on the key become template variables for the URL
    template taken from the profile.
    '''
    comps = key.split('?')
    key = comps[0]
    key_vars = {}
    for pair in comps[1].split('&'):
        pair_key, pair_val = pair.split('=')
        key_vars[pair_key] = pair_val

    renderer = __opts__.get('renderer', 'yaml_jinja')
    rend = salt.loader.render(__opts__, {})
    blacklist = __opts__.get('renderer_blacklist')
    whitelist = __opts__.get('renderer_whitelist')
    url = compile_template(':string:',
                           rend,
                           renderer,
                           blacklist,
                           whitelist,
                           input_data=profile[key]['url'],
                           **key_vars)

    # BUGFIX: the template variables were being forwarded to http.query();
    # forward the profile's extra settings (everything but backend/url).
    extras = {}
    for item in profile[key]:
        if item not in ('backend', 'url'):
            extras[item] = profile[key][item]
    result = http.query(url, decode=True, **extras)
    return result['dict']
def _install(self, args):
    '''
    Install a package from a repo.

    args[1] is the package name. Raises SPMInvocationError when no
    package is given and SPMPackageError when no repo provides it.
    '''
    if len(args) < 2:
        raise SPMInvocationError('A package must be specified')

    package = args[1]
    log.debug('Installing package {0}'.format(package))
    repo_metadata = self._get_repo_metadata()
    for repo in repo_metadata:
        repo_info = repo_metadata[repo]
        if package not in repo_info['packages']:
            continue

        cache_path = '{0}/{1}'.format(self.opts['spm_cache_dir'], repo)
        filename = repo_info['packages'][package]['filename']
        dl_path = '{0}/{1}'.format(repo_info['info']['url'], filename)
        out_file = '{0}/{1}'.format(cache_path, filename)
        if not os.path.exists(cache_path):
            os.makedirs(cache_path)

        if dl_path.startswith('file://'):
            # Local filesystem repo: copy instead of downloading.
            dl_path = dl_path.replace('file://', '')
            shutil.copyfile(dl_path, out_file)
        else:
            response = http.query(dl_path, text=True)
            with salt.utils.fopen(out_file, 'w') as outf:
                # BUGFIX: default to '' so a failed download (no 'text'
                # key) does not pass None to write() and raise TypeError.
                outf.write(response.get('text', ''))

        self._local_install((None, out_file), package)
        return
    raise SPMPackageError('Cannot install package {0}, no source package'.format(package))
def delete_secret(namespace, name, apiserver_url=None, force=True):
    '''
    .. versionadded:: 2016.3.0

    Delete kubernetes secret in the defined namespace. Namespace is the
    mandatory parameter as well as name.

    CLI Example:

    .. code-block:: bash

        salt '*' k8s.delete_secret namespace_name secret_name

        salt '*' k8s.delete_secret namespace_name secret_name http://kube-master.cluster.local

    '''
    # NOTE(review): the `force` parameter is accepted but never used in
    # this function — confirm whether it should influence the DELETE.
    ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}

    # Try to get kubernetes master
    apiserver_url = _guess_apiserver(apiserver_url)
    if apiserver_url is None:
        return False

    # we need namespace to delete secret in it
    if not _get_namespaces(apiserver_url, namespace):
        return {'name': name,
                'result': False,
                'comment': "Namespace doesn't exists, can't delete anything there",
                'changes': {}}

    url = "{0}/api/v1/namespaces/{1}/secrets/{2}".format(apiserver_url, namespace, name)
    res = http.query(url, method='DELETE')
    # A non-empty body means the API server acknowledged the deletion.
    if res.get('body'):
        ret['comment'] = "Removed secret {0} in {1} namespace".format(name, namespace)
    return ret
def run():
    '''
    Ensure the "mitoken" grain is present, fetching a value from the
    placeholder API when it is not already set.
    '''
    if __opts__['test']:
        # Dry run: just report whether the grain would change.
        return {
            'token': {
                'test.configurable_test_state': [
                    {'name': 'mitoken'},
                    {'result': True},
                    {'changes': not grains.get('mitoken')},
                ]
            }
        }

    token = grains.get('mitoken')
    if not token:
        # No grain yet: pull an id from the placeholder API.
        response = query('http://jsonplaceholder.typicode.com/posts/1',
                         decode_type=False)
        token = json.loads(response['body'])['id']

    return {
        'token': {
            'grains.present': [
                {'name': 'mitoken'},
                {'value': token},
            ]
        }
    }
def get(key, service=None, profile=None):  # pylint: disable=W0613
    """
    Get a decrypted secret from the tISMd API.
    """
    if not profile.get("url") or not profile.get("token"):
        raise SaltConfigurationError(
            "url and/or token missing from the tism sdb profile")

    payload = {"token": profile["token"], "encsecret": key}
    result = http.query(profile["url"], method="POST",
                        data=salt.utils.json.dumps(payload))

    decrypted = result.get("body")
    if decrypted:
        return decrypted

    log.warning(
        "tism.get sdb decryption request failed with error %s",
        result.get("error", "unknown"),
    )
    return "ERROR" + six.text_type(result.get("status", "unknown"))
def __virtual__():
    """
    Load only when enabled in config and the GCE metadata server answers
    with its identifying header.
    """
    if __opts__.get("metadata_server_grains", False) is False:
        return False
    probe = http.query(HOST, status=True, headers=True)
    flavor = probe.get("headers", {}).get("Metadata-Flavor", False)
    return probe.get("status", 404) == 200 and flavor == "Google"
def _do_api_request(data):
    '''
    Query one metadata endpoint described by a (name, url, headers)
    tuple and return {name: response}.
    '''
    name, url, headers = data
    response = http.query(url,
                          status=True,
                          header_dict=headers,
                          raise_error=False)
    return {name: response}
def _extension_info(uuid):
    '''
    Fetch metadata for a GNOME Shell extension from the extensions web
    service, keyed by the extension UUID and the running shell's major
    version. Returns the JSON-decoded response body.
    '''
    shell_version = _shell_major_version()
    # NOTE(review): urllib.quote_plus is the Python 2 location; on
    # Python 3 this lives at urllib.parse.quote_plus — confirm which
    # interpreter this module targets.
    url = EXTENSION_BASE_URL + EXTENSION_INFO_URL.format(
        urllib.quote_plus(uuid), shell_version)
    log.debug('Looking up extension {0} at URL {1}'.format(uuid, url))
    info = query(url, decode=False)
    log.debug('Got extension info {0}'.format(info))
    # Strip whitespace before decoding; the body is expected to be JSON.
    return json.loads(info['body'].strip())
def _query_http(self, dl_path, repo_info, decode_body=True):
    """
    Download files via http

    dl_path
        URL to fetch.
    repo_info
        Repo configuration dict; may carry ``username``/``password``
        for basic auth.
    decode_body
        Passed through to http.query to control body decoding.

    Returns the parsed SPM-METADATA (YAML) when fetching a metadata
    file, the raw response text otherwise, or None when an error was
    reported to the UI instead of raised.
    """
    query = None
    response = None

    try:
        if "username" in repo_info:
            try:
                if "password" in repo_info:
                    # Authenticated fetch.
                    query = http.query(
                        dl_path,
                        text=True,
                        username=repo_info["username"],
                        password=repo_info["password"],
                        decode_body=decode_body,
                    )
                else:
                    # Username without password is a config error.
                    raise SPMException(
                        "Auth defined, but password is not set for username: '******'"
                        .format(repo_info["username"]))
            except SPMException as exc:
                self.ui.error(str(exc))
        else:
            # Anonymous fetch.
            query = http.query(dl_path, text=True, decode_body=decode_body)
    except SPMException as exc:
        self.ui.error(str(exc))

    try:
        if query:
            if "SPM-METADATA" in dl_path:
                # Metadata files are YAML; parse before returning.
                response = salt.utils.yaml.safe_load(
                    query.get("text", "{}"))
            else:
                response = query.get("text")
        else:
            raise SPMException(
                "Response is empty, please check for Errors above.")
    except SPMException as exc:
        self.ui.error(str(exc))

    return response
def _get_namespaces(apiserver_url, name=""):
    '''Return the named namespace (all namespaces when name is empty), or None on failure.'''
    target = "{0}/api/v1/namespaces/{1}".format(apiserver_url, name)
    reply = http.query(target)
    body = reply.get("body")
    if not body:
        return None
    return json.loads(body)
def _get_namespaces(apiserver_url, name=""):
    """Return the named namespace (all namespaces when *name* is empty) or None."""
    target = "{}/api/v1/namespaces/{}".format(apiserver_url, name)
    reply = http.query(target)
    body = reply.get("body")
    return salt.utils.json.loads(body) if body else None
def test_backends_decode_body_true(self):
    """
    test all backends when using decode_body=True that it returns
    string and decodes it.
    """
    for backend in ("tornado", "requests", "urllib2"):
        ret = http.query(self.get_webserver.url("core.sls"), backend=backend)
        assert isinstance(ret.get("body", ""), str)
def _get_secrets(namespace, name, apiserver_url):
    """Fetch one secret from a namespace; None when the request yields no body."""
    target = "{0}/api/v1/namespaces/{1}/secrets/{2}".format(
        apiserver_url, namespace, name)
    reply = http.query(target)
    body = reply.get("body")
    return salt.utils.json.loads(body) if body else None
def test():
    '''
    HTTP-probe every responding minion and report any non-200 answers.
    '''
    print("Testing HTTP Against Each Minion")
    local = client.LocalClient(__opts__['conf_file'])
    minions = local.cmd('*', 'test.ping', timeout=1)

    all_ok = True
    for minion in sorted(minions):
        res = http.query('http://{}'.format(minion), status=True)
        if res['status'] != 200:
            all_ok = False
            print("{}: {}".format(minion, res['body']))
    return all_ok
def __virtual__():
    '''
    Load only when metadata_server_grains is enabled and the metadata
    endpoint is reachable and answering.
    '''
    if __opts__.get('metadata_server_grains', False) is False:
        return False

    # Cheap TCP probe before the HTTP request.
    probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    probe.settimeout(.1)
    if probe.connect_ex((IP, 80)) != 0:
        return False

    status = http.query(os.path.join(HOST, 'latest/'), status=True).get('status')
    return status == 200
def _get_secrets(namespace, name, apiserver_url):
    '''Fetch one secret from a namespace; None when the request yields no body.'''
    target = "{0}/api/v1/namespaces/{1}/secrets/{2}".format(
        apiserver_url, namespace, name)
    reply = http.query(target)
    body = reply.get("body")
    if not body:
        return None
    return json.loads(body)
def _update_metadata(repo, repo_info):
    '''
    Refresh the cached SPM-METADATA for a repo (local file:// or HTTP).
    '''
    dl_path = '{0}/SPM-METADATA'.format(repo_info['url'])
    if dl_path.startswith('file://'):
        # Local repo: read the metadata file straight off disk.
        local_path = dl_path.replace('file://', '')
        with salt.utils.fopen(local_path, 'r') as rpm:
            metadata = yaml.safe_load(rpm)
    else:
        response = http.query(dl_path, text=True)
        metadata = yaml.safe_load(response.get('text', '{}'))
    cache.store('.', repo, metadata)
def _search(prefix="latest/"):
    """
    Recursively look up all grains in the metadata server.

    Directory listings (lines ending in "/") recurse; "key=value" lines
    recurse under the key and store under the value; leaf entries are
    fetched and JSON-decoded when possible.
    """
    ret = {}
    linedata = http.query(os.path.join(HOST, prefix), headers=True)
    if "body" not in linedata:
        return ret
    body = salt.utils.stringutils.to_unicode(linedata["body"])
    if (
        linedata["headers"].get("Content-Type", "text/plain")
        == "application/octet-stream"
    ):
        # Binary endpoint (e.g. user-data): return verbatim.
        return body
    for line in body.split("\n"):
        if line.endswith("/"):
            ret[line[:-1]] = _search(prefix=os.path.join(prefix, line))
        elif prefix == "latest/":
            # (gtmanfred) The first level should have a forward slash since
            # they have stuff underneath. This will not be doubled up though,
            # because lines ending with a slash are checked first.
            ret[line] = _search(prefix=os.path.join(prefix, line + "/"))
        elif line.endswith(("dynamic", "meta-data")):
            ret[line] = _search(prefix=os.path.join(prefix, line))
        elif "=" in line:
            # BUGFIX: split only on the first '=' so values that contain
            # '=' themselves do not raise ValueError on unpacking.
            key, value = line.split("=", 1)
            ret[value] = _search(prefix=os.path.join(prefix, key))
        else:
            retdata = http.query(os.path.join(HOST, prefix, line)).get("body", None)
            # (gtmanfred) This try except block is slightly faster than
            # checking if the string starts with a curly brace
            if isinstance(retdata, bytes):
                try:
                    ret[line] = salt.utils.json.loads(
                        salt.utils.stringutils.to_unicode(retdata)
                    )
                except ValueError:
                    ret[line] = salt.utils.stringutils.to_unicode(retdata)
            else:
                ret[line] = retdata
    return salt.utils.data.decode(ret)
def _kput(url, data):
    '''
    put any object in kubernetes based on URL
    '''
    headers = {"Content-Type": "application/json"}
    reply = http.query(url, method='PUT', header_dict=headers,
                       data=json.dumps(data))
    # Return the raw reply on error, the decoded body otherwise.
    if reply.get('error'):
        return reply
    return json.loads(reply.get('body'))
def _query_http(self, dl_path, repo_info):
    '''
    Download files via http

    dl_path
        URL to fetch.
    repo_info
        Repo configuration; may contain ``username``/``password`` for
        basic auth.

    Returns parsed YAML for SPM-METADATA fetches, the raw response text
    otherwise, or None when an error was reported to the UI instead of
    raised.
    '''
    query = None
    response = None

    try:
        if 'username' in repo_info:
            try:
                if 'password' in repo_info:
                    # Authenticated fetch.
                    query = http.query(dl_path,
                                       text=True,
                                       username=repo_info['username'],
                                       password=repo_info['password'])
                else:
                    # Username without password is a config error.
                    raise SPMException(
                        'Auth defined, but password is not set for username: \'{0}\''
                        .format(repo_info['username']))
            except SPMException as exc:
                self.ui.error(six.text_type(exc))
        else:
            # Anonymous fetch.
            query = http.query(dl_path, text=True)
    except SPMException as exc:
        self.ui.error(six.text_type(exc))

    try:
        if query:
            if 'SPM-METADATA' in dl_path:
                # Metadata files are YAML; parse before returning.
                response = salt.utils.yaml.safe_load(
                    query.get('text', '{}'))
            else:
                response = query.get('text')
        else:
            raise SPMException(
                'Response is empty, please check for Errors above.')
    except SPMException as exc:
        self.ui.error(six.text_type(exc))

    return response
def _kpost(url, data):
    '''
    create any object in kubernetes based on URL
    '''
    headers = {"Content-Type": "application/json"}
    log.trace("url is: {0}, data is: {1}".format(url, data))
    reply = http.query(url, method='POST', header_dict=headers,
                       data=json.dumps(data))
    # Return the raw reply on error, the decoded body otherwise.
    if reply.get('error'):
        return reply
    return json.loads(reply.get('body'))
def _update_metadata(repo, repo_info):
    '''
    Download (or read) a repo's SPM-METADATA and msgpack it into the
    local cache.
    '''
    dl_path = '{0}/SPM-METADATA'.format(repo_info['url'])
    if dl_path.startswith('file://'):
        # Local repo: read and YAML-parse the metadata file off disk.
        dl_path = dl_path.replace('file://', '')
        with salt.utils.fopen(dl_path, 'r') as rpm:
            metadata = yaml.safe_load(rpm)
    else:
        response = http.query(dl_path, text=True)
        # NOTE(review): unlike the file:// branch, the HTTP response text
        # is cached raw (not YAML-parsed) — confirm this asymmetry is
        # intended.
        metadata = response.get('text', {})
    # NOTE(review): `self` is not defined in this function's scope — this
    # looks like a method that lost its `self` parameter and will raise
    # NameError here; confirm against the enclosing class.
    cache_path = '{0}/{1}.p'.format(self.opts['spm_cache_dir'], repo)
    with salt.utils.fopen(cache_path, 'w') as cph:
        msgpack.dump(metadata, cph)
def service_probe(name, url_path, timeout=None):
    """
    Probe if service has been started successfully.

    name
        The name of the Zoomdata service package

    url_path
        The URL path to service health HTTP endpoint. Used to check if
        the service is available.

    timeout
        Wait for specified amount of time (in seconds) for service to
        come up and respond to requests. Works only if ``url_path`` has
        been provided.
    """
    ret = {
        'name': name,
        'changes': {},
        'result': False,
        'comment': 'The service {} not found.'.format(name),
        'pchanges': {},
    }

    # pylint: disable=undefined-variable
    if __opts__['test']:
        # Dry run: report intent without probing.
        ret['comment'] = 'The state will probe service of readiness'
        ret['result'] = None
        return ret

    # Derive the bare service name ("zoomdata-foo" -> "foo") and read its
    # HTTP port from the service's properties file under the install prefix.
    service = name.replace('zoomdata-', '', 1)
    prefix = __salt__['defaults.get']('zoomdata:zoomdata:prefix')
    port = __salt__['zoomdata.properties'](
        __salt__['file.join'](prefix, 'conf', '{}.properties'.format(service))
    )['server.port']
    url = urljoin('http://localhost:{}/'.format(port), url_path)

    if timeout:
        # Poll until the endpoint answers 200 or the timeout elapses.
        res = __salt__['http.wait_for_successful_query'](
            url, wait_for=timeout, status=200)
        # pylint: enable=undefined-variable
    else:
        # Single-shot probe.
        res = http.query(url)

    if res and not res.get('error'):
        ret['result'] = True
        ret['comment'] = res['body']

    return ret
def _kpatch(url, data):
    '''
    patch any object in kubernetes based on URL
    '''
    headers = {"Content-Type": "application/json-patch+json"}
    reply = http.query(url, method='PATCH', header_dict=headers,
                       data=json.dumps(data))
    # Log and return the raw reply on error, the decoded body otherwise.
    if reply.get('error'):
        log.error("Got an error: {0}".format(reply.get("error")))
        return reply
    return json.loads(reply.get('body'))
def test_requests_multipart_formdata_post(self):
    '''
    Test handling of a multipart/form-data POST using the requests backend
    '''
    expected_tmpl = '{0}\r\nContent-Disposition: form-data; name="fieldname_here"\r\n\r\nmydatahere\r\n{0}--\r\n'
    ret = http.query(
        self.post_web_root,
        method='POST',
        data='mydatahere',
        formdata=True,
        formdata_fieldname='fieldname_here',
        backend='requests'
    )
    body = ret.get('body', '')
    # The boundary token is everything up to the first CR.
    boundary = body[:body.find('\r')]
    self.assertEqual(body, expected_tmpl.format(boundary))
def _get_labels(node, apiserver_url):
    '''Return the labels dict of a kube node (or an error message/response).'''
    target = "{0}/api/v1/nodes/{1}".format(apiserver_url, node)
    reply = http.query(target)

    if 'body' in reply:
        node_info = json.loads(reply.get('body'))
    elif reply.get('status', 0) == 404:
        return "Node {0} doesn't exist".format(node)
    else:
        # Unexpected failure: hand the raw response back to the caller.
        return reply

    return node_info.get('metadata', {}).get('labels', {})
def _update_metadata(repo, repo_info):
    '''
    Download (or read) a repo's SPM-METADATA and msgpack it into the
    local cache.
    '''
    dl_path = '{0}/SPM-METADATA'.format(repo_info['url'])
    if dl_path.startswith('file://'):
        # Local repo: read and YAML-parse the metadata file off disk.
        dl_path = dl_path.replace('file://', '')
        with salt.utils.fopen(dl_path, 'r') as rpm:
            metadata = yaml.safe_load(rpm)
    else:
        response = http.query(dl_path, text=True)
        # NOTE(review): unlike the file:// branch, the HTTP response text
        # is cached raw (not YAML-parsed) — confirm this asymmetry is
        # intended.
        metadata = response.get('text', {})
    # NOTE(review): `self` is not defined in this function's scope — this
    # looks like a method that lost its `self` parameter and will raise
    # NameError here; confirm against the enclosing class.
    cache_path = '{0}/{1}.p'.format(
        self.opts['spm_cache_dir'],
        repo
    )
    with salt.utils.fopen(cache_path, 'w') as cph:
        msgpack.dump(metadata, cph)
def get(key, service=None, profile=None):  # pylint: disable=W0613
    '''
    Get a decrypted secret from the tISMd API.

    key
        The encrypted secret to send for decryption.

    Raises SaltConfigurationError when the profile lacks ``url`` or
    ``token``. Returns the decrypted body, or an "ERROR<status>" string
    on failure.
    '''
    if not profile.get('url') or not profile.get('token'):
        raise SaltConfigurationError("url and/or token missing from the tism sdb profile")

    request = {"token": profile['token'], "encsecret": key}

    result = http.query(
        profile['url'],
        method='POST',
        data=json.dumps(request),
    )

    decrypted = result.get('body')

    if not decrypted:
        # Use lazy %-args so the message is only formatted when the
        # warning is actually emitted (avoids eager .format() work).
        log.warning('tism.get sdb decryption request failed with error %s',
                    result.get('error', 'unknown'))
        return "ERROR" + str(result.get('status', 'unknown'))

    return decrypted
def _install(self, args):
    '''
    Install one or more packages — local .spm files or repo package
    names — resolving dependencies first and confirming with the user
    unless assume_yes is set.
    '''
    if len(args) < 2:
        raise SPMInvocationError('A package must be specified')

    packages = args[1:]
    file_map = {}       # pkg_name -> local .spm path, for file installs
    optional = []
    recommended = []
    to_install = []
    for pkg in packages:
        if pkg.endswith('.spm'):
            if self._pkgfiles_fun('path_exists', pkg):
                # Derive the package name from the file name: strip the
                # trailing version-release components and any directories.
                comps = pkg.split('-')
                comps = '-'.join(comps[:-2]).split('/')
                pkg_name = comps[-1]

                # Read the FORMULA out of the package tarball to learn
                # its dependency declarations.
                formula_tar = tarfile.open(pkg, 'r:bz2')
                formula_ref = formula_tar.extractfile('{0}/FORMULA'.format(pkg_name))
                formula_def = yaml.safe_load(formula_ref)

                file_map[pkg_name] = pkg
                to_, op_, re_ = self._check_all_deps(
                    pkg_name=pkg_name,
                    pkg_file=pkg,
                    formula_def=formula_def
                )
                to_install.extend(to_)
                optional.extend(op_)
                recommended.extend(re_)
            else:
                raise SPMInvocationError('Package file {0} not found'.format(pkg))
        else:
            # Repo package: resolve dependencies by name only.
            to_, op_, re_ = self._check_all_deps(pkg_name=pkg)
            to_install.extend(to_)
            optional.extend(op_)
            recommended.extend(re_)

    # De-duplicate and drop empty entries before reporting.
    optional = set(filter(len, optional))
    self.ui.status('The following dependencies are optional:\n\t{0}\n'.format(
        '\n\t'.join(optional)
    ))
    recommended = set(filter(len, recommended))
    self.ui.status('The following dependencies are recommended:\n\t{0}\n'.format(
        '\n\t'.join(recommended)
    ))

    to_install = set(filter(len, to_install))
    msg = 'Installing packages:\n\t{0}\n'.format('\n\t'.join(to_install))
    if not self.opts['assume_yes']:
        # Interactive confirmation before touching anything.
        self.ui.confirm(msg)

    repo_metadata = self._get_repo_metadata()

    for package in to_install:
        if package in file_map:
            # Local .spm file supplied on the command line.
            self._install_indv_pkg(package, file_map[package])
        else:
            # Find the first repo that provides the package.
            for repo in repo_metadata:
                repo_info = repo_metadata[repo]
                if package in repo_metadata[repo]['packages']:
                    cache_path = '{0}/{1}'.format(
                        self.opts['spm_cache_dir'],
                        repo
                    )

                    # Download the package
                    dl_path = '{0}/{1}'.format(
                        repo_info['info']['url'],
                        repo_info['packages'][package]['filename']
                    )
                    out_file = '{0}/{1}'.format(
                        cache_path,
                        repo_info['packages'][package]['filename']
                    )
                    if not os.path.exists(cache_path):
                        os.makedirs(cache_path)

                    if dl_path.startswith('file://'):
                        # Local filesystem repo: copy instead of download.
                        dl_path = dl_path.replace('file://', '')
                        shutil.copyfile(dl_path, out_file)
                    else:
                        response = http.query(dl_path, text=True)
                        with salt.utils.fopen(out_file, 'w') as outf:
                            outf.write(response.get("text"))

                    # Kick off the install
                    self._install_indv_pkg(package, out_file)
    return