def get_parameter(
    name,
    withdecryption=False,
    resp_json=False,
    region=None,
    key=None,
    keyid=None,
    profile=None,
):
    """
    Fetch a single parameter value from SSM Parameter Store.

    Returns the parameter's value — JSON-decoded when ``resp_json`` is set —
    or ``False`` when no parameter with that name exists.

    .. versionadded:: 3000

    .. code-block:: text

        salt-call boto_ssm.get_parameter test-param withdescription=True
    """
    client = __utils__["boto3.get_connection"](
        "ssm", region=region, key=key, keyid=keyid, profile=profile
    )
    try:
        response = client.get_parameter(Name=name, WithDecryption=withdecryption)
    except client.exceptions.ParameterNotFound:
        log.warning("get_parameter: Unable to locate name: %s", name)
        return False
    value = response["Parameter"]["Value"]
    return json.loads(value) if resp_json else value
def test_gte(self):
    '''
    Test thin.gte external call for processing the info about tops per interpreter.

    :return:
    '''
    # gte() returns a JSON document; decode it before inspecting.
    decoded = json.loads(thin.gte())
    assert decoded.get('foo') == 'bar'
def get_parameter(name, withdecryption=False, resp_json=False, region=None, key=None, keyid=None, profile=None):
    '''
    Retrieves a parameter from SSM Parameter Store.

    Returns the parameter's value — JSON-decoded when ``resp_json`` is set —
    or ``False`` when no parameter with that name exists.

    .. versionadded:: Neon

    .. code-block:: text

        salt-call boto_ssm.get_parameter test-param withdescription=True
    '''
    client = __utils__['boto3.get_connection']('ssm', region=region, key=key, keyid=keyid, profile=profile)
    try:
        response = client.get_parameter(Name=name, WithDecryption=withdecryption)
    except client.exceptions.ParameterNotFound:
        log.warning("get_parameter: Unable to locate name: %s", name)
        return False
    value = response['Parameter']['Value']
    return json.loads(value) if resp_json else value
def discover(self):
    '''
    Gather the information of currently declared servers.

    :return: mapping of publisher address -> list of decoded master announcements
    '''
    self.log.info("Looking for a server discovery")
    replies = {}
    try:
        self._query()
        self._collect_masters_map(replies)
    except socket.timeout:
        self.log.info('No master has been discovered.')
    masters = {}
    for addr, payloads in replies.items():
        for payload in payloads:
            # Several masters can run at the same machine.
            text = payload.decode()
            if not text.startswith(self.signature):
                continue
            text = text.split(self.signature)[-1]
            self.log.debug("Service announcement at '{0}'. Response: '{1}'".format("{}:{}".format(*addr), text))
            if ':E:' in text:
                err = text.split(':E:')[-1]
                self.log.error('Error response from the service publisher at {0}: {1}'.format(addr, err))
                if "timestamp" in err:
                    self.log.error('Publisher sent shifted timestamp from {0}'.format(addr))
            else:
                # Non-error announcement: payload after ':@:' is the master info.
                masters.setdefault(addr, []).append(json.loads(text.split(':@:')[-1]))
    return masters
def _interface_format(interfaces_data):
    '''
    Formats interfaces from SLS file into valid JSON usable for zabbix API.
    Completes JSON with default values.

    :param interfaces_data: list of interfaces data from SLS file
    :return: list of interface dicts, sorted so main interfaces come first
    '''
    if not interfaces_data:
        return list()

    interface_attrs = ('ip', 'dns', 'main', 'type', 'useip', 'port')
    # JSON round-trip normalizes OrderedDicts from the SLS renderer into plain types.
    interfaces_json = loads(dumps(interfaces_data))
    interfaces_dict = dict()

    for interface in interfaces_json:
        for intf in interface:
            intf_name = intf
            interfaces_dict[intf_name] = dict()
            for intf_val in interface[intf]:
                for key, value in intf_val.items():
                    if key in interface_attrs:
                        interfaces_dict[intf_name][key] = value

    interfaces_list = list()
    # Interface type name -> [zabbix type id, default port]
    interface_ports = {
        'agent': ['1', '10050'],
        'snmp': ['2', '161'],
        'ipmi': ['3', '623'],
        'jmx': ['4', '12345'],
    }

    for key, value in interfaces_dict.items():
        # Load interface values or default values
        interface_type = interface_ports[value['type'].lower()][0]
        main = '1' if six.text_type(value.get(
            'main', 'true')).lower() == 'true' else '0'
        useip = '1' if six.text_type(value.get(
            'useip', 'true')).lower() == 'true' else '0'
        interface_ip = value.get('ip', '')
        dns = value.get('dns', key)
        port = six.text_type(
            value.get('port', interface_ports[value['type'].lower()][1]))

        interfaces_list.append({
            'type': interface_type,
            'main': main,
            'useip': useip,
            'ip': interface_ip,
            'dns': dns,
            'port': port,
        })

    # Fixed: dropped the redundant no-op ``interfaces_list = interfaces_list``.
    # Main interface(s) ('main' == '1') must come first.
    interfaces_list_sorted = sorted(interfaces_list, key=lambda k: k['main'], reverse=True)
    return interfaces_list_sorted
def test_get_salt_call_script(self):
    '''
    Test get salt-call script rendered.

    :return:
    '''
    rendered = thin._get_salt_call('foo', 'bar', py26=[2, 6], py27=[2, 7], py34=[3, 4])
    for line in salt.utils.stringutils.to_str(rendered).split(os.linesep):
        if line.startswith('namespaces = {'):
            namespaces = json.loads(line.replace('namespaces = ', '').strip())
            assert namespaces.get('py26') == [2, 6]
            assert namespaces.get('py27') == [2, 7]
            assert namespaces.get('py34') == [3, 4]
        if line.startswith('syspaths = '):
            syspaths = json.loads(line.replace('syspaths = ', ''))
            assert syspaths == ['foo', 'bar']
def _load_file_salt_rendered(self, filepath):
    '''
    loads in one test file
    '''
    # use the salt renderer module to interpret jinja and etc
    rendered_tests = _render_file(filepath)
    # use json as a convenient way to convert the OrderedDicts from salt renderer
    converted = loads(dumps(rendered_tests), object_pairs_hook=OrderedDict)
    self.test_dict.update(converted)
    return
def _media_format(medias_data):
    '''
    Formats medias from SLS file into valid JSON usable for zabbix API.
    Completes JSON with default values.

    :param medias_data: list of media data from SLS file
    '''
    if not medias_data:
        return list()

    # JSON round-trip normalizes OrderedDicts from the SLS renderer.
    normalized = loads(dumps(medias_data))
    medias_attr = ('active', 'mediatype', 'period', 'severity', 'sendto')
    media_type = {'mail': 1, 'jabber': 2, 'sms': 3}
    media_severities = ('D', 'H', 'A', 'W', 'I', 'N')

    collected = dict()
    for entry in normalized:
        for name in entry:
            collected[name] = dict()
            for attr_map in entry[name]:
                for attr, attr_value in attr_map.items():
                    if attr in medias_attr:
                        collected[name][attr] = attr_value

    formatted = list()
    for name, settings in collected.items():
        # Load media values or default values
        active = '0' if six.text_type(settings.get(
            'active', 'true')).lower() == 'true' else '1'
        mediatype_sls = six.text_type(settings.get('mediatype', 'mail')).lower()
        mediatypeid = six.text_type(media_type.get(mediatype_sls, 1))
        period = settings.get('period', '1-7,00:00-24:00')
        sendto = settings.get('sendto', name)
        severity_sls = settings.get('severity', 'HD')
        # Build a bitmask string over the severity letters, then convert to decimal.
        severity_bin = ''.join(
            '1' if sev in severity_sls else '0' for sev in media_severities)
        severity = six.text_type(int(severity_bin, 2))

        formatted.append({
            'active': active,
            'mediatypeid': mediatypeid,
            'period': period,
            'sendto': sendto,
            'severity': severity,
        })
    return formatted
def _stdout_parse_json(stdout):
    '''
    Parses JSON output from ovs-vsctl and returns the corresponding object tree.

    Args:
        stdout: Output that shall be parsed.

    Returns:
        Object represented by the output.
    '''
    return _convert_json(json.loads(stdout))
def render(json_data, saltenv="base", sls="", **kws):
    """
    Accepts JSON as a string or as a file object and runs it through the JSON
    parser.

    :rtype: A Python data structure
    """
    if not isinstance(json_data, six.string_types):
        # A file-like object was passed in; read its contents.
        json_data = json_data.read()
    if json_data.startswith("#!"):
        # Strip a leading shebang line, keeping everything after the first newline.
        newline_at = json_data.find("\n")
        json_data = json_data[newline_at + 1:]
    return json.loads(json_data) if json_data.strip() else {}
def render(json_data, saltenv='base', sls='', **kws):
    '''
    Accepts JSON as a string or as a file object and runs it through the JSON
    parser.

    :rtype: A Python data structure
    '''
    if not isinstance(json_data, six.string_types):
        # A file-like object was passed in; read its contents.
        json_data = json_data.read()
    if json_data.startswith('#!'):
        # Strip a leading shebang line, keeping everything after the first newline.
        newline_at = json_data.find('\n')
        json_data = json_data[newline_at + 1:]
    return json.loads(json_data) if json_data.strip() else {}
def add_test_files_for_sls(self, sls_name, check_all=False):
    '''
    Detects states used, caches needed files, and adds to test list

    :param str sls_name: name of the state (or bare test definition) to
        collect .tst files for
    :param bool check_all: when True, load every cached .tst file for the
        state instead of only those matching the SLS path
    '''
    salt_ssh = False
    # NOTE(review): assumes 'pidfile' is a string when present — a missing
    # key yields False here, and `in False` would raise TypeError; confirm
    # __opts__ always carries 'pidfile'.
    if 'running_data/var/run/salt-minion.pid' in __opts__.get('pidfile', False):
        salt_ssh = True
        log.debug('Running on salt-ssh minion. Reading file %s', sls_name)
        # On salt-ssh the state list was shipped ahead of time as a file.
        cp_output_file = os.path.join(__opts__['cachedir'], 'files', self.saltenv, 'cp_output.txt')
        with salt.utils.files.fopen(cp_output_file, 'r') as fp:
            all_states = loads(salt.utils.stringutils.to_unicode(fp.read()))
    else:
        all_states = self._get_states()
    ret = []
    cached_copied_files = []
    if salt_ssh:
        # populate cached_copied_files from sent over file rather than attempting to run cp.cache_dir later
        log.debug('Running on salt-ssh minion. Populating test file results')
        state_copy_file = os.path.join(__opts__['cachedir'], 'files', self.saltenv, sls_name + '.copy')
        try:
            with salt.utils.files.fopen(state_copy_file, 'r') as fp:
                cached_copied_files.extend(loads(salt.utils.stringutils.to_unicode(fp.read())))
        except IOError:
            # likely attempting to find state.nested.copy when file was sent as just state.copy
            sls_name_list = sls_name.split('.')
            sls_root_name = '.'.join(sls_name_list[:-1])
            state_copy_file = os.path.join(__opts__['cachedir'], 'files', self.saltenv, sls_root_name + '.copy')
            with salt.utils.files.fopen(state_copy_file, 'r') as fp:
                cached_copied_files.extend(loads(salt.utils.stringutils.to_unicode(fp.read())))
    if sls_name in all_states:
        if salt_ssh:
            # Low state data was also shipped ahead of time as <name>.low.
            log.debug('Running on salt-ssh minion. Reading file %s', sls_name + '.low')
            state_low_file = os.path.join(__opts__['cachedir'], 'files', self.saltenv, sls_name + '.low')
            with salt.utils.files.fopen(state_low_file, 'r') as fp:
                ret = loads(salt.utils.stringutils.to_unicode(fp.read()))
        else:
            ret = __salt__['state.show_low_sls'](sls_name, saltenv=self.saltenv, test=True)
    else:
        # passed name isn't a state, so we'll assume it is a test definition
        ret = [{'__sls__': sls_name}]
    for low_data in ret:
        if not isinstance(low_data, dict):
            # Malformed low data aborts the whole collection (implicit None return).
            log.error('low data from show_low_sls is not formed as a dict: %s', low_data)
            return
        this_cache_ret = None
        if '__sls__' in low_data:
            # this low data has an SLS path in it
            state_name = low_data['__sls__']
            for sls_path in self._generate_sls_path(state_name):
                this_cache_ret = self._copy_state_files(sls_path, state_name, check_all)
                if this_cache_ret:
                    log.debug('found tests: %s', this_cache_ret)
                    cached_copied_files.extend(this_cache_ret)
            if salt_ssh:
                if check_all:
                    # load all tests for this state on ssh minion
                    tst_files = [file_string for file_string in cached_copied_files if file_string.endswith('.tst')]
                    self.test_files.update(tst_files)
            if not check_all:
                # in check_all case, tests already added
                split_sls = low_data['__sls__'].split('.')
                # Candidate relative paths where a test file for this SLS may live.
                sls_path_names = set([
                    os.path.join(os.sep.join(split_sls), os.path.normpath(self.saltcheck_test_location), 'init.tst'),
                    os.path.join(os.sep.join(split_sls[:len(split_sls) - 1]), os.path.normpath(self.saltcheck_test_location), '{0}.tst'.format(split_sls[-1])),
                    os.path.join(split_sls[0], os.path.normpath(self.saltcheck_test_location), os.sep.join(split_sls[1:-1]), '{0}.tst'.format(split_sls[-1]))
                ])
                # for this state, find matching test files and load them
                cached_copied_files = list(set(cached_copied_files))
                # NOTE(review): this loop removes items from the list it is
                # iterating, which can skip the element following each match —
                # confirm whether consecutive matches are expected here.
                for this_cached_test_file in cached_copied_files:
                    if this_cached_test_file.endswith(tuple(sls_path_names)):
                        self.test_files.add(this_cached_test_file)
                        cached_copied_files.remove(this_cached_test_file)
                        log.debug('Adding .tst file: %s', this_cached_test_file)
def _load_return(ret): return json.loads('\n'.join(ret))
def _ordered_dict_to_dict(probes): '''.''' return loads(dumps(probes))
def add_test_files_for_sls(self, sls_name, check_all=False):
    """
    Detects states used, caches needed files, and adds to test list

    :param str sls_name: name of the state (or bare test definition) to
        collect .tst files for
    :param bool check_all: when True, load every cached .tst file for the
        state instead of only those matching the SLS path
    """
    salt_ssh = False
    # NOTE(review): assumes 'pidfile' is a string when present — a missing
    # key yields False here, and `in False` would raise TypeError; confirm
    # __opts__ always carries 'pidfile'.
    if "running_data/var/run/salt-minion.pid" in __opts__.get(
            "pidfile", False):
        salt_ssh = True
        log.debug("Running on salt-ssh minion. Reading file %s", sls_name)
        # On salt-ssh the state list was shipped ahead of time as a file.
        cp_output_file = os.path.join(__opts__["cachedir"], "files",
                                      self.saltenv, "cp_output.txt")
        with salt.utils.files.fopen(cp_output_file, "r") as fp:
            all_states = loads(salt.utils.stringutils.to_unicode(fp.read()))
    else:
        all_states = self._get_states()
    ret = []
    cached_copied_files = []
    if salt_ssh:
        # populate cached_copied_files from sent over file rather than attempting to run cp.cache_dir later
        log.debug("Running on salt-ssh minion. Populating test file results")
        state_copy_file = os.path.join(__opts__["cachedir"], "files",
                                       self.saltenv, sls_name + ".copy")
        try:
            with salt.utils.files.fopen(state_copy_file, "r") as fp:
                cached_copied_files.extend(
                    loads(salt.utils.stringutils.to_unicode(fp.read())))
        except OSError:
            # likely attempting to find state.nested.copy when file was sent as just state.copy
            sls_name_list = sls_name.split(".")
            sls_root_name = ".".join(sls_name_list[:-1])
            state_copy_file = os.path.join(__opts__["cachedir"], "files",
                                           self.saltenv,
                                           sls_root_name + ".copy")
            with salt.utils.files.fopen(state_copy_file, "r") as fp:
                cached_copied_files.extend(
                    loads(salt.utils.stringutils.to_unicode(fp.read())))
    if sls_name in all_states:
        if salt_ssh:
            # Low state data was also shipped ahead of time as <name>.low.
            log.debug("Running on salt-ssh minion. Reading file %s",
                      sls_name + ".low")
            state_low_file = os.path.join(__opts__["cachedir"], "files",
                                          self.saltenv, sls_name + ".low")
            with salt.utils.files.fopen(state_low_file, "r") as fp:
                ret = loads(salt.utils.stringutils.to_unicode(fp.read()))
        else:
            ret = __salt__["state.show_low_sls"](sls_name,
                                                 saltenv=self.saltenv,
                                                 test=True)
    else:
        # passed name isn't a state, so we'll assume it is a test definition
        ret = [{"__sls__": sls_name}]
    for low_data in ret:
        if not isinstance(low_data, dict):
            # Malformed low data aborts the whole collection (implicit None return).
            log.error(
                "low data from show_low_sls is not formed as a dict: %s",
                low_data)
            return
        this_cache_ret = None
        if "__sls__" in low_data:
            # this low data has an SLS path in it
            state_name = low_data["__sls__"]
            for sls_path in self._generate_sls_path(state_name):
                this_cache_ret = self._copy_state_files(
                    sls_path, state_name, check_all)
                if this_cache_ret:
                    log.debug("found tests: %s", this_cache_ret)
                    cached_copied_files.extend(this_cache_ret)
            if salt_ssh:
                if check_all:
                    # load all tests for this state on ssh minion
                    tst_files = [
                        file_string for file_string in cached_copied_files
                        if file_string.endswith(".tst")
                    ]
                    self.test_files.update(tst_files)
            if not check_all:
                # in check_all case, tests already added
                split_sls = low_data["__sls__"].split(".")
                # Candidate relative paths where a test file for this SLS may live.
                sls_path_names = {
                    os.path.join(
                        os.sep.join(split_sls),
                        os.path.normpath(self.saltcheck_test_location),
                        "init.tst",
                    ),
                    os.path.join(
                        os.sep.join(split_sls[:len(split_sls) - 1]),
                        os.path.normpath(self.saltcheck_test_location),
                        "{}.tst".format(split_sls[-1]),
                    ),
                    os.path.join(
                        split_sls[0],
                        os.path.normpath(self.saltcheck_test_location),
                        os.sep.join(split_sls[1:-1]),
                        "{}.tst".format(split_sls[-1]),
                    ),
                }
                # for this state, find matching test files and load them
                cached_copied_files = list(set(cached_copied_files))
                # NOTE(review): this loop removes items from the list it is
                # iterating, which can skip the element following each match —
                # confirm whether consecutive matches are expected here.
                for this_cached_test_file in cached_copied_files:
                    if this_cached_test_file.endswith(tuple(sls_path_names)):
                        self.test_files.add(this_cached_test_file)
                        cached_copied_files.remove(this_cached_test_file)
                        log.debug("Adding .tst file: %s",
                                  this_cached_test_file)
def _ordered_dict_to_dict(probes): '''Mandatory to be dict type in order to be used in the NAPALM Jinja template.''' return loads(dumps(probes))
def _ordered_dict_to_dict(config): ''' Forced the datatype to dict, in case OrderedDict is used. ''' return loads(dumps(config))