Beispiel #1
0
    def test_decode_to_str(self):
        '''
        Companion to test_decode, they should both be kept up-to-date with one
        another.

        NOTE: This uses the lambda "_s" defined above in the global scope,
        which converts the string/bytestring to a str type.

        Exercises salt.utils.data.decode with to_str=True against
        self.test_data, covering: container-class/tuple preservation on
        and off, keep=True vs. keep=False for undecodable binary (BYTES),
        scalar passthrough, and bare string/bytes/binary inputs.
        '''
        # Expected result of decoding self.test_data with to_str=True:
        # every text value becomes a str, while the undecodable BYTES
        # blob survives untouched because keep=True is passed below.
        expected = [
            _s('unicode_str'),
            _s('питон'), 123, 456.789, True, False, None,
            _s('яйца'), BYTES,
            [123, 456.789,
             _s('спам'), True, False, None,
             _s('яйца'), BYTES],
            (987, 654.321, _s('яйца'), _s('яйца'), None,
             (True, _s('яйца'), BYTES)), {
                 _s('str_key'): _s('str_val'),
                 None: True,
                 123: 456.789,
                 _s('яйца'): BYTES,
                 _s('subdict'): {
                     _s('unicode_key'):
                     _s('яйца'),
                     _s('tuple'):
                     (123, _s('hello'), _s('world'), True, _s('яйца'), BYTES),
                     _s('list'): [456,
                                  _s('спам'), False,
                                  _s('яйца'), BYTES]
                 }
             },
            OrderedDict([(_s('foo'), _s('bar')), (123, 456),
                         (_s('яйца'), BYTES)])
        ]

        # keep=True retains undecodable binary; the preserve_* flags keep
        # the original container classes (OrderedDict, tuple) intact.
        ret = salt.utils.data.decode(self.test_data,
                                     keep=True,
                                     normalize=True,
                                     preserve_dict_class=True,
                                     preserve_tuples=True,
                                     to_str=True)
        self.assertEqual(ret, expected)

        if six.PY3:
            # The binary data in the data structure should fail to decode, even
            # using the fallback, and raise an exception.
            self.assertRaises(UnicodeDecodeError,
                              salt.utils.data.decode,
                              self.test_data,
                              keep=False,
                              normalize=True,
                              preserve_dict_class=True,
                              preserve_tuples=True,
                              to_str=True)

        # Now munge the expected data so that we get what we would expect if we
        # disable preservation of dict class and tuples
        # (tuples decode to lists, the OrderedDict to a plain dict).
        expected[10] = [
            987, 654.321,
            _s('яйца'),
            _s('яйца'), None, [True, _s('яйца'), BYTES]
        ]
        expected[11][_s('subdict')][_s('tuple')] = [
            123, _s('hello'),
            _s('world'), True,
            _s('яйца'), BYTES
        ]
        expected[12] = {_s('foo'): _s('bar'), 123: 456, _s('яйца'): BYTES}

        ret = salt.utils.data.decode(self.test_data,
                                     keep=True,
                                     normalize=True,
                                     preserve_dict_class=False,
                                     preserve_tuples=False,
                                     to_str=True)
        self.assertEqual(ret, expected)

        # Now test single non-string, non-data-structure items, these should
        # return the same value when passed to this function
        for item in (123, 4.56, True, False, None):
            log.debug('Testing decode of %s', item)
            self.assertEqual(salt.utils.data.decode(item, to_str=True), item)

        # Test single strings (not in a data structure)
        self.assertEqual(salt.utils.data.decode('foo', to_str=True), _s('foo'))
        self.assertEqual(salt.utils.data.decode(_b('bar'), to_str=True),
                         _s('bar'))

        # Test binary blob
        # keep=True hands the blob back unchanged ...
        self.assertEqual(salt.utils.data.decode(BYTES, keep=True, to_str=True),
                         BYTES)
        if six.PY3:
            # ... while keep=False makes the failed decode fatal.
            self.assertRaises(UnicodeDecodeError,
                              salt.utils.data.decode,
                              BYTES,
                              keep=False,
                              to_str=True)
Beispiel #2
0
def ovh_set_nss(domain, dnss, dnssec=False):
    '''
    Point ``domain`` at the given nameservers through the OVH API.

    domain
        domain name to configure
    dnss
        mapping of nameserver hostname -> list of glue IPs; the first IP
        of each list is compared against the currently registered one.
        Must be non-empty.
    dnssec
        desired DNSSEC state for the zone

    Returns an OrderedDict of per-nameserver API results; nameservers
    that are already correct are collected under ``'skipped'``.

    Raises ValueError when ``dnss`` is empty.
    '''
    register_checks()
    log.info('{0}: Setting up DNS'.format(domain))
    if not dnss:
        raise ValueError('no dns for {0}'.format(domain))
    crets = OrderedDict()
    client = __salt__['mc_provider.ovh_client'](domain=domain)
    # Reconcile the zone's DNSSEC flag with the requested state.
    dnssec_ep = '/domain/zone/{0}/dnssec'.format(domain)
    dnssec_status = client.get(dnssec_ep)['status'] != 'disabled'
    if dnssec:
        if not dnssec_status:
            client.post(dnssec_ep)
            # BUG FIX: was log.info('{1}: activated dnssec') with no
            # .format() call, logging the literal placeholder.
            log.info('{0}: activated dnssec'.format(domain))
    else:
        if dnssec_status:
            client.delete(dnssec_ep)
            # BUG FIX: same unformatted placeholder as above.
            log.info('{0}: deactivated dnssec'.format(domain))
    hosted_status = client.get('/domain/{0}'.format(domain))
    if hosted_status['nameServerType'] == 'hosted':
        # We manage the NS records ourselves: switch to external mode.
        client.put('/domain/{0}'.format(domain), nameServerType='external')
    current_nss = {}
    todelete = set()
    skip = []
    crets['skipped'] = OrderedDict()
    for nsid in client.get('/domain/{0}/nameServer'.format(domain)):
        ns = client.get('/domain/{0}/nameServer/{1}'.format(domain, nsid))
        host = ns['host']
        dns = current_nss.setdefault(host, {})
        cns = current_nss[host] = __salt__['mc_utils.dictupdate'](dns, ns)
        if host not in dnss:
            # No longer wanted at all.
            todelete.add(nsid)
        if host in dnss and cns['ip']:
            if cns['ip'] == dnss[host][0]:
                # Already registered with the right glue IP: keep it.
                crets['skipped'][host] = cns
                skip.append(host)
            else:
                # Wrong IP: schedule for deletion so it is recreated below.
                todelete.add(nsid)

    def remove_remaining(remaining, verbose=False):
        '''
        Try to delete every nameserver id in ``remaining``; return the ids
        still pending.  OVH can refuse deletions until replacement
        nameservers exist, so this is retried between creations.

        BUG FIX: the parameter was named ``log``, shadowing the module
        logger with a bool — every ``log.info``/``log.error`` call below
        raised AttributeError (swallowed by the except on the False path,
        fatal on the True path) and ids were never removed from the set.
        '''
        can_delete = True
        for ns in [a for a in remaining]:
            if not can_delete:
                continue
            ret = client.get('/domain/{0}/nameServer/{1}'.format(domain, ns))
            if ret['toDelete']:
                # Deletion already scheduled on OVH's side.
                continue
            try:
                client.delete('/domain/{0}/nameServer/{1}'.format(domain, ns))
                log.info('{1}: deleted ns: {0}'.format(ns, domain))
                remaining.remove(ns)
            except (Exception, ) as exc:
                if verbose:
                    print(exc)
                    log.error(traceback.format_exc())
                # Assume OVH will refuse the rest as well for now.
                can_delete = False
                continue
        return remaining

    todelete = remove_remaining(todelete)
    for ns, data in six.iteritems(dnss):
        if ns in skip:
            continue
        nameservers = [{'host': ns, 'ip': a} for a in data]
        crets['ns_{0}'.format(ns)] = ret = client.post(
            '/domain/{0}/nameServer'.format(domain), nameServer=nameservers)
        if ret['status'] != 'todo':
            log.error("{0} unexpected result".format(ns))
            log.error(pprint.pformat(ret))
        else:
            log.info('{1}: Created ns: {0}'.format(ns, domain))
        # try to delete remaining deleted servers
        todelete = remove_remaining(todelete)
    todelete = remove_remaining(todelete, verbose=True)
    if todelete:
        # BUG FIX: was formatting the leaked loop variable ``ns`` instead
        # of the domain.
        log.error('{0}: {1} were not deleted'.format(domain, todelete))
    return crets
Beispiel #3
0
def gandi_glues(domain, dnss):
    '''
    Reconcile the glue records (host objects) of ``domain`` at Gandi with
    the wanted ``dnss`` mapping (nameserver name -> list of IPs), then
    set the domain's nameservers.

    Returns an OrderedDict with per-phase API results under 'add',
    'update', 'ns' and 'delete', plus an 'error' boolean; each later
    phase is skipped as soon as one phase fails.
    '''
    register_checks()
    _s = __salt__
    api, apikey = _s['mc_provider.gandi_client'](domain=domain)
    existing = api.domain.host.list(apikey, domain)
    ret = OrderedDict()
    for section in ['add', 'update', 'delete', 'ns']:
        ret[section] = OrderedDict()
    toadd = {}
    toupdate = {}
    todelete = {}
    # Glues that are no longer wanted at all.
    for glue in existing:
        if glue['name'] in dnss:
            continue
        stale = todelete.setdefault(glue['name'], [])
        for ip in glue['ips']:
            if ip not in stale:
                stale.append(ip)
    # Glues whose IP set drifted from the wanted one.
    for glue in existing:
        if glue['name'] not in dnss:
            continue
        wanted = sorted(dnss[glue['name']])
        if wanted != sorted(glue['ips']):
            toupdate[glue['name']] = wanted
    # Wanted glues that do not exist yet.
    known = [glue['name'] for glue in existing]
    for dns in dnss:
        if dns not in known:
            toadd[dns] = dnss[dns][:]

    def _attempt(action):
        # Run one API action; keep Ctrl-C fatal, flag anything else.
        try:
            action()
            return False
        except KeyboardInterrupt:
            raise
        except Exception:
            print(traceback.format_exc())
            return True

    error = False
    for ns, ips in six.iteritems(toadd):
        def _create(ns=ns, ips=ips):
            ret['add'][ns] = api.domain.host.create(apikey, ns, ips)
            assert ret['add'][ns]['errortype'] is None
        if _attempt(_create):
            error = True
    if not error:
        for ns, ips in six.iteritems(toupdate):
            def _update(ns=ns, ips=ips):
                ret['update'][ns] = api.domain.host.update(apikey, ns, ips)
                assert ret['update'][ns]['errortype'] is None
            if _attempt(_update):
                error = True
    if not error:
        def _set_ns():
            ret['ns'] = api.domain.nameservers.set(apikey, domain,
                                                   [a for a in dnss])
            assert ret['ns']['errortype'] is None
        if _attempt(_set_ns):
            error = True
    if not error:
        for ns in todelete:
            def _delete(ns=ns):
                ret['delete'][ns] = api.domain.host.delete(apikey, ns)
                assert ret['delete'][ns]['errortype'] is None
            if _attempt(_delete):
                error = True
    ret['error'] = error
    return ret
Beispiel #4
0
    def process_results(self, rows):
        '''
            This function takes a list of database results and iterates over,
            merging them into a dict form.

            Each row in ``rows`` is indexed positionally: the first
            ``self.depth`` fields become nested dict keys under
            ``self.focus`` and the remaining fields become leaf data.
            Levels listed in ``self.with_lists`` are recorded during the
            walk (keyed by ``id()`` of the containing dict) and converted
            from dicts to lists in a final inner-out pass.
            ``self.as_list`` makes colliding leaf values accumulate into
            lists instead of being overwritten; ``self.ignore_null``
            suppresses storing None leaves.

            NOTE(review): the exact types of the ``self.*`` attributes are
            not visible in this chunk; the description above is inferred
            from how they are indexed here — confirm against __init__.
        '''
        # listify maps id(dict) -> keys whose values must become lists;
        # listify_dicts maps the same id back to the dict object itself.
        listify = OrderedDict()
        listify_dicts = OrderedDict()
        for ret in rows:
            # crd is the Current Return Data level, to make this non-recursive.
            crd = self.focus
            # Walk and create dicts above the final layer
            for i in range(0, self.depth - 1):
                # At the end we'll use listify to find values to make a list of
                if i + 1 in self.with_lists:
                    if id(crd) not in listify:
                        listify[id(crd)] = []
                        listify_dicts[id(crd)] = crd
                    if ret[i] not in listify[id(crd)]:
                        listify[id(crd)].append(ret[i])
                if ret[i] not in crd:
                    # Key missing
                    crd[ret[i]] = {}
                    crd = crd[ret[i]]
                else:
                    # Check type of collision
                    ty = type(crd[ret[i]])
                    if ty is list:
                        # Already made list
                        temp = {}
                        crd[ret[i]].append(temp)
                        crd = temp
                    elif ty is not dict:
                        # Not a list, not a dict
                        if self.as_list:
                            # Make list
                            temp = {}
                            crd[ret[i]] = [crd[ret[i]], temp]
                            crd = temp
                        else:
                            # Overwrite
                            crd[ret[i]] = {}
                            crd = crd[ret[i]]
                    else:
                        # dict, descend.
                        crd = crd[ret[i]]

            # If this test is true, the penultimate field is the key
            if self.depth == self.num_fields - 1:
                nk = self.num_fields - 2  # Aka, self.depth-1
                # Should we and will we have a list at the end?
                if ((self.as_list and (ret[nk] in crd))
                        or (nk + 1 in self.with_lists)):
                    if ret[nk] in crd:
                        if not isinstance(crd[ret[nk]], list):
                            crd[ret[nk]] = [crd[ret[nk]]]
                        # if it's already a list, do nothing
                    else:
                        crd[ret[nk]] = []
                    crd[ret[nk]].append(ret[self.num_fields - 1])
                else:
                    # Plain scalar leaf (unless None is being ignored).
                    if not self.ignore_null or ret[self.num_fields -
                                                   1] is not None:
                        crd[ret[nk]] = ret[self.num_fields - 1]
            else:
                # Otherwise, the field name is the key but we have a spare.
                # The spare results because of {c: d} vs {c: {"d": d, "e": e }}
                # So, make that last dict
                if ret[self.depth - 1] not in crd:
                    crd[ret[self.depth - 1]] = {}
                # This bit doesn't escape listify
                if self.depth in self.with_lists:
                    if id(crd) not in listify:
                        listify[id(crd)] = []
                        listify_dicts[id(crd)] = crd
                    if ret[self.depth - 1] not in listify[id(crd)]:
                        listify[id(crd)].append(ret[self.depth - 1])
                crd = crd[ret[self.depth - 1]]
                # Now for the remaining keys, we put them into the dict
                for i in range(self.depth, self.num_fields):
                    nk = self.field_names[i]
                    # Listify
                    if i + 1 in self.with_lists:
                        if id(crd) not in listify:
                            listify[id(crd)] = []
                            listify_dicts[id(crd)] = crd
                        if nk not in listify[id(crd)]:
                            listify[id(crd)].append(nk)
                    # Collision detection
                    if self.as_list and (nk in crd):
                        # Same as before...
                        if isinstance(crd[nk], list):
                            crd[nk].append(ret[i])
                        else:
                            crd[nk] = [crd[nk], ret[i]]
                    else:
                        if not self.ignore_null or ret[i] is not None:
                            crd[nk] = ret[i]
        # Get key list and work backwards.  This is inner-out processing
        ks = list(listify_dicts.keys())
        ks.reverse()
        for i in ks:
            d = listify_dicts[i]
            for k in listify[i]:
                if isinstance(d[k], dict):
                    d[k] = list(d[k].values())
                elif isinstance(d[k], list):
                    d[k] = [d[k]]
Beispiel #5
0
def list_vms(search=None, verbose=False):
    '''
    List all vms

    search : string
        filter vms, see the execution module
    verbose : boolean
        print additional information about the vm

    CLI Example:

    .. code-block:: bash

        salt-run vmadm.list
        salt-run vmadm.list search='type=KVM'
        salt-run vmadm.list verbose=True
    '''
    # verbose output is keyed by vm uuid, terse output is a plain list
    ret = OrderedDict() if verbose else []
    client = salt.client.get_local_client(__opts__['conf_file'])
    try:
        vmadm_args = {
            'order': 'uuid,alias,hostname,state,type,cpu_cap,vcpus,ram',
        }
        if search:
            vmadm_args['search'] = search
        # Query every physical SmartOS compute node.
        for reply in client.cmd_iter('G@virtual:physical and G@os:smartos',
                                     'vmadm.list',
                                     kwarg=vmadm_args,
                                     tgt_type='compound'):
            if not reply:
                continue
            node = next(six.iterkeys(reply))
            payload = reply[node]
            # Skip malformed replies (errors, timeouts, ...).
            if not isinstance(payload, dict):
                continue
            if 'ret' not in payload or not isinstance(payload['ret'], dict):
                continue
            for vm in payload['ret']:
                vmcfg = payload['ret'][vm]
                if not verbose:
                    ret.append(vm)
                    continue
                info = OrderedDict()
                info['hostname'] = vmcfg['hostname']
                info['alias'] = vmcfg['alias']
                info['computenode'] = node
                info['state'] = vmcfg['state']
                resources = OrderedDict()
                resources['memory'] = vmcfg['ram']
                if vmcfg['type'] == 'KVM':
                    resources['cpu'] = "{0:.2f}".format(int(vmcfg['vcpus']))
                elif vmcfg['cpu_cap'] != '':
                    resources['cpu'] = "{0:.2f}".format(
                        int(vmcfg['cpu_cap']) / 100)
                info['resources'] = resources
                ret[vm] = info
    except SaltClientError as client_error:
        return "{0}".format(client_error)

    if not verbose:
        ret = sorted(ret)

    return ret
Beispiel #6
0
def diff(name_a, name_b=None, **kwargs):
    """
    Display the difference between a snapshot of a given filesystem and
    another snapshot of that filesystem from a later time or the current
    contents of the filesystem.

    name_a : string
        name of snapshot
    name_b : string
        (optional) name of snapshot or filesystem
    show_changetime : boolean
        display the path's inode change time as the first column of output. (default = True)
    show_indication : boolean
        display an indication of the type of file. (default = True)
    parsable : boolean
        if true we don't parse the timestamp to a more readable date (default = True)

    .. versionadded:: 2016.3.0

    CLI Example:

    .. code-block:: bash

        salt '*' zfs.diff myzpool/mydataset@yesterday myzpool/mydataset

    """
    ## Configure command
    # NOTE: -H is always passed; the timestamp and file-type columns are
    # opt-out via the show_* kwargs.
    flags = ["-H"]
    if kwargs.get("show_changetime", True):
        flags.append("-t")
    if kwargs.get("show_indication", True):
        flags.append("-F")

    # NOTE: diff either against a second snapshot or the live dataset
    target = [name_a]
    if name_b:
        target.append(name_b)

    ## Diff filesystem/snapshot
    res = __salt__["cmd.run_all"](
        __utils__["zfs.zfs_command"](
            command="diff",
            flags=flags,
            target=target,
        ),
        python_shell=False,
    )

    if res["retcode"] != 0:
        return __utils__["zfs.parse_command_result"](res)

    # On success, either hand back the raw lines or rewrite the leading
    # timestamp column into a readable date.
    if kwargs.get("parsable", True) or not kwargs.get("show_changetime", True):
        return res["stdout"].splitlines()

    ret = OrderedDict()
    for line in res["stdout"].splitlines():
        fields = line.split()
        timestamp = __utils__["dateutils.strftime"](
            fields[0], "%Y-%m-%d.%H:%M:%S.%f")
        ret[timestamp] = "\t\t".join(fields[1:])
    return ret
Beispiel #7
0
def list_all(prefix=None, app=None, owner=None, description_contains=None,
             name_not_contains=None, profile="splunk"):
    '''
    Get all splunk search details. Produces results that can be used to create
    an sls file.

    if app or owner are specified, results will be limited to matching saved
    searches.

    if description_contains is specified, results will be limited to those
    where "description_contains in description" is true if name_not_contains is
    specified, results will be limited to those where "name_not_contains not in
    name" is true.

    If prefix parameter is given, alarm names in the output will be prepended
    with the prefix; alarms that have the prefix will be skipped. This can be
    used to convert existing alarms to be managed by salt, as follows:

    CLI example:

            1. Make a "backup" of all existing searches
                $ salt-call splunk_search.list_all --out=txt | sed "s/local: //" > legacy_searches.sls

            2. Get all searches with new prefixed names
                $ salt-call splunk_search.list_all "prefix=**MANAGED BY SALT** " --out=txt | sed "s/local: //" > managed_searches.sls

            3. Insert the managed searches into splunk
                $ salt-call state.sls managed_searches.sls

            4.  Manually verify that the new searches look right

            5.  Delete the original searches
                $ sed s/present/absent/ legacy_searches.sls > remove_legacy_searches.sls
                $ salt-call state.sls remove_legacy_searches.sls

            6.  Get all searches again, verify no changes
                $ salt-call splunk_search.list_all --out=txt | sed "s/local: //" > final_searches.sls
                $ diff final_searches.sls managed_searches.sls
    '''
    client = _get_splunk(profile)

    # splunklib doesn't provide the default settings for saved searches,
    # so create a bare search, read its content back as the defaults, and
    # delete it again (removing any leftover from a previous run first).
    name = "splunk_search.list_all get defaults"
    try:
        client.saved_searches.delete(name)
    except Exception:
        pass
    search = client.saved_searches.create(name, search="nothing")
    defaults = dict(search.content)
    client.saved_searches.delete(name)

    # stuff that splunk returns but that you should not attempt to set.
    # cf http://dev.splunk.com/view/python-sdk/SP-CAAAEK2
    readonly_keys = {
        "triggered_alert_count",
        "action.email",
        "action.populate_lookup",
        "action.rss",
        "action.script",
        "action.summary_index",
        "qualifiedSearch",
        "next_scheduled_time",
    }

    results = OrderedDict()
    # iterate the saved searches sorted by name for stable output
    for name, search in sorted((s.name, s) for s in client.saved_searches):
        if app and search.access.app != app:
            continue
        if owner and search.access.owner != owner:
            continue
        if name_not_contains and name_not_contains in name:
            continue
        if prefix:
            if name.startswith(prefix):
                # already managed by salt: skip it
                continue
            name = prefix + name
        # the name always goes first in the output
        d = [{"name": name}]
        # add the remaining splunk settings, ignoring read-only keys,
        # display settings, empty values and anything matching a default
        description = ''
        for k, v in sorted(search.content.items()):
            if k in readonly_keys or k.startswith("display."):
                continue
            if not v:
                continue
            if k in defaults and defaults[k] == v:
                continue
            d.append({k: v})
            if k == 'description':
                description = v
        if description_contains and description_contains not in description:
            continue
        results["manage splunk search " + name] = {"splunk_search.present": d}

    return salt.utils.yaml.safe_dump(results, default_flow_style=False,
                                     width=120)
Beispiel #8
0
Author: Bo Maryniuk <*****@*****.**>
'''

from __future__ import absolute_import
from salt.utils.odict import OrderedDict
import os
import logging
log = logging.getLogger(__name__)


__virtualname__ = 'pkgset'

PKG_PLUGINS = OrderedDict([
        ("/usr/lib/zypp/plugins/commit/venv-zyppnotify", "/var/cache/venv-salt-minion/rpmdb.cookie"),
        ("/usr/share/yum-plugins/venv-yumnotify.py",     "/var/cache/venv-salt-minion/rpmdb.cookie"),
        ("/usr/bin/venv-dpkgnotify",                     "/var/cache/venv-salt-minion/dpkg.cookie"),
        ("/usr/lib/zypp/plugins/commit/zyppnotify",      "/var/cache/salt/minion/rpmdb.cookie"),
        ("/usr/share/yum-plugins/yumnotify.py",          "/var/cache/salt/minion/rpmdb.cookie"),
        ("/usr/bin/dpkgnotify",                          "/var/cache/salt/minion/dpkg.cookie")
    ])
COOKIE_PATH = None


def __virtual__():
    return any(
               os.path.exists(plug) for plug in PKG_PLUGINS
           ) and __virtualname__ or False


def validate(config):
    '''
    Validate the beacon configuration. A "cookie" file path is mandatory.
Beispiel #9
0
def managed(name, entries, connect_spec=None):
    '''Ensure the existence (or not) of LDAP entries and their attributes

    Example:

    .. code-block:: yaml

        ldapi:///:
          ldap.managed:
            - connect_spec:
                bind:
                  method: sasl

            - entries:

              # make sure the entry doesn't exist
              - cn=foo,ou=users,dc=example,dc=com:
                - delete_others: True

              # make sure the entry exists with only the specified
              # attribute values
              - cn=admin,dc=example,dc=com:
                - delete_others: True
                - replace:
                    cn:
                      - admin
                    description:
                      - LDAP administrator
                    objectClass:
                      - simpleSecurityObject
                      - organizationalRole
                    userPassword:
                      - {{pillar.ldap_admin_password}}

              # make sure the entry exists, its olcRootDN attribute
              # has only the specified value, the olcRootDN attribute
              # doesn't exist, and all other attributes are ignored
              - 'olcDatabase={1}hdb,cn=config':
                - replace:
                    olcRootDN:
                      - cn=admin,dc=example,dc=com
                    # the admin entry has its own password attribute
                    olcRootPW: []

              # note the use of 'default'.  also note how you don't
              # have to use list syntax if there is only one attribute
              # value
              - cn=foo,ou=users,dc=example,dc=com:
                - delete_others: True
                - default:
                    userPassword: changeme
                    shadowLastChange: 0
                    # keep sshPublicKey if present, but don't create
                    # the attribute if it is missing
                    sshPublicKey: []
                - replace:
                    cn: foo
                    uid: foo
                    uidNumber: 1000
                    gidNumber: 1000
                    gecos: Foo Bar
                    givenName: Foo
                    sn: Bar
                    homeDirectory: /home/foo
                    loginShell: /bin/bash
                    objectClass:
                      - inetOrgPerson
                      - posixAccount
                      - top
                      - ldapPublicKey
                      - shadowAccount

    :param name:
        The URL of the LDAP server.  This is ignored if
        ``connect_spec`` is either a connection object or a dict with
        a ``'url'`` entry.

    :param entries:
        A description of the desired state of zero or more LDAP
        entries.

        ``entries`` is an iterable of dicts.  Each of these dict's
        keys are the distinguished names (DNs) of LDAP entries to
        manage.  Each of these dicts is processed in order.  A later
        dict can reference an LDAP entry that was already mentioned in
        an earlier dict, which makes it possible for later dicts to
        enhance or alter the desired state of an LDAP entry.

        The DNs are mapped to a description of the LDAP entry's
        desired state.  These LDAP entry descriptions are themselves
        iterables of dicts.  Each dict in the iterable is processed in
        order.  They contain directives controlling the entry's state.
        The key names the directive type and the value is state
        information for the directive.  The specific structure of the
        state information depends on the directive type.

        The structure of ``entries`` looks like this::

            [{dn1: [{directive1: directive1_state,
                     directive2: directive2_state},
                    {directive3: directive3_state}],
              dn2: [{directive4: directive4_state,
                     directive5: directive5_state}]},
             {dn3: [{directive6: directive6_state}]}]

        These are the directives:

        * ``'delete_others'``
            Boolean indicating whether to delete attributes not
            mentioned in this dict or any of the other directive
            dicts for this DN.  Defaults to ``False``.

            If you don't want to delete an attribute if present, but
            you also don't want to add it if it is missing or modify
            it if it is present, you can use either the ``'default'``
            directive or the ``'add'`` directive with an empty value
            list.

        * ``'default'``
            A dict mapping an attribute name to an iterable of default
            values for that attribute.  If the attribute already
            exists, it is left alone.  If not, it is created using the
            given list of values.

            An empty value list is useful when you don't want to
            create an attribute if it is missing but you do want to
            preserve it if the ``'delete_others'`` key is ``True``.

        * ``'add'``
            Attribute values to add to the entry.  This is a dict
            mapping an attribute name to an iterable of values to add.

            An empty value list is useful when you don't want to
            create an attribute if it is missing but you do want to
            preserve it if the ``'delete_others'`` key is ``True``.

        * ``'delete'``
            Attribute values to remove from the entry.  This is a dict
            mapping an attribute name to an iterable of values to
            delete from the attribute.  If the iterable is empty, all
            of the attribute's values are deleted.

        * ``'replace'``
            Attributes to replace.  This is a dict mapping an
            attribute name to an iterable of values.  Any existing
            values for the attribute are deleted, then the given
            values are added.  The iterable may be empty.

        In the above directives, the iterables of attribute values may
        instead be ``None``, in which case an empty list is used, or a
        scalar such as a string or number, in which case a new list
        containing the scalar is used.

        Note that if all attribute values are removed from an entry,
        the entire entry is deleted.

    :param connect_spec:
        See the description of the ``connect_spec`` parameter of the
        :py:func:`ldap3.connect <salt.modules.ldap3.connect>` function
        in the :py:mod:`ldap3 <salt.modules.ldap3>` execution module.
        If this is a dict and the ``'url'`` entry is not specified,
        the ``'url'`` entry is set to the value of the ``name``
        parameter.

    :returns:
        A dict with the following keys:

        * ``'name'``
            This is the same object passed to the ``name`` parameter.

        * ``'changes'``
            This is a dict describing the changes made (or, in test
            mode, the changes that would have been attempted).  If no
            changes were made (or no changes would have been
            attempted), then this dict is empty.  Only successful
            changes are included.

            Each key is a DN of an entry that was changed (or would
            have been changed).  Entries that were not changed (or
            would not have been changed) are not included.  The value
            is a dict with two keys:

            * ``'old'``
                The state of the entry before modification.  If the
                entry did not previously exist, this key maps to
                ``None``.  Otherwise, the value is a dict mapping each
                of the old entry's attributes to a list of its values
                before any modifications were made.  Unchanged
                attributes are excluded from this dict.

            * ``'new'``
                The state of the entry after modification.  If the
                entry was deleted, this key maps to ``None``.
                Otherwise, the value is a dict mapping each of the
                entry's attributes to a list of its values after the
                modifications were made.  Unchanged attributes are
                excluded from this dict.

            Example ``'changes'`` dict where a new entry was created
            with a single attribute containing two values::

                {'dn1': {'old': None,
                         'new': {'attr1': ['val1', 'val2']}}}

            Example ``'changes'`` dict where a new attribute was added
            to an existing entry::

                {'dn1': {'old': {},
                         'new': {'attr2': ['val3']}}}

        * ``'result'``
            One of the following values:

            * ``True`` if no changes were necessary or if all changes
              were applied successfully.
            * ``False`` if at least one change was unable to be applied.
            * ``None`` if changes would be applied but it is in test
              mode.
    '''
    if connect_spec is None:
        connect_spec = {}
    try:
        connect_spec.setdefault('url', name)
    except AttributeError:
        # already a connection object
        pass

    connect = __salt__['ldap3.connect']

    # hack to get at the ldap3 module to access the ldap3.LDAPError
    # exception class.  https://github.com/saltstack/usystem/issues/27578
    ldap3 = inspect.getmodule(connect)

    with connect(connect_spec) as l:

        old, new = _process_entries(l, entries)

        # collect all of the affected entries (only the key is
        # important in this dict; would have used an OrderedSet if
        # there was one)
        dn_set = OrderedDict()
        dn_set.update(old)
        dn_set.update(new)

        # do some cleanup
        dn_to_delete = set()
        for dn in dn_set:
            o = old.get(dn, {})
            n = new.get(dn, {})
            for x in o, n:
                to_delete = set()
                for attr, vals in six.iteritems(x):
                    if not len(vals):
                        # clean out empty attribute lists
                        to_delete.add(attr)
                for attr in to_delete:
                    del x[attr]
            if o == n:
                # clean out unchanged entries
                dn_to_delete.add(dn)
        for dn in dn_to_delete:
            for x in old, new:
                x.pop(dn, None)
            del dn_set[dn]

        ret = {
            'name': name,
            'changes': {},
            'result': None,
            'comment': '',
        }

        if old == new:
            ret['comment'] = 'LDAP entries already set'
            ret['result'] = True
            return ret

        if __opts__['test']:
            ret['comment'] = 'Would change LDAP entries'
            changed_old = old
            changed_new = new
            success_dn_set = dn_set
        else:
            # execute the changes
            changed_old = OrderedDict()
            changed_new = OrderedDict()
            # assume success; these will be changed on error
            ret['result'] = True
            ret['comment'] = 'Successfully updated LDAP entries'
            errs = []
            success_dn_set = OrderedDict()
            for dn in dn_set:
                o = old.get(dn, {})
                n = new.get(dn, {})

                try:
                    # perform the operation
                    if len(o):
                        if len(n):
                            op = 'modify'
                            assert o != n
                            __salt__['ldap3.change'](l, dn, o, n)
                        else:
                            op = 'delete'
                            __salt__['ldap3.delete'](l, dn)
                    else:
                        op = 'add'
                        assert len(n)
                        __salt__['ldap3.add'](l, dn, n)

                    # update these after the op in case an exception
                    # is raised
                    changed_old[dn] = o
                    changed_new[dn] = n
                    success_dn_set[dn] = True
                except ldap3.LDAPError as err:
                    log.exception('failed to %s entry %s (%s)', op, dn, err)
                    errs.append((op, dn, err))
                    continue

            if len(errs):
                ret['result'] = False
                ret['comment'] = 'failed to ' \
                                 + ', '.join((op + ' entry ' + dn + '(' + six.text_type(err) + ')'
                                              for op, dn, err in errs))

    # set ret['changes'].  filter out any unchanged attributes, and
    # convert the value sets to lists before returning them to the
    # user (sorted for easier comparisons)
    for dn in success_dn_set:
        o = changed_old.get(dn, {})
        n = changed_new.get(dn, {})
        changes = {}
        ret['changes'][dn] = changes
        for x, xn in ((o, 'old'), (n, 'new')):
            if not len(x):
                changes[xn] = None
                continue
            changes[xn] = dict(((attr, sorted(vals))
                                for attr, vals in six.iteritems(x)
                                if o.get(attr, ()) != n.get(attr, ())))

    return ret
Beispiel #10
0
class DataTestCase(TestCase):
    '''
    Tests for the generic data-manipulation helpers in salt.utils.data.

    NOTE: ``_b`` is a lambda defined above in the global scope, which encodes
    a string to a bytestring, assuming utf-8.
    '''
    # Heterogeneous sample exercised by the decode/encode round-trip tests:
    # unicode, bytes, numbers, booleans, None, nested list/tuple/dict and an
    # OrderedDict.  The index-based munging in test_decode/test_encode relies
    # on this exact ordering.
    test_data = [
        'unicode_str',
        _b('питон'), 123, 456.789, True, False, None,
        [123, 456.789, _b('спам'), True, False, None],
        (987, 654.321, _b('яйца'), None, (True, False)), {
            _b('str_key'): _b('str_val'),
            None: True,
            123: 456.789,
            _b('subdict'): {
                'unicode_key': 'unicode_val',
                _b('tuple'): (123, 'hello', _b('world'), True),
                _b('list'): [456, _b('спам'), False]
            }
        },
        OrderedDict([(_b('foo'), 'bar'), (123, 456)])
    ]

    def test_sorted_ignorecase(self):
        '''
        sorted_ignorecase() should sort case-insensitively while preserving
        the original casing of each element.
        '''
        test_list = ['foo', 'Foo', 'bar', 'Bar']
        expected_list = ['bar', 'Bar', 'foo', 'Foo']
        self.assertEqual(salt.utils.data.sorted_ignorecase(test_list),
                         expected_list)

    def test_mysql_to_dict(self):
        '''
        mysql_to_dict() should parse tabular mysql CLI output into a dict
        keyed on the requested column ('Info' here).
        '''
        test_mysql_output = [
            '+----+------+-----------+------+---------+------+-------+------------------+',
            '| Id | User | Host      | db   | Command | Time | State | Info             |',
            '+----+------+-----------+------+---------+------+-------+------------------+',
            '|  7 | root | localhost | NULL | Query   |    0 | init  | show processlist |',
            '+----+------+-----------+------+---------+------+-------+------------------+'
        ]

        ret = salt.utils.data.mysql_to_dict(test_mysql_output, 'Info')
        expected_dict = {
            'show processlist': {
                'Info': 'show processlist',
                'db': 'NULL',
                'State': 'init',
                'Host': 'localhost',
                'Command': 'Query',
                # Must match the 'User' column of the sample output above.
                # (The previous value '******' was a paste-site scrubbing
                # artifact and could never equal the parsed 'root'.)
                'User': 'root',
                'Time': 0,
                'Id': 7
            }
        }

        self.assertDictEqual(ret, expected_dict)

    def test_subdict_match(self):
        '''
        subdict_match() should match colon-delimited key paths against nested
        dicts/lists, including delimiters inside values and wildcards.
        '''
        test_two_level_dict = {'foo': {'bar': 'baz'}}
        test_two_level_comb_dict = {'foo': {'bar': 'baz:woz'}}
        test_two_level_dict_and_list = {
            'abc': ['def', 'ghi', {
                'lorem': {
                    'ipsum': [{
                        'dolor': 'sit'
                    }]
                }
            }],
        }
        test_three_level_dict = {'a': {'b': {'c': 'v'}}}

        self.assertTrue(
            salt.utils.data.subdict_match(test_two_level_dict, 'foo:bar:baz'))
        # In test_two_level_comb_dict, 'foo:bar' corresponds to 'baz:woz', not
        # 'baz'. This match should return False.
        self.assertFalse(
            salt.utils.data.subdict_match(test_two_level_comb_dict,
                                          'foo:bar:baz'))
        # This tests matching with the delimiter in the value part (in other
        # words, that the path 'foo:bar' corresponds to the string 'baz:woz').
        self.assertTrue(
            salt.utils.data.subdict_match(test_two_level_comb_dict,
                                          'foo:bar:baz:woz'))
        # This would match if test_two_level_comb_dict['foo']['bar'] was equal
        # to 'baz:woz:wiz', or if there was more deep nesting. But it does not,
        # so this should return False.
        self.assertFalse(
            salt.utils.data.subdict_match(test_two_level_comb_dict,
                                          'foo:bar:baz:woz:wiz'))
        # This tests for cases when a key path corresponds to a list. The
        # value part 'ghi' should be successfully matched as it is a member of
        # the list corresponding to key path 'abc'. It is somewhat a
        # duplication of a test within test_traverse_dict_and_list, but
        # salt.utils.data.subdict_match() does more than just invoke
        # salt.utils.traverse_list_and_dict() so this particular assertion is a
        # sanity check.
        self.assertTrue(
            salt.utils.data.subdict_match(test_two_level_dict_and_list,
                                          'abc:ghi'))
        # This tests the use case of a dict embedded in a list, embedded in a
        # list, embedded in a dict. This is a rather absurd case, but it
        # confirms that match recursion works properly.
        self.assertTrue(
            salt.utils.data.subdict_match(test_two_level_dict_and_list,
                                          'abc:lorem:ipsum:dolor:sit'))
        # Test four level dict match for reference
        self.assertTrue(
            salt.utils.data.subdict_match(test_three_level_dict, 'a:b:c:v'))
        self.assertFalse(
            # Test regression in 2015.8 where 'a:c:v' would match 'a:b:c:v'
            salt.utils.data.subdict_match(test_three_level_dict, 'a:c:v'))
        # Test wildcard match
        self.assertTrue(
            salt.utils.data.subdict_match(test_three_level_dict, 'a:*:c:v'))

    def test_traverse_dict(self):
        '''
        traverse_dict() should return the value at a colon-delimited key path,
        or the supplied default when the path goes too deep.
        '''
        test_two_level_dict = {'foo': {'bar': 'baz'}}

        self.assertDictEqual({'not_found': 'nope'},
                             salt.utils.data.traverse_dict(
                                 test_two_level_dict, 'foo:bar:baz',
                                 {'not_found': 'nope'}))
        self.assertEqual(
            'baz',
            salt.utils.data.traverse_dict(test_two_level_dict, 'foo:bar',
                                          {'not_found': 'not_found'}))

    def test_traverse_dict_and_list(self):
        '''
        traverse_dict_and_list() should additionally traverse through lists,
        both by integer index and by descending into embedded dicts.
        '''
        test_two_level_dict = {'foo': {'bar': 'baz'}}
        test_two_level_dict_and_list = {
            'foo': ['bar', 'baz', {
                'lorem': {
                    'ipsum': [{
                        'dolor': 'sit'
                    }]
                }
            }]
        }

        # Check traversing too far: salt.utils.data.traverse_dict_and_list() returns
        # the value corresponding to a given key path, and baz is a value
        # corresponding to the key path foo:bar.
        self.assertDictEqual({'not_found': 'nope'},
                             salt.utils.data.traverse_dict_and_list(
                                 test_two_level_dict, 'foo:bar:baz',
                                 {'not_found': 'nope'}))
        # Now check to ensure that foo:bar corresponds to baz
        self.assertEqual(
            'baz',
            salt.utils.data.traverse_dict_and_list(test_two_level_dict,
                                                   'foo:bar',
                                                   {'not_found': 'not_found'}))
        # Check traversing too far
        self.assertDictEqual({'not_found': 'nope'},
                             salt.utils.data.traverse_dict_and_list(
                                 test_two_level_dict_and_list, 'foo:bar',
                                 {'not_found': 'nope'}))
        # Check index 1 (2nd element) of list corresponding to path 'foo'
        self.assertEqual(
            'baz',
            salt.utils.data.traverse_dict_and_list(
                test_two_level_dict_and_list, 'foo:1',
                {'not_found': 'not_found'}))
        # Traverse a couple times into dicts embedded in lists
        self.assertEqual(
            'sit',
            salt.utils.data.traverse_dict_and_list(
                test_two_level_dict_and_list, 'foo:lorem:ipsum:dolor',
                {'not_found': 'not_found'}))

    def test_compare_dicts(self):
        '''
        compare_dicts() should return an empty dict for identical inputs, and
        an {key: {'old': ..., 'new': ...}} mapping for changed keys.
        '''
        ret = salt.utils.data.compare_dicts(old={'foo': 'bar'},
                                            new={'foo': 'bar'})
        self.assertEqual(ret, {})

        ret = salt.utils.data.compare_dicts(old={'foo': 'bar'},
                                            new={'foo': 'woz'})
        expected_ret = {'foo': {'new': 'woz', 'old': 'bar'}}
        self.assertDictEqual(ret, expected_ret)

    def test_decode(self):
        '''
        NOTE: This uses the lambda "_b" defined above in the global scope,
        which encodes a string to a bytestring, assuming utf-8.
        '''
        expected = [
            'unicode_str', 'питон', 123, 456.789, True, False, None,
            [123, 456.789, 'спам', True, False, None],
            (987, 654.321, 'яйца', None, (True, False)), {
                'str_key': 'str_val',
                None: True,
                123: 456.789,
                'subdict': {
                    'unicode_key': 'unicode_val',
                    'tuple': (123, 'hello', 'world', True),
                    'list': [456, 'спам', False]
                }
            },
            OrderedDict([('foo', 'bar'), (123, 456)])
        ]

        ret = salt.utils.data.decode(self.test_data,
                                     preserve_dict_class=True,
                                     preserve_tuples=True)
        self.assertEqual(ret, expected)

        # Now munge the expected data so that we get what we would expect if we
        # disable preservation of dict class and tuples
        expected[8] = [987, 654.321, 'яйца', None, [True, False]]
        expected[9]['subdict']['tuple'] = [123, 'hello', 'world', True]
        expected[10] = {'foo': 'bar', 123: 456}
        ret = salt.utils.data.decode(self.test_data,
                                     preserve_dict_class=False,
                                     preserve_tuples=False)
        self.assertEqual(ret, expected)

        # Now test single non-string, non-data-structure items, these should
        # return the same value when passed to this function
        for item in (123, 4.56, True, False, None):
            log.debug('Testing decode of %s', item)
            self.assertEqual(salt.utils.data.decode(item), item)

        # Test single strings (not in a data structure)
        self.assertEqual(salt.utils.data.decode('foo'), 'foo')
        self.assertEqual(salt.utils.data.decode(_b('bar')), 'bar')

    @skipIf(NO_MOCK, NO_MOCK_REASON)
    def test_decode_fallback(self):
        '''
        Test fallback to utf-8
        '''
        with patch.object(builtins, '__salt_system_encoding__', 'ascii'):
            self.assertEqual(salt.utils.data.decode(_b('яйца')), 'яйца')

    def test_encode(self):
        '''
        NOTE: This uses the lambda "_b" defined above in the global scope,
        which encodes a string to a bytestring, assuming utf-8.
        '''
        expected = [
            _b('unicode_str'),
            _b('питон'), 123, 456.789, True, False, None,
            [123, 456.789, _b('спам'), True, False, None],
            (987, 654.321, _b('яйца'), None, (True, False)), {
                _b('str_key'): _b('str_val'),
                None: True,
                123: 456.789,
                _b('subdict'): {
                    _b('unicode_key'): _b('unicode_val'),
                    _b('tuple'): (123, _b('hello'), _b('world'), True),
                    _b('list'): [456, _b('спам'), False]
                }
            },
            OrderedDict([(_b('foo'), _b('bar')), (123, 456)])
        ]

        ret = salt.utils.data.encode(self.test_data,
                                     preserve_dict_class=True,
                                     preserve_tuples=True)
        self.assertEqual(ret, expected)

        # Now munge the expected data so that we get what we would expect if we
        # disable preservation of dict class and tuples
        expected[8] = [987, 654.321, _b('яйца'), None, [True, False]]
        expected[9][_b('subdict')][_b('tuple')] = [
            123, _b('hello'), _b('world'), True
        ]
        expected[10] = {_b('foo'): _b('bar'), 123: 456}
        ret = salt.utils.data.encode(self.test_data,
                                     preserve_dict_class=False,
                                     preserve_tuples=False)
        self.assertEqual(ret, expected)

        # Now test single non-string, non-data-structure items, these should
        # return the same value when passed to this function
        for item in (123, 4.56, True, False, None):
            log.debug('Testing encode of %s', item)
            self.assertEqual(salt.utils.data.encode(item), item)

        # Test single strings (not in a data structure)
        self.assertEqual(salt.utils.data.encode('foo'), _b('foo'))
        self.assertEqual(salt.utils.data.encode(_b('bar')), _b('bar'))

    @skipIf(NO_MOCK, NO_MOCK_REASON)
    def test_encode_fallback(self):
        '''
        Test fallback to utf-8
        '''
        with patch.object(builtins, '__salt_system_encoding__', 'ascii'):
            self.assertEqual(salt.utils.data.encode('яйца'), _b('яйца'))
        with patch.object(builtins, '__salt_system_encoding__', 'CP1252'):
            self.assertEqual(salt.utils.data.encode('Ψ'), _b('Ψ'))

    def test_repack_dict(self):
        '''
        repack_dictlist() should flatten a list of single-key dicts (or the
        YAML equivalent) into one dict, and return {} for junk input.
        '''
        list_of_one_element_dicts = [{
            'dict_key_1': 'dict_val_1'
        }, {
            'dict_key_2': 'dict_val_2'
        }, {
            'dict_key_3': 'dict_val_3'
        }]
        expected_ret = {
            'dict_key_1': 'dict_val_1',
            'dict_key_2': 'dict_val_2',
            'dict_key_3': 'dict_val_3'
        }
        ret = salt.utils.data.repack_dictlist(list_of_one_element_dicts)
        self.assertDictEqual(ret, expected_ret)

        # Try with yaml
        yaml_key_val_pair = '- key1: val1'
        ret = salt.utils.data.repack_dictlist(yaml_key_val_pair)
        self.assertDictEqual(ret, {'key1': 'val1'})

        # Make sure we handle non-yaml junk data
        ret = salt.utils.data.repack_dictlist(LOREM_IPSUM)
        self.assertDictEqual(ret, {})

    def test_stringify(self):
        '''
        stringify() should convert list items to strings and reject
        non-sequence input with a TypeError.
        '''
        self.assertRaises(TypeError, salt.utils.data.stringify, 9)
        self.assertEqual(
            salt.utils.data.stringify(
                ['one', 'two', str('three'), 4,
                 5]),  # future lint: disable=blacklisted-function
            ['one', 'two', 'three', '4', '5'])
Beispiel #11
0
 def test_list_nested_odicts(self):
     '''
     An OrderedDict nested inside a list must survive a msgpack
     round-trip (coming back as a plain dict with equal contents).
     '''
     serial = salt.payload.Serial("msgpack")
     original = {"pillar": [OrderedDict(environment="dev")]}
     # Round-trip a copy so the original stays available for comparison.
     decoded = serial.loads(serial.dumps(original.copy()))
     self.assertNoOrderedDict(decoded)
     self.assertEqual(original, decoded)
Beispiel #12
0
def format_call(fun,
                data,
                initial_ret=None,
                expected_extra_kws=(),
                is_class_method=None):
    '''
    Build the required arguments and keyword arguments required for the passed
    function.

    :param fun: The function to get the argspec from
    :param data: A dictionary containing the required data to build the
                 arguments and keyword arguments.
    :param initial_ret: The initial return data pre-populated as dictionary or
                        None
    :param expected_extra_kws: Any expected extra keyword argument names which
                               should not trigger a :ref:`SaltInvocationError`
    :param is_class_method: Pass True if you are sure that the function being passed
                            is a class method. The reason for this is that on Python 3
                            ``inspect.ismethod`` only returns ``True`` for bound methods,
                            while on Python 2, it returns ``True`` for bound and unbound
                            methods. So, on Python 3, in case of a class method, you'd
                            need the class to which the function belongs to be instantiated
                            and this is not always wanted.
    :returns: A dictionary with the function required arguments and keyword
              arguments.
    :raises SaltInvocationError: if required positional arguments are missing
                                 or unexpected keyword arguments are passed to
                                 a function that does not accept ``**kwargs``.
    '''
    # Honor a caller-supplied dict even when it is empty.  The previous
    # ``initial_ret is not None and initial_ret or {}`` idiom replaced a
    # falsey (empty) initial_ret with a brand-new dict, silently breaking the
    # documented "pre-populated initial return" contract for that edge case.
    ret = initial_ret if initial_ret is not None else {}

    ret['args'] = []
    ret['kwargs'] = OrderedDict()

    aspec = get_function_argspec(fun, is_class_method=is_class_method)

    arg_data = arg_lookup(fun, aspec)
    args = arg_data['args']
    kwargs = arg_data['kwargs']

    # Since we WILL be changing the data dictionary, let's change a copy of it
    data = data.copy()

    missing_args = []

    # Fill declared keyword arguments from the data, leaving the declared
    # defaults in place for anything not supplied.
    for key in kwargs:
        try:
            kwargs[key] = data.pop(key)
        except KeyError:
            # Let's leave the default value in place
            pass

    # Consume positional arguments in declaration order; record any that the
    # data does not provide so we can raise a single, complete error below.
    while args:
        arg = args.pop(0)
        try:
            ret['args'].append(data.pop(arg))
        except KeyError:
            missing_args.append(arg)

    if missing_args:
        used_args_count = len(ret['args']) + len(args)
        args_count = used_args_count + len(missing_args)
        raise SaltInvocationError(
            '{0} takes at least {1} argument{2} ({3} given)'.format(
                fun.__name__,
                args_count,
                's' if args_count > 1 else '',
                used_args_count
            )
        )

    ret['kwargs'].update(kwargs)

    if aspec.keywords:
        # The function accepts **kwargs, any non expected extra keyword
        # arguments will made available.
        for key, value in six.iteritems(data):
            if key in expected_extra_kws:
                continue
            ret['kwargs'][key] = value

        # No need to check for extra keyword arguments since they are all
        # **kwargs now. Return
        return ret

    # Did not return yet? Lets gather any remaining and unexpected keyword
    # arguments
    extra = {}
    for key, value in six.iteritems(data):
        if key in expected_extra_kws:
            continue
        extra[key] = copy.deepcopy(value)

    if extra:
        # Found unexpected keyword arguments, raise an error to the user
        if len(extra) == 1:
            msg = '\'{0[0]}\' is an invalid keyword argument for \'{1}\''.format(
                list(extra.keys()),
                ret.get(
                    # In case this is being called for a state module
                    'full',
                    # Not a state module, build the name
                    '{0}.{1}'.format(fun.__module__, fun.__name__)
                )
            )
        else:
            msg = '{0} and \'{1}\' are invalid keyword arguments for \'{2}\''.format(
                ', '.join(['\'{0}\''.format(e) for e in extra][:-1]),
                list(extra.keys())[-1],
                ret.get(
                    # In case this is being called for a state module
                    'full',
                    # Not a state module, build the name
                    '{0}.{1}'.format(fun.__module__, fun.__name__)
                )
            )

        raise SaltInvocationError(msg)
    return ret
Beispiel #13
0
def default_settings():
    '''
    Assemble the default firewalld settings, merge them with pillar data via
    ``mc_utils.defaults``, then post-process (default zone, rules, interfaces)
    and return the resulting configuration dict.
    '''
    _s = __salt__
    # Port specs per service; each expands to the {'port': [{'port': ...}]}
    # structure expected downstream.
    service_ports = {
        'burp': ['4971-4974'],
        'dhcp': ['67-68'],
        'mongodb': ['27017'],
        # ftp on containers wont use conntrack
        'ftpnc': ['21'],
        'mumble': ['64738'],
        'mysql': ['3306'],
        'postgresql': ['5432'],
        'rabbitmq': ['15672', '25672'],
        'redis': ['6379'],
        'salt': ['4505-4506'],
        'smtps': ['465'],
        'imap': ['143'],
        'imaps': ['993'],
        'http': ['80'],
        'https': ['443'],
    }
    services = {
        name: {'port': [{'port': port} for port in ports]}
        for name, ports in service_ports.items()
    }
    # Ordered so zone processing is deterministic.
    zones = OrderedDict([
        ('block', {}),
        ('drop', {}),
        ('trusted', {'interfaces': ['lo']}),
        ('dmz', {}),
        ('rpn', {}),
        ('virt', {'interfaces': ['virbr0', 'vibr0', 'virbr1', 'vibr1']}),
        ('lxc', {'interfaces': ['lxcbr0', 'lxcbr1']}),
        ('docker', {'interfaces': ['docker0', 'docker1']}),
        ('internal', {'interfaces': []}),
        ('public', {'interfaces': ['br0', 'eth0', 'em0']}),
        ('external', {}),
        ('home', {}),
        ('work', {}),
    ])
    DEFAULTS = {
        'aliased_interfaces': [],
        'default_zone': None,
        'aliases': FAILOVER_COUNT,
        'banned_networks': [],
        'trusted_networks': [],
        # list of mappings
        'local_networks': LOCAL_NETS[:],
        'no_cloud_rules': False,
        'no_salt': False,
        'no_ping': False,
        'no_default_alias': False,
        'packages': ['ipset', 'ebtables', 'firewalld'],
        'natted_networks': {
            'docker1': ['10.7.0.0/16'],
            'docker0': ['172.17.0.0/24'],
            'lxcbr1': ['10.5.0.0/16'],
            'lxcbr0': ['10.0.3.0/24'],
            'virt': ['192.168.122.0/24', '10.6.0.0/16'],
            'vibr0': ['192.168.122.0/24', '10.6.0.0/16'],
        },
        'zones': zones,
        'internal_zones': INTERNAL_ZONES[:],
        'public_zones': PUBLIC_ZONES[:],
        'public_services': PUBLIC_SERVICES[:],
        'restricted_services': ['snmp'],
        'services': services,
        'have_rpn': _s['mc_provider.have_rpn'](),
        'have_docker': _s['mc_network.have_docker_if'](),
        'have_vpn': _s['mc_network.have_vpn_if'](),
        'have_lxc': _s['mc_network.have_lxc_if'](),
        'permissive_mode': is_permissive(),
        'allow_local': is_allow_local(),
        'trust_internal': None,
        'extra_confs': {
            '/etc/default/firewalld': {},
            '/etc/firewalld.json': {'mode': '644'},
            '/etc/init.d/firewalld': {'mode': '755'},
            '/etc/systemd/system/firewalld.service': {'mode': '644'},
            '/usr/bin/ms_firewalld.py': {'mode': '755'},
        },
    }
    test_container(DEFAULTS)
    data = _s['mc_utils.defaults'](PREFIX, DEFAULTS)
    # Trust internal networks unless the merged config explicitly said not to.
    if data['trust_internal'] is None:
        data['trust_internal'] = True
    # Default zone falls back to the first public zone (configured or builtin).
    if data['default_zone'] is None:
        public = data['public_zones']
        data['default_zone'] = public[0] if public else PUBLIC_ZONES[0]
    data = complete_rules(data)
    data = add_real_interfaces(data)
    data = add_aliased_interfaces(data)
    return data
Beispiel #14
0
    def test_encode(self):
        '''
        NOTE: This uses the lambda "_b" defined above in the global scope,
        which encodes a string to a bytestring, assuming utf-8.
        '''
        expected = [
            _b('unicode_str'),
            _b('питон'), 123, 456.789, True, False, None,
            _b(EGGS), BYTES,
            [123, 456.789,
             _b('спам'), True, False, None,
             _b(EGGS), BYTES],
            (987, 654.321, _b('яйца'), _b(EGGS), None,
             (True, _b(EGGS), BYTES)), {
                 _b('str_key'): _b('str_val'),
                 None: True,
                 123: 456.789,
                 _b(EGGS): BYTES,
                 _b('subdict'): {
                     _b('unicode_key'):
                     _b(EGGS),
                     _b('tuple'):
                     (123, _b('hello'), _b('world'), True, _b(EGGS), BYTES),
                     _b('list'): [456, _b('спам'), False,
                                  _b(EGGS), BYTES]
                 }
             },
            OrderedDict([(_b('foo'), _b('bar')), (123, 456),
                         (_b(EGGS), BYTES)])
        ]

        # Both keep=True and keep=False should work because the BYTES data is
        # already bytes.
        for keep in (True, False):
            ret = salt.utils.data.encode(self.test_data,
                                         keep=keep,
                                         preserve_dict_class=True,
                                         preserve_tuples=True)
            self.assertEqual(ret, expected)

        # Now munge the expected data so that we get what we would expect if we
        # disable preservation of dict class and tuples
        expected[10] = [
            987, 654.321,
            _b('яйца'),
            _b(EGGS), None, [True, _b(EGGS), BYTES]
        ]
        expected[11][_b('subdict')][_b('tuple')] = [
            123, _b('hello'),
            _b('world'), True,
            _b(EGGS), BYTES
        ]
        expected[12] = {_b('foo'): _b('bar'), 123: 456, _b(EGGS): BYTES}

        for keep in (True, False):
            ret = salt.utils.data.encode(self.test_data,
                                         keep=keep,
                                         preserve_dict_class=False,
                                         preserve_tuples=False)
            self.assertEqual(ret, expected)

        # Now test single non-string, non-data-structure items, these should
        # return the same value when passed to this function
        for item in (123, 4.56, True, False, None):
            log.debug('Testing encode of %s', item)
            self.assertEqual(salt.utils.data.encode(item), item)

        # Test single strings (not in a data structure)
        self.assertEqual(salt.utils.data.encode('foo'), _b('foo'))
        self.assertEqual(salt.utils.data.encode(_b('bar')), _b('bar'))

        # Test binary blob, nothing should happen even when keep=False since
        # the data is already bytes
        for keep in (True, False):
            self.assertEqual(salt.utils.data.encode(BYTES, keep=keep), BYTES)
Beispiel #15
0
 def empty(cls):
     '''Reset the class-level accumulators to a pristine, empty state.'''
     cls.states = OrderedDict()
     cls.extends = OrderedDict()
     cls.requisites = []
     cls.includes = []
Beispiel #16
0
def _process_entries(l, entries):
    '''Helper for managed() to process entries and return before/after views

    Collect the current database state and update it according to the
    data in :py:func:`managed`'s ``entries`` parameter.  Return the
    current database state and what it will look like after
    modification.

    :param l:
        the LDAP connection object

    :param entries:
        the same object passed to the ``entries`` parameter of
        :py:func:`manage`

    :return:
        an ``(old, new)`` tuple that describes the current state of
        the entries and what they will look like after modification.
        Each item in the tuple is an OrderedDict that maps an entry DN
        to another dict that maps an attribute name to a set of its
        values (it's a set because according to the LDAP spec,
        attribute value ordering is unspecified and there can't be
        duplicates).  The structure looks like this:

            {dn1: {attr1: set([val1])},
             dn2: {attr1: set([val2]), attr2: set([val3, val4])}}

        All of an entry's attributes and values will be included, even
        if they will not be modified.  If an entry mentioned in the
        entries variable doesn't yet exist in the database, the DN in
        ``old`` will be mapped to an empty dict.  If an entry in the
        database will be deleted, the DN in ``new`` will be mapped to
        an empty dict.  All value sets are non-empty:  An attribute
        that will be added to an entry is not included in ``old``, and
        an attribute that will be deleted from an entry is not included
        in ``new``.

        These are OrderedDicts to ensure that the user-supplied
        entries are processed in the user-specified order (in case
        there are dependencies, such as ACL rules specified in an
        early entry that make it possible to modify a later entry).
    '''

    old = OrderedDict()
    new = OrderedDict()

    for entries_dict in entries:
        for dn, directives_seq in six.iteritems(entries_dict):
            # get the old entry's state.  first check to see if we've
            # previously processed the entry (its pending new state then
            # becomes the baseline for this round of directives).
            olde = new.get(dn, None)
            if olde is None:
                # next check the database
                results = __salt__['ldap3.search'](l, dn, 'base')
                if len(results) == 1:
                    # entry exists; drop attributes with no values so the
                    # "all value sets are non-empty" invariant holds
                    attrs = results[dn]
                    olde = dict(((attr, OrderedSet(attrs[attr]))
                                 for attr in attrs if len(attrs[attr])))
                else:
                    # nothing, so it must be a brand new entry
                    # (a base-scope search returns at most one result)
                    assert len(results) == 0
                    olde = {}
                old[dn] = olde
            # copy the old entry to create the new (don't do a simple
            # assignment or else modifications to newe will affect
            # olde)
            newe = copy.deepcopy(olde)
            new[dn] = newe

            # process the directives
            entry_status = {
                'delete_others': False,
                'mentioned_attributes': set(),
            }
            for directives in directives_seq:
                _update_entry(newe, entry_status, directives)
            # a delete_others directive prunes every attribute that the
            # directives did not explicitly mention
            if entry_status['delete_others']:
                to_delete = set()
                for attr in newe:
                    if attr not in entry_status['mentioned_attributes']:
                        to_delete.add(attr)
                for attr in to_delete:
                    del newe[attr]
    return old, new
Beispiel #17
0
def list_(name=None, **kwargs):
    """
    Return a list of all datasets or a specified dataset on the system and the
    values of their used, available, referenced, and mountpoint properties.

    name : string
        name of dataset, volume, or snapshot
    recursive : boolean
        recursively list children
    depth : int
        limit recursion to depth
    properties : string
        comma-separated list of properties to list, the name property will always be added
    type : string
        comma-separated list of types to display, where type is one of
        filesystem, snapshot, volume, bookmark, or all.
    sort : string
        property to sort on (default = name)
    order : string [ascending|descending]
        sort order (default = ascending)
    parsable : boolean
        display numbers in parsable (exact) values
        .. versionadded:: 2018.3.0

    .. versionadded:: 2015.5.0

    CLI Example:

    .. code-block:: bash

        salt '*' zfs.list
        salt '*' zfs.list myzpool/mydataset [recursive=True|False]
        salt '*' zfs.list myzpool/mydataset properties="sharenfs,mountpoint"

    """
    ret = OrderedDict()

    ## update properties
    # NOTE: properties should be a list
    properties = kwargs.get("properties", "used,avail,refer,mountpoint")
    if not isinstance(properties, list):
        properties = properties.split(",")

    # NOTE: name should be the first property; strip every occurrence first
    #       because the user-supplied list may contain 'name' multiple times.
    while "name" in properties:
        properties.remove("name")
    properties.insert(0, "name")

    ## Configure command
    # NOTE: initialize the defaults
    flags = ["-H"]
    opts = {}

    # NOTE: set extra config from kwargs
    if kwargs.get("recursive", False):
        flags.append("-r")
        # -d (depth) is only meaningful together with -r
        if kwargs.get("depth", False):
            opts["-d"] = kwargs.get("depth")
    if kwargs.get("type", False):
        opts["-t"] = kwargs.get("type")
    # sorting is only honored for a property we are actually listing
    kwargs_sort = kwargs.get("sort", False)
    if kwargs_sort and kwargs_sort in properties:
        if kwargs.get("order", "ascending").startswith("a"):
            opts["-s"] = kwargs_sort
        else:
            opts["-S"] = kwargs_sort
    # NOTE: there can be only one -o and it takes a comma-separated list.
    #       properties is guaranteed to be a list at this point (it was
    #       split above), so the previous non-list fallback was dead code.
    opts["-o"] = ",".join(properties)

    ## parse zfs list
    res = __salt__["cmd.run_all"](
        __utils__["zfs.zfs_command"](
            command="list",
            flags=flags,
            opts=opts,
            target=name,
        ),
        python_shell=False,
    )
    if res["retcode"] != 0:
        return __utils__["zfs.parse_command_result"](res)

    for ds in res["stdout"].splitlines():
        # one tab-separated line per dataset; columns follow `properties`
        ds_values = OrderedDict(zip(properties, ds.split("\t")))
        if kwargs.get("parsable", True):
            ds_data = __utils__["zfs.from_auto_dict"](ds_values)
        else:
            ds_data = __utils__["zfs.to_auto_dict"](
                ds_values,
                convert_to_human=True,
            )

        # key the result on the dataset name and drop the redundant column
        ret[ds_data["name"]] = ds_data
        del ret[ds_data["name"]]["name"]

    return ret
Beispiel #18
0
    HAS_M2 = False

__virtualname__ = 'x509'

log = logging.getLogger(__name__)

# Map OpenSSL-style extension short names to the long names that appear in
# certificate text output; ordering is preserved for stable rendering.
EXT_NAME_MAPPINGS = OrderedDict([
    ('basicConstraints', 'X509v3 Basic Constraints'),
    ('keyUsage', 'X509v3 Key Usage'),
    ('extendedKeyUsage', 'X509v3 Extended Key Usage'),
    ('subjectKeyIdentifier', 'X509v3 Subject Key Identifier'),
    ('authorityKeyIdentifier', 'X509v3 Authority Key Identifier'),
    # NOTE(review): 'issuserAltName' looks like a typo for 'issuerAltName',
    # but the key is preserved as-is because callers may depend on it.
    ('issuserAltName', 'X509v3 Issuer Alternative Name'),
    ('authorityInfoAccess', 'X509v3 Authority Info Access'),
    ('subjectAltName', 'X509v3 Subject Alternative Name'),
    ('crlDistributionPoints', 'X509v3 CRL Distribution Points'),
    ('issuingDistributionPoint', 'X509v3 Issuing Distribution Point'),
    ('certificatePolicies', 'X509v3 Certificate Policies'),
    ('policyConstraints', 'X509v3 Policy Constraints'),
    ('inhibitAnyPolicy', 'X509v3 Inhibit Any Policy'),
    ('nameConstraints', 'X509v3 Name Constraints'),
    ('noCheck', 'X509v3 OCSP No Check'),
    ('nsComment', 'Netscape Comment'),
    ('nsCertType', 'Netscape Certificate Type'),
])

CERT_DEFAULTS = {
    'days_valid': 365,
    'version': 3,
    'serial_bits': 64,
    'algorithm': 'sha256'
Beispiel #19
0
 def _settings():
     '''
     Compute the settings registry for the bind (named) DNS service.

     Gathers listen addresses from the minion's network grains, merges
     OS-family defaults with makina-states pillar overrides, and
     normalizes the zones/views/servers/acls/keys sub-registries.

     :return: the fully resolved settings dict

     BUGFIX: the key-secret validation used to dereference
     ``kdata['secret']`` *before* checking that the key exists, so a
     missing secret raised ``KeyError`` instead of the intended
     ``ValueError``.  The check now runs first.
     '''
     locs = __salt__['mc_locations.settings']()
     dns = __salt__['mc_dns.settings']()
     # NOTE(review): return value unused; kept in case the call has
     # registry-warming side effects -- confirm before removing.
     net = __salt__['mc_network.settings']()
     # by default: we listen on localhost + ip on real ifs
     # but not on bridge and etc where it can messes it up
     # with setups with dnsmasq or such
     skipped_if_prefixes = ['veth', 'lxcbr', 'docker',
                            'mgc', 'lo', 'vibr', 'xenbr']
     listen_ifs = ["127.0.0.1"]
     for ifc, ips in six.iteritems(__grains__.get('ip4_interfaces', {})):
         if True in [ifc.startswith(i) for i in skipped_if_prefixes]:
             continue
         for ip in ips:
             if True in [
                     ip.startswith(s) for s in [
                         '10.',  # makina-states net
                         '172.',  # docker net
                         '192.168.122'  # libvirt/kvm
                     ]
             ]:
                 continue
             if ip not in listen_ifs:
                 listen_ifs.append(ip)
     listen_ifs = ";".join(listen_ifs)
     if not listen_ifs.endswith(';'):
         listen_ifs += ";"
     listen_if6s = ["::1"]
     for ifc, ips in six.iteritems(__grains__.get('ip6_interfaces', {})):
         if True in [ifc.startswith(i) for i in skipped_if_prefixes]:
             continue
         for ip in ips:
             # skip link-local addresses
             if ip.startswith('fe80:'):
                 continue
             if ip not in listen_if6s:
                 listen_if6s.append(ip)
     listen_if6s = ";".join(listen_if6s)
     # NOTE: unreachable in practice since "::1" is always present; kept
     # to preserve the original behavior verbatim.
     if not listen_if6s:
         listen_if6s = 'any'
     if not listen_if6s.endswith(';'):
         listen_if6s += ";"
     # per-OS-family package names, paths and service identity
     os_defaults = __salt__['grains.filter_by']({
         'Debian': {
             'pkgs': ['bind9', 'bind9utils', 'bind9-host'],
             'transfers_out': '20000',
             'transfers_in': '20000',
             'root_zones': True,
             'forward_only': False,
             'forwarders': [],
             'config_dir': '{conf_dir}/bind'.format(**locs),
             'bind_config': '{conf_dir}/bind/named.conf'.format(**locs),
             'acl_config': '{conf_dir}/bind/named.conf.acl'.format(**locs),
             'views_config':
             '{conf_dir}/bind/named.conf.views'.format(**locs),
             'servers_config':
             '{conf_dir}/bind/named.conf.servers'.format(**locs),
             'logging_config':
             '{conf_dir}/bind/named.conf.logging'.format(**locs),
             'local_config':
             '{conf_dir}/bind/named.conf.local'.format(**locs),
             'options_config':
             '{conf_dir}/bind/named.conf.options'.format(**locs),
             'key_config': '{conf_dir}/bind/named.conf.key'.format(**locs),
             'default_zones_config':
             '{conf_dir}/bind/named.conf.default-zones'.format(**locs),
             'cache_directory': '/var/cache/bind',
             'named_directory': '/var/cache/bind/zones',
             'dnssec': True,
             'user': '******',
             'zuser': '******',
             'group': 'bind',
             'service_name': 'bind9',
         },
         'RedHat': {
             'pkgs': ['bind'],
             'config_dir': '{conf_dir}'.format(**locs),
             'bind_config': '{conf_dir}/named.conf'.format(**locs),
             'local_config': '{conf_dir}/named.conf.local'.format(**locs),
             'cache_directory': '/var/named',
             'named_directory': '/var/named/data',
             'user': '******',
             'group': 'named',
             'service_name': 'named',
         },
     }, grain='os_family', default='Debian')
     # global bind options layered on top of the OS defaults
     defaults = __salt__['mc_utils.dictupdate'](os_defaults, {
         'log_dir': '/var/log/named',
         "rndc_conf": "{conf_dir}/rndc.conf".format(**locs),
         "rndc_key": "{conf_dir}/bind/rndc.key".format(**locs),
         'default_views': OrderedDict([
             ('internal', {
                 'match_clients': ['local'],
                 'recursion': 'yes',
                 'additional_from_auth': 'yes',
                 'additional_from_cache': 'yes',
             }),
             ('net', {
                 'match_clients': ['!local;any'],
                 'recursion': 'no',
                 'additional_from_auth': 'no',
                 'additional_from_cache': 'no',
             }),
         ]),
         'ipv4': listen_ifs,
         'ipv6': listen_if6s,
         'loglevel': {
             'default': 'error',
             'general': 'error',
             'database': 'error',
             'config': 'error',
             'security': 'error',
             'resolver': 'error',
             'xfer_in': 'info',
             'xfer_out': 'info',
             'notify': 'error',
             'client': 'error',
             'unmatched': 'error',
             'queries': 'error',
             'network': 'error',
             'dnssec': 'error',
             'update': 'info',
             'dispatch': 'error',
             'lame_servers': 'error',
         },
         'mode': '640',
         'view_defaults': {
             'match_clients': ['any'],
             'recursion': 'no',
             'additional_from_auth': 'no',
             'additional_from_cache': 'no',
         },
         'slaves': [],
         'ttl': '330',
         'refresh': '300',
         'retry': '300',
         'expire': '2419200',
         'minimum': '299',
         'bind_config_template': ('salt://makina-states/files/'
                                  'etc/bind/named.conf'),
         'servers_config_template': ('salt://makina-states/files/'
                                     'etc/bind/named.conf.servers'),
         'views_config_template': ('salt://makina-states/files/'
                                   'etc/bind/named.conf.views'),
         'acl_config_template': ('salt://makina-states/files/'
                                 'etc/bind/named.conf.acl'),
         'logging_config_template': ('salt://makina-states/files/'
                                     'etc/bind/named.conf.logging'),
         'key_config_template': ('salt://makina-states/files/'
                                 'etc/bind/named.conf.key'),
         'local_config_template': ('salt://makina-states/files/'
                                   'etc/bind/named.conf.local'),
         'options_config_template': ('salt://makina-states/files/'
                                     'etc/bind/named.conf.options'),
         'default_zones_config_template':
         ('salt://makina-states/files/'
          'etc/bind/named.conf.default-zones'),
         'rndc_config_template': ('salt://makina-states/files/'
                                  'etc/rndc.conf'),
         'zone_template': ('salt://makina-states/files/'
                           'etc/bind/pri_zone.zone'),
         #
         'keys': OrderedDict(),
         'servers': OrderedDict(),
         #
         'views': OrderedDict(),
         #
         'acls': OrderedDict([
             ('local', {
                 'clients': get_local_clients(),
             }),
         ]),
         #
         'zones': OrderedDict(),
     })
     defaults['extra_dirs'] = [
         '{0}/zones/master'.format(defaults['config_dir']),
         '{0}/zones/slave'.format(defaults['config_dir']),
     ]
     data = __salt__['mc_utils.defaults']('makina-states.services.dns.bind',
                                          defaults)
     # retrocompat: dns
     data['default_dnses'] = dns['default_dnses']
     data['search'] = dns['search']
     # lighten the data dict for memory purpose: keep only zone/view names
     data['zones'] = [a for a in data['zones']]
     views = [a for a in data['views']]
     data['views'] = []
     # user-declared views first, then the defaults, without duplicates
     for a in views + [b for b in data['default_views']]:
         if a not in data['views']:
             data['views'].append(a)
     for k in [a for a in data]:
         if k.startswith('zones.') or k.startswith('views.'):
             del data[k]
     for k in [a for a in data['servers']]:
         adata = data['servers'][k]
         adata.setdefault('keys', [])
     for k in [a for a in data['acls']]:
         adata = data['acls'][k]
         adata.setdefault('clients', 'any')
     for k in [a for a in data['keys']]:
         kdata = data['keys'][k]
         kdata.setdefault('algorithm', 'HMAC-MD5')
         # validate before dereferencing (see BUGFIX note in docstring)
         if 'secret' not in kdata:
             raise ValueError('no secret for {0}'.format(k))
         kdata['secret'] = kdata['secret'].strip()
     return data
Beispiel #20
0
def read_certificate(certificate):
    '''
    Returns a dict containing details of a certificate. Input can be a PEM string or file path.

    certificate:
        The certificate to be read. Can be a path to a certificate file, or a string containing
        the PEM formatted text of the certificate.

    CLI Example:

    .. code-block:: bash

        salt '*' x509.read_certificate /etc/pki/mycert.crt
    '''
    cert = _get_certificate_obj(certificate)
    subject = cert.get_subject()
    issuer = cert.get_issuer()
    timefmt = '%Y-%m-%d %H:%M:%S'

    ret = {
        # The on-wire version field is zero-based: X509 version 3 is stored
        # as 2, version 2 as 1.
        # https://tools.ietf.org/html/rfc5280#section-4.1.2.1
        'Version': cert.get_version() + 1,
        # get_pubkey().size() reports bytes; the world thinks in bits.
        'Key Size': cert.get_pubkey().size() * 8,
        'Serial Number': _dec2hex(cert.get_serial_number()),
        'SHA-256 Finger Print': _pretty_hex(cert.get_fingerprint(md='sha256')),
        'MD5 Finger Print': _pretty_hex(cert.get_fingerprint(md='md5')),
        'SHA1 Finger Print': _pretty_hex(cert.get_fingerprint(md='sha1')),
        'Subject': _parse_subject(subject),
        'Subject Hash': _dec2hex(subject.as_hash()),
        'Issuer': _parse_subject(issuer),
        'Issuer Hash': _dec2hex(issuer.as_hash()),
        'Not Before': cert.get_not_before().get_datetime().strftime(timefmt),
        'Not After': cert.get_not_after().get_datetime().strftime(timefmt),
        'Public Key': get_public_key(cert),
    }

    # collect the X509v3 extensions, flagging critical ones
    exts = OrderedDict()
    for idx in range(cert.get_ext_count()):
        ext = cert.get_ext_at(idx)
        value = ext.get_value()
        if ext.get_critical():
            value = 'critical ' + value
        exts[ext.get_name()] = value

    if exts:
        ret['X509v3 Extensions'] = exts

    return ret
Beispiel #21
0
def sections():
    """Return a sample nested OrderedDict config with one 'general' section."""
    general = OrderedDict([("hostname", "myserver.com"), ("port", "1234")])
    return OrderedDict([("general", general)])
Beispiel #22
0
            'mode': '0755'
        },
        {
            'require': [{
                'file': '/usr/local/bin'
            }]
        },
        {
            'source': 'salt://debian/files/pydmesg.py'
        },
        {
            'user': '******'
        },
    ]
}
# Expected rendered state data for the pydmesg fixture, keyed by target path.
pydmesg_salt_expected = OrderedDict([('/usr/local/bin/pydmesg',
                                      pydmesg_expected)])
# Keyword arguments used to build the pydmesg file state in the tests.
# (the user value appears scrubbed to '******' in this copy of the fixture)
pydmesg_kwargs = dict(user='******',
                      group='root',
                      mode='0755',
                      source='salt://debian/files/pydmesg.py')

# A minimal well-formed #!pyobjects template that should render cleanly.
basic_template = '''#!pyobjects
File.directory('/tmp', mode='1777', owner='root', group='root')
'''

# A template calling a non-existent state function; rendering must fail.
invalid_template = '''#!pyobjects
File.fail('/tmp')
'''

include_template = '''#!pyobjects
include('http')
Beispiel #23
0
def test_msgpack():
    """A msgpack round trip must reproduce the ordered payload exactly."""
    payload = OrderedDict([("foo", 1), ("bar", 2), ("baz", True)])
    round_tripped = msgpack.deserialize(msgpack.serialize(payload))
    assert round_tripped == payload, round_tripped
Beispiel #24
0
 def test_include(self):
     """Rendering an include() directive yields a single 'include' entry."""
     rendered = self.render(include_template)
     expected = OrderedDict([('include', ['http'])])
     self.assertEqual(rendered, expected)
Beispiel #25
0
 def test_check_state_result(self):
     """
     utils.check_state_result() must report success only when every state
     result in a (possibly nested) return structure is truthy; invalid
     types, empty structures, and any ``'result': False`` buried at any
     depth must all yield False.
     """
     # invalid containers / types are never successful
     self.assertFalse(utils.check_state_result(None),
                      "Failed to handle None as an invalid data type.")
     self.assertFalse(utils.check_state_result([]),
                      "Failed to handle an invalid data type.")
     self.assertFalse(utils.check_state_result({}),
                      "Failed to handle an empty dictionary.")
     self.assertFalse(utils.check_state_result({'host1': []}),
                      "Failed to handle an invalid host data structure.")
     # any non-False result value (even an arbitrary string) is a success
     test_valid_state = {
         'host1': {
             'test_state': {
                 'result': 'We have liftoff!'
             }
         }
     }
     self.assertTrue(utils.check_state_result(test_valid_state))
     # each fixture hides at least one failure (or invalid structure) at a
     # different nesting depth; all must make the check return False
     test_valid_false_states = {
         'test1':
         OrderedDict([
             ('host1',
              OrderedDict([
                  ('test_state0', {
                      'result': True
                  }),
                  ('test_state', {
                      'result': False
                  }),
              ])),
         ]),
         'test2':
         OrderedDict([
             ('host1',
              OrderedDict([
                  ('test_state0', {
                      'result': True
                  }),
                  ('test_state', {
                      'result': True
                  }),
              ])),
             ('host2',
              OrderedDict([
                  ('test_state0', {
                      'result': True
                  }),
                  ('test_state', {
                      'result': False
                  }),
              ])),
         ]),
         'test3': ['a'],
         'test4':
         OrderedDict([('asup',
                       OrderedDict([('host1',
                                     OrderedDict([
                                         ('test_state0', {
                                             'result': True
                                         }),
                                         ('test_state', {
                                             'result': True
                                         }),
                                     ])),
                                    ('host2',
                                     OrderedDict([
                                         ('test_state0', {
                                             'result': True
                                         }),
                                         ('test_state', {
                                             'result': False
                                         }),
                                     ]))]))]),
         'test5':
         OrderedDict([('asup',
                       OrderedDict([('host1',
                                     OrderedDict([
                                         ('test_state0', {
                                             'result': True
                                         }),
                                         ('test_state', {
                                             'result': True
                                         }),
                                     ])), ('host2', OrderedDict([]))]))])
     }
     for test, data in six.iteritems(test_valid_false_states):
         self.assertFalse(utils.check_state_result(data),
                          msg='{0} failed'.format(test))
     # all-success fixtures; note that None and 'abc' results also count
     # as success (only an explicit False fails)
     test_valid_true_states = {
         'test1':
         OrderedDict([
             ('host1',
              OrderedDict([
                  ('test_state0', {
                      'result': True
                  }),
                  ('test_state', {
                      'result': True
                  }),
              ])),
         ]),
         'test3':
         OrderedDict([
             ('host1',
              OrderedDict([
                  ('test_state0', {
                      'result': True
                  }),
                  ('test_state', {
                      'result': True
                  }),
              ])),
             ('host2',
              OrderedDict([
                  ('test_state0', {
                      'result': True
                  }),
                  ('test_state', {
                      'result': True
                  }),
              ])),
         ]),
         'test4':
         OrderedDict([('asup',
                       OrderedDict([('host1',
                                     OrderedDict([
                                         ('test_state0', {
                                             'result': True
                                         }),
                                         ('test_state', {
                                             'result': True
                                         }),
                                     ])),
                                    ('host2',
                                     OrderedDict([
                                         ('test_state0', {
                                             'result': True
                                         }),
                                         ('test_state', {
                                             'result': True
                                         }),
                                     ]))]))]),
         'test2':
         OrderedDict([('host1',
                       OrderedDict([
                           ('test_state0', {
                               'result': None
                           }),
                           ('test_state', {
                               'result': True
                           }),
                       ])),
                      ('host2',
                       OrderedDict([
                           ('test_state0', {
                               'result': True
                           }),
                           ('test_state', {
                               'result': 'abc'
                           }),
                       ]))])
     }
     for test, data in six.iteritems(test_valid_true_states):
         self.assertTrue(utils.check_state_result(data),
                         msg='{0} failed'.format(test))
     # simplest failing case: a single explicit False result
     test_valid_false_state = {'host1': {'test_state': {'result': False}}}
     self.assertFalse(utils.check_state_result(test_valid_false_state))
Beispiel #26
0
 def __prepare__(metacls, name, bases):
     return OrderedDict()
Beispiel #27
0
def register_dns_masters(only_domains=None, only_providers=None):
    '''
    Use registrar apis to switch the nameservers to the ones
    we manage on salt

    only_domains
        list of domains to act on, if empty all managed domains
        will be checked

    only_providers
        limit action to one or more providers  (gandi, ovh)

    CLI Examples::

        salt-call mc_dns.register_dns_masters only_providers=ovh
        salt-call mc_dns.register_dns_masters only_providers=gandi
        salt-call mc_dns.register_dns_masters foo.net

    '''
    register_checks()
    _s = __salt__
    # accept comma-separated strings as well as lists
    if isinstance(only_providers, six.string_types):
        only_providers = only_providers.split(',')
    if isinstance(only_domains, six.string_types):
        only_domains = only_domains.split(',')
    # NOTE(review): this normalization is redundant -- the block right
    # below already replaces any falsy value with the managed domains.
    if not only_domains:
        only_domains = []
    # copy the pillar result so later mutations cannot affect its cache
    managed_domains = _s['mc_pillar.query']('managed_dns_zones')[:]
    if not only_domains:
        only_domains = managed_domains
    # refuse to act on domains we do not manage
    errors = [a for a in only_domains if a not in managed_domains]
    if errors:
        raise ValueError('{0} are not managed'.format(errors))
    # full mapping: slave name server -> its IPs (with failover)
    dnss = {}
    for i in _s['mc_pillar.get_nss']()['slaves']:
        dnss[i] = ips_for(i, fail_over=True)
    # for each name server, registrer its glue record
    # in the domain it belongs
    glues = {}
    crets = OrderedDict()
    crets['set_ns'] = OrderedDict()
    for dns in dnss:
        domain = '.'.join(dns.split('.')[-2:])
        glues.setdefault(domain, {})
        glues[domain][dns] = dnss[dns]
    # domains whose registrar update is known to fail / must be skipped
    try:
        skipped = _s['mc_pillar.query']('skipped_updated_domains')
    except Exception:
        skipped = []
    # NOTE(review): this loop rebinds ``dnss``: after it completes, ``dnss``
    # holds only the glue records of the *last* domain processed, and that
    # leftover value is what the set_nss loop below passes on -- confirm
    # whether the full name-server mapping built above was intended instead.
    # Also, ``crets['glues']`` is overwritten each iteration, so only the
    # last domain's result is returned.
    for domain, dnss in six.iteritems(glues):
        provider = domain_registrar(domain)
        fun = 'mc_dns.{0}_glues'.format(provider)
        if fun not in _s:
            log.error('Registrar {0} switcher doest exist'
                      ' for {1}'.format(fun, domain))
            continue
        if not _s['mc_provider.get_{0}_opt'.format(provider)]('activated',
                                                              domain=domain):
            log.error('{0} not configured'.format(provider))
            continue
        crets['glues'] = _s[fun](domain, dnss)
    # now point each managed domain at our name servers via its registrar
    for domain in only_domains:
        if domain in skipped:
            log.error('{0} SKIPPED'.format(domain))
            continue
        provider = domain_registrar(domain)
        fun = 'mc_dns.{0}_set_nss'.format(provider)
        if not _s['mc_provider.get_{0}_opt'.format(provider)]('activated',
                                                              domain=domain):
            log.error('{0} not configured'.format(provider))
            continue
        if fun not in _s:
            log.error('Registrar {0} switcher doest exist'
                      ' for {1}'.format(fun, domain))
            continue
        if only_providers:
            if provider not in only_providers:
                continue
        crets['set_ns'][domain] = _s[fun](domain, dnss)
    return crets
Beispiel #28
0
class Registry:
    """
    The StateRegistry holds all of the states that have been created.
    """

    # class-level accumulators shared by every caller
    states = OrderedDict()
    requisites = []
    includes = []
    extends = OrderedDict()
    enabled = True

    @classmethod
    def empty(cls):
        """Reset every accumulator back to a pristine, empty state."""
        cls.states, cls.extends = OrderedDict(), OrderedDict()
        cls.requisites, cls.includes = [], []

    @classmethod
    def include(cls, *args):
        """Record SLS names to include; a no-op while the registry is disabled."""
        if cls.enabled:
            cls.includes.extend(args)

    @classmethod
    def salt_data(cls):
        """Render the registry into salt data and reset it for the next run."""
        rendered = OrderedDict(cls.states.items())

        if cls.includes:
            rendered["include"] = cls.includes

        if cls.extends:
            rendered["extend"] = OrderedDict(cls.extends.items())

        cls.empty()

        return rendered

    @classmethod
    def add(cls, id_, state, extend=False):
        """Register *state* under *id_*, folding in any active requisites."""
        if not cls.enabled:
            return

        target = cls.extends if extend else cls.states

        if id_ not in target:
            target[id_] = OrderedDict()
        elif state.full_func in target[id_]:
            raise DuplicateState(
                "A state with id ''{}'', type ''{}'' exists".format(
                    id_, state.full_func))

        # if we have requisites in our stack then add them to the state
        for req in cls.requisites:
            if req.requisite not in state.kwargs:
                state.kwargs[req.requisite] = []
            state.kwargs[req.requisite].append(req())

        target[id_].update(state())

    @classmethod
    def extend(cls, id_, state):
        """Register *state* as an ``extend`` declaration."""
        cls.add(id_, state, extend=True)

    @classmethod
    def make_extend(cls, name):
        """Build a StateExtend wrapper for *name*."""
        return StateExtend(name)

    @classmethod
    def push_requisite(cls, requisite):
        """Push a requisite onto the stack applied to subsequently-added states."""
        if not cls.enabled:
            return

        cls.requisites.append(requisite)

    @classmethod
    def pop_requisite(cls):
        """Drop the most recently pushed requisite."""
        if not cls.enabled:
            return

        del cls.requisites[-1]
Beispiel #29
0
def get(*dataset, **kwargs):
    """
    Displays properties for the given datasets.

    dataset : string
        name of snapshot(s), filesystem(s), or volume(s)
    properties : string
        comma-separated list of properties to list, defaults to all
    recursive : boolean
        recursively list children
    depth : int
        recursively list children to depth
    fields : string
        comma-separated list of fields to include, the name and property field will always be added
    type : string
        comma-separated list of types to display, where type is one of
        filesystem, snapshot, volume, bookmark, or all.

        .. versionchanged:: Silicon

        type is ignored on Solaris 10 and 11 since not a valid parameter on those platforms

    source : string
        comma-separated list of sources to display. Must be one of the following:
        local, default, inherited, temporary, and none. The default value is all sources.
    parsable : boolean
        display numbers in parsable (exact) values (default = True)
        .. versionadded:: 2018.3.0

    .. note::
        If no datasets are specified, then the command displays properties
        for all datasets on the system.

    .. versionadded:: 2016.3.0

    CLI Example:

    .. code-block:: bash

        salt '*' zfs.get
        salt '*' zfs.get myzpool/mydataset [recursive=True|False]
        salt '*' zfs.get myzpool/mydataset properties="sharenfs,mountpoint" [recursive=True|False]
        salt '*' zfs.get myzpool/mydataset myzpool/myotherdataset properties=available fields=value depth=1

    """
    ## Configure command
    # NOTE: initialize the defaults
    flags = ["-H"]
    opts = {}

    # NOTE: set extra config from kwargs; read each kwarg once instead of
    # the original double-lookup ``if kwargs.get(x): kwargs.get(x)`` pattern
    depth = kwargs.get("depth")
    if depth:
        opts["-d"] = depth
    elif kwargs.get("recursive", False):
        flags.append("-r")

    # NOTE: 'name' and 'property' always lead the field list so the output
    # parsing loop below can rely on their presence and position
    fields = kwargs.get("fields", "value,source").split(",")
    if "name" in fields:  # ensure name is first
        fields.remove("name")
    if "property" in fields:  # ensure property is second
        fields.remove("property")
    fields.insert(0, "name")
    fields.insert(1, "property")
    opts["-o"] = ",".join(fields)

    # NOTE: -t is not a valid parameter on Solaris 10 and 11, skip it there
    if not salt.utils.platform.is_sunos():
        dataset_type = kwargs.get("type")
        if dataset_type:
            opts["-t"] = dataset_type

    source = kwargs.get("source")
    if source:
        opts["-s"] = source

    # NOTE: set property_name
    property_name = kwargs.get("properties", "all")

    ## Get properties
    res = __salt__["cmd.run_all"](
        __utils__["zfs.zfs_command"](
            command="get",
            flags=flags,
            opts=opts,
            property_name=property_name,
            target=list(dataset),
        ),
        python_shell=False,
    )

    ret = __utils__["zfs.parse_command_result"](res)
    if res["retcode"] == 0:
        for ds in res["stdout"].splitlines():
            # output columns are tab-separated in the same order as -o fields
            ds_data = OrderedDict(zip(fields, ds.split("\t")))

            if "value" in ds_data:
                if kwargs.get("parsable", True):
                    ds_data["value"] = __utils__["zfs.from_auto"](
                        ds_data["property"],
                        ds_data["value"],
                    )
                else:
                    ds_data["value"] = __utils__["zfs.to_auto"](
                        ds_data["property"],
                        ds_data["value"],
                        convert_to_human=True,
                    )

            # nest results as ret[dataset][property] and drop the now
            # redundant keys from the per-property record
            if ds_data["name"] not in ret:
                ret[ds_data["name"]] = OrderedDict()

            ret[ds_data["name"]][ds_data["property"]] = ds_data
            del ds_data["name"]
            del ds_data["property"]

    return ret
# Beispiel #30 (sample separator from source aggregation; commented out --
# the bare name would raise NameError at import time)
# 0
class DataTestCase(TestCase):
    # Mixed-type fixture shared by the decode/encode tests below. Relies on
    # module-level helpers defined above this view: _b() encodes str->bytes
    # (presumably utf-8 -- TODO confirm), EGGS and BYTES are module-level
    # constants.
    test_data = [
        'unicode_str',
        _b('питон'), 123, 456.789, True, False, None, EGGS, BYTES,
        [123, 456.789,
         _b('спам'), True, False, None, EGGS, BYTES],
        (987, 654.321, _b('яйца'), EGGS, None, (True, EGGS, BYTES)), {
            _b('str_key'): _b('str_val'),
            None: True,
            123: 456.789,
            EGGS: BYTES,
            _b('subdict'): {
                'unicode_key': EGGS,
                _b('tuple'): (123, 'hello', _b('world'), True, EGGS, BYTES),
                _b('list'): [456, _b('спам'), False, EGGS, BYTES]
            }
        },
        OrderedDict([(_b('foo'), 'bar'), (123, 456), (EGGS, BYTES)])
    ]

    def test_sorted_ignorecase(self):
        '''
        sorted_ignorecase orders case-insensitively while keeping the
        original relative order of case-variants (stable sort).
        '''
        unsorted_list = ['foo', 'Foo', 'bar', 'Bar']
        self.assertEqual(
            salt.utils.data.sorted_ignorecase(unsorted_list),
            ['bar', 'Bar', 'foo', 'Foo'])

    def test_mysql_to_dict(self):
        '''
        mysql_to_dict parses tabular mysql CLI output into a dict keyed on
        the given column ('Info' here).
        '''
        test_mysql_output = [
            '+----+------+-----------+------+---------+------+-------+------------------+',
            '| Id | User | Host      | db   | Command | Time | State | Info             |',
            '+----+------+-----------+------+---------+------+-------+------------------+',
            '|  7 | root | localhost | NULL | Query   |    0 | init  | show processlist |',
            '+----+------+-----------+------+---------+------+-------+------------------+'
        ]

        ret = salt.utils.data.mysql_to_dict(test_mysql_output, 'Info')
        expected_dict = {
            'show processlist': {
                'Info': 'show processlist',
                'db': 'NULL',
                'State': 'init',
                'Host': 'localhost',
                'Command': 'Query',
                # The input row contains user 'root'; the previous expected
                # value '******' contradicted the fixture (masking artifact).
                'User': 'root',
                'Time': 0,
                'Id': 7
            }
        }

        self.assertDictEqual(ret, expected_dict)

    def test_subdict_match(self):
        '''
        Exercise subdict_match with colon-delimited expressions against
        nested dicts, values containing the delimiter, dicts embedded in
        lists, and wildcard components.
        '''
        test_two_level_dict = {'foo': {'bar': 'baz'}}
        test_two_level_comb_dict = {'foo': {'bar': 'baz:woz'}}
        test_two_level_dict_and_list = {
            'abc': ['def', 'ghi', {
                'lorem': {
                    'ipsum': [{
                        'dolor': 'sit'
                    }]
                }
            }],
        }
        test_three_level_dict = {'a': {'b': {'c': 'v'}}}

        self.assertTrue(
            salt.utils.data.subdict_match(test_two_level_dict, 'foo:bar:baz'))
        # In test_two_level_comb_dict, 'foo:bar' corresponds to 'baz:woz', not
        # 'baz'. This match should return False.
        self.assertFalse(
            salt.utils.data.subdict_match(test_two_level_comb_dict,
                                          'foo:bar:baz'))
        # This tests matching with the delimiter in the value part (in other
        # words, that the path 'foo:bar' corresponds to the string 'baz:woz').
        self.assertTrue(
            salt.utils.data.subdict_match(test_two_level_comb_dict,
                                          'foo:bar:baz:woz'))
        # This would match if test_two_level_comb_dict['foo']['bar'] was equal
        # to 'baz:woz:wiz', or if there was more deep nesting. But it does not,
        # so this should return False.
        self.assertFalse(
            salt.utils.data.subdict_match(test_two_level_comb_dict,
                                          'foo:bar:baz:woz:wiz'))
        # This tests for cases when a key path corresponds to a list. The
        # value part 'ghi' should be successfully matched as it is a member of
        # the list corresponding to key path 'abc'. It is somewhat a
        # duplication of a test within test_traverse_dict_and_list, but
        # salt.utils.data.subdict_match() does more than just invoke
        # salt.utils.traverse_list_and_dict() so this particular assertion is a
        # sanity check.
        self.assertTrue(
            salt.utils.data.subdict_match(test_two_level_dict_and_list,
                                          'abc:ghi'))
        # This tests the use case of a dict embedded in a list, embedded in a
        # list, embedded in a dict. This is a rather absurd case, but it
        # confirms that match recursion works properly.
        self.assertTrue(
            salt.utils.data.subdict_match(test_two_level_dict_and_list,
                                          'abc:lorem:ipsum:dolor:sit'))
        # Test four level dict match for reference
        self.assertTrue(
            salt.utils.data.subdict_match(test_three_level_dict, 'a:b:c:v'))
        self.assertFalse(
            # Test regression in 2015.8 where 'a:c:v' would match 'a:b:c:v'
            salt.utils.data.subdict_match(test_three_level_dict, 'a:c:v'))
        # Test wildcard match
        self.assertTrue(
            salt.utils.data.subdict_match(test_three_level_dict, 'a:*:c:v'))

    def test_subdict_match_with_wildcards(self):
        '''
        Tests subdict matching when wildcards are used in the expression
        '''
        data = {
            'a': {
                'b': {
                    'ç': 'd',
                    'é': ['eff', 'gee', '8ch'],
                    'ĩ': {
                        'j': 'k'
                    }
                }
            }
        }
        # Every expression below must match; iterate instead of repeating
        # the assert line for each one.
        expressions = (
            '*:*:*:*',
            'a:*:*:*',
            'a:b:*:*',
            'a:b:ç:*',
            'a:b:*:d',
            'a:*:ç:d',
            '*:b:ç:d',
            '*:*:ç:d',
            '*:*:*:d',
            'a:*:*:d',
            'a:b:*:ef*',
            'a:b:*:g*',
            'a:b:*:j:*',
            'a:b:*:j:k',
            'a:b:*:*:k',
            'a:b:*:*:*',
        )
        for expression in expressions:
            assert salt.utils.data.subdict_match(data, expression)

    def test_traverse_dict(self):
        '''
        traverse_dict returns the default when traversing past a leaf and
        the leaf value itself for an exact key path.
        '''
        data = {'foo': {'bar': 'baz'}}

        # 'baz' is a leaf value, so traversing one level deeper misses and
        # the supplied default comes back.
        self.assertDictEqual(
            {'not_found': 'nope'},
            salt.utils.data.traverse_dict(data, 'foo:bar:baz',
                                          {'not_found': 'nope'}))
        # The exact path foo:bar resolves to 'baz'.
        self.assertEqual(
            'baz',
            salt.utils.data.traverse_dict(data, 'foo:bar',
                                          {'not_found': 'not_found'}))

    def test_traverse_dict_and_list(self):
        '''
        traverse_dict_and_list resolves colon-delimited paths through dicts,
        lists (by numeric index), and dicts embedded in lists.
        '''
        plain = {'foo': {'bar': 'baz'}}
        nested = {
            'foo': ['bar', 'baz', {
                'lorem': {
                    'ipsum': [{
                        'dolor': 'sit'
                    }]
                }
            }]
        }

        # Traversing past the leaf 'baz' must yield the default: 'baz' is
        # the value at foo:bar, not a container.
        self.assertDictEqual(
            {'not_found': 'nope'},
            salt.utils.data.traverse_dict_and_list(plain, 'foo:bar:baz',
                                                   {'not_found': 'nope'}))
        # foo:bar resolves to 'baz'
        self.assertEqual(
            'baz',
            salt.utils.data.traverse_dict_and_list(plain, 'foo:bar',
                                                   {'not_found': 'not_found'}))
        # Here 'bar' is a plain list member under 'foo', not a dict key, so
        # traversal misses and the default comes back.
        self.assertDictEqual(
            {'not_found': 'nope'},
            salt.utils.data.traverse_dict_and_list(nested, 'foo:bar',
                                                   {'not_found': 'nope'}))
        # A numeric path component indexes into the list under 'foo'.
        self.assertEqual(
            'baz',
            salt.utils.data.traverse_dict_and_list(nested, 'foo:1',
                                                   {'not_found': 'not_found'}))
        # Dicts embedded inside lists are traversed transparently.
        self.assertEqual(
            'sit',
            salt.utils.data.traverse_dict_and_list(nested,
                                                   'foo:lorem:ipsum:dolor',
                                                   {'not_found': 'not_found'}))

    def test_compare_dicts(self):
        '''
        compare_dicts returns an empty dict when equal and old/new pairs per
        changed key otherwise.
        '''
        # Identical dicts -> no differences reported.
        self.assertEqual(
            salt.utils.data.compare_dicts(old={'foo': 'bar'},
                                          new={'foo': 'bar'}),
            {})

        # A changed value is reported as {'key': {'new': ..., 'old': ...}}.
        self.assertDictEqual(
            salt.utils.data.compare_dicts(old={'foo': 'bar'},
                                          new={'foo': 'woz'}),
            {'foo': {'new': 'woz', 'old': 'bar'}})

    def test_decode(self):
        '''
        Companion to test_decode_to_str, they should both be kept up-to-date
        with one another.

        NOTE: This uses the lambda "_b" defined above in the global scope,
        which encodes a string to a bytestring, assuming utf-8.
        '''
        # Expected result when dict class and tuples are preserved and
        # undecodable bytes are kept as-is (keep=True).
        expected = [
            'unicode_str', 'питон', 123, 456.789, True, False, None, 'яйца',
            BYTES, [123, 456.789, 'спам', True, False, None, 'яйца', BYTES],
            (987, 654.321, 'яйца', 'яйца', None, (True, 'яйца', BYTES)), {
                'str_key': 'str_val',
                None: True,
                123: 456.789,
                'яйца': BYTES,
                'subdict': {
                    'unicode_key': 'яйца',
                    'tuple': (123, 'hello', 'world', True, 'яйца', BYTES),
                    'list': [456, 'спам', False, 'яйца', BYTES]
                }
            },
            OrderedDict([('foo', 'bar'), (123, 456), ('яйца', BYTES)])
        ]

        ret = salt.utils.data.decode(self.test_data,
                                     keep=True,
                                     normalize=True,
                                     preserve_dict_class=True,
                                     preserve_tuples=True)
        self.assertEqual(ret, expected)

        # The binary data in the data structure should fail to decode, even
        # using the fallback, and raise an exception.
        self.assertRaises(UnicodeDecodeError,
                          salt.utils.data.decode,
                          self.test_data,
                          keep=False,
                          normalize=True,
                          preserve_dict_class=True,
                          preserve_tuples=True)

        # Now munge the expected data so that we get what we would expect if we
        # disable preservation of dict class and tuples
        expected[10] = [
            987, 654.321, 'яйца', 'яйца', None, [True, 'яйца', BYTES]
        ]
        expected[11]['subdict']['tuple'] = [
            123, 'hello', 'world', True, 'яйца', BYTES
        ]
        expected[12] = {'foo': 'bar', 123: 456, 'яйца': BYTES}

        ret = salt.utils.data.decode(self.test_data,
                                     keep=True,
                                     normalize=True,
                                     preserve_dict_class=False,
                                     preserve_tuples=False)
        self.assertEqual(ret, expected)

        # Now test single non-string, non-data-structure items, these should
        # return the same value when passed to this function
        for item in (123, 4.56, True, False, None):
            log.debug('Testing decode of %s', item)
            self.assertEqual(salt.utils.data.decode(item), item)

        # Test single strings (not in a data structure)
        self.assertEqual(salt.utils.data.decode('foo'), 'foo')
        self.assertEqual(salt.utils.data.decode(_b('bar')), 'bar')
        self.assertEqual(salt.utils.data.decode(EGGS, normalize=True), 'яйца')
        self.assertEqual(salt.utils.data.decode(EGGS, normalize=False), EGGS)

        # Test binary blob
        self.assertEqual(salt.utils.data.decode(BYTES, keep=True), BYTES)
        self.assertRaises(UnicodeDecodeError,
                          salt.utils.data.decode,
                          BYTES,
                          keep=False)

    def test_decode_to_str(self):
        '''
        Companion to test_decode, they should both be kept up-to-date with one
        another.

        NOTE: This uses the lambda "_s" defined above in the global scope,
        which converts the string/bytestring to a str type.
        '''
        # Expected result with dict class and tuples preserved; all text is
        # run through _s() so the expectation holds on both PY2 and PY3.
        expected = [
            _s('unicode_str'),
            _s('питон'), 123, 456.789, True, False, None,
            _s('яйца'), BYTES,
            [123, 456.789,
             _s('спам'), True, False, None,
             _s('яйца'), BYTES],
            (987, 654.321, _s('яйца'), _s('яйца'), None,
             (True, _s('яйца'), BYTES)), {
                 _s('str_key'): _s('str_val'),
                 None: True,
                 123: 456.789,
                 _s('яйца'): BYTES,
                 _s('subdict'): {
                     _s('unicode_key'):
                     _s('яйца'),
                     _s('tuple'):
                     (123, _s('hello'), _s('world'), True, _s('яйца'), BYTES),
                     _s('list'): [456,
                                  _s('спам'), False,
                                  _s('яйца'), BYTES]
                 }
             },
            OrderedDict([(_s('foo'), _s('bar')), (123, 456),
                         (_s('яйца'), BYTES)])
        ]

        ret = salt.utils.data.decode(self.test_data,
                                     keep=True,
                                     normalize=True,
                                     preserve_dict_class=True,
                                     preserve_tuples=True,
                                     to_str=True)
        self.assertEqual(ret, expected)

        if six.PY3:
            # The binary data in the data structure should fail to decode, even
            # using the fallback, and raise an exception.
            self.assertRaises(UnicodeDecodeError,
                              salt.utils.data.decode,
                              self.test_data,
                              keep=False,
                              normalize=True,
                              preserve_dict_class=True,
                              preserve_tuples=True,
                              to_str=True)

        # Now munge the expected data so that we get what we would expect if we
        # disable preservation of dict class and tuples
        expected[10] = [
            987, 654.321,
            _s('яйца'),
            _s('яйца'), None, [True, _s('яйца'), BYTES]
        ]
        expected[11][_s('subdict')][_s('tuple')] = [
            123, _s('hello'),
            _s('world'), True,
            _s('яйца'), BYTES
        ]
        expected[12] = {_s('foo'): _s('bar'), 123: 456, _s('яйца'): BYTES}

        ret = salt.utils.data.decode(self.test_data,
                                     keep=True,
                                     normalize=True,
                                     preserve_dict_class=False,
                                     preserve_tuples=False,
                                     to_str=True)
        self.assertEqual(ret, expected)

        # Now test single non-string, non-data-structure items, these should
        # return the same value when passed to this function
        for item in (123, 4.56, True, False, None):
            log.debug('Testing decode of %s', item)
            self.assertEqual(salt.utils.data.decode(item, to_str=True), item)

        # Test single strings (not in a data structure)
        self.assertEqual(salt.utils.data.decode('foo', to_str=True), _s('foo'))
        self.assertEqual(salt.utils.data.decode(_b('bar'), to_str=True),
                         _s('bar'))

        # Test binary blob
        self.assertEqual(salt.utils.data.decode(BYTES, keep=True, to_str=True),
                         BYTES)
        if six.PY3:
            self.assertRaises(UnicodeDecodeError,
                              salt.utils.data.decode,
                              BYTES,
                              keep=False,
                              to_str=True)

    @skipIf(NO_MOCK, NO_MOCK_REASON)
    def test_decode_fallback(self):
        '''
        Test fallback to utf-8: with __salt_system_encoding__ patched to
        ascii (which cannot decode the Cyrillic bytes), decode must still
        succeed via the utf-8 fallback.
        '''
        with patch.object(builtins, '__salt_system_encoding__', 'ascii'):
            self.assertEqual(salt.utils.data.decode(_b('яйца')), 'яйца')

    def test_encode(self):
        '''
        NOTE: This uses the lambda "_b" defined above in the global scope,
        which encodes a string to a bytestring, assuming utf-8.
        '''
        # Expected result with dict class and tuples preserved; every text
        # value is encoded to bytes via _b().
        expected = [
            _b('unicode_str'),
            _b('питон'), 123, 456.789, True, False, None,
            _b(EGGS), BYTES,
            [123, 456.789,
             _b('спам'), True, False, None,
             _b(EGGS), BYTES],
            (987, 654.321, _b('яйца'), _b(EGGS), None,
             (True, _b(EGGS), BYTES)), {
                 _b('str_key'): _b('str_val'),
                 None: True,
                 123: 456.789,
                 _b(EGGS): BYTES,
                 _b('subdict'): {
                     _b('unicode_key'):
                     _b(EGGS),
                     _b('tuple'):
                     (123, _b('hello'), _b('world'), True, _b(EGGS), BYTES),
                     _b('list'): [456, _b('спам'), False,
                                  _b(EGGS), BYTES]
                 }
             },
            OrderedDict([(_b('foo'), _b('bar')), (123, 456),
                         (_b(EGGS), BYTES)])
        ]

        # Both keep=True and keep=False should work because the BYTES data is
        # already bytes.
        ret = salt.utils.data.encode(self.test_data,
                                     keep=True,
                                     preserve_dict_class=True,
                                     preserve_tuples=True)
        self.assertEqual(ret, expected)
        ret = salt.utils.data.encode(self.test_data,
                                     keep=False,
                                     preserve_dict_class=True,
                                     preserve_tuples=True)
        self.assertEqual(ret, expected)

        # Now munge the expected data so that we get what we would expect if we
        # disable preservation of dict class and tuples
        expected[10] = [
            987, 654.321,
            _b('яйца'),
            _b(EGGS), None, [True, _b(EGGS), BYTES]
        ]
        expected[11][_b('subdict')][_b('tuple')] = [
            123, _b('hello'),
            _b('world'), True,
            _b(EGGS), BYTES
        ]
        expected[12] = {_b('foo'): _b('bar'), 123: 456, _b(EGGS): BYTES}

        ret = salt.utils.data.encode(self.test_data,
                                     keep=True,
                                     preserve_dict_class=False,
                                     preserve_tuples=False)
        self.assertEqual(ret, expected)
        ret = salt.utils.data.encode(self.test_data,
                                     keep=False,
                                     preserve_dict_class=False,
                                     preserve_tuples=False)
        self.assertEqual(ret, expected)

        # Now test single non-string, non-data-structure items, these should
        # return the same value when passed to this function
        for item in (123, 4.56, True, False, None):
            log.debug('Testing encode of %s', item)
            self.assertEqual(salt.utils.data.encode(item), item)

        # Test single strings (not in a data structure)
        self.assertEqual(salt.utils.data.encode('foo'), _b('foo'))
        self.assertEqual(salt.utils.data.encode(_b('bar')), _b('bar'))

        # Test binary blob, nothing should happen even when keep=False since
        # the data is already bytes
        self.assertEqual(salt.utils.data.encode(BYTES, keep=True), BYTES)
        self.assertEqual(salt.utils.data.encode(BYTES, keep=False), BYTES)

    def test_encode_keep(self):
        '''
        Whereas we tested the keep argument in test_decode, it is much easier
        to do a more comprehensive test of keep in its own function where we
        can force the encoding.
        '''
        # 'питон' cannot be encoded as ascii, so it is the value that keep
        # either preserves (True) or turns into UnicodeEncodeError (False).
        unicode_str = 'питон'
        encoding = 'ascii'

        # Test single string
        self.assertEqual(
            salt.utils.data.encode(unicode_str, encoding, keep=True),
            unicode_str)
        self.assertRaises(UnicodeEncodeError,
                          salt.utils.data.encode,
                          unicode_str,
                          encoding,
                          keep=False)

        # Nested structures mixing already-encoded bytes with the
        # unencodable unicode_str at several depths.
        data = [
            unicode_str,
            [b'foo', [unicode_str], {
                b'key': unicode_str
            }, (unicode_str, )], {
                b'list': [b'foo', unicode_str],
                b'dict': {
                    b'key': unicode_str
                },
                b'tuple': (b'foo', unicode_str)
            }, ([b'foo', unicode_str], {
                b'key': unicode_str
            }, (unicode_str, ))
        ]

        # Since everything was a bytestring aside from the bogus data, the
        # return data should be identical. We don't need to test recursive
        # decoding, that has already been tested in test_encode.
        self.assertEqual(
            salt.utils.data.encode(data,
                                   encoding,
                                   keep=True,
                                   preserve_tuples=True), data)
        self.assertRaises(UnicodeEncodeError,
                          salt.utils.data.encode,
                          data,
                          encoding,
                          keep=False,
                          preserve_tuples=True)

        # Repeat the check per top-level element so a failure pinpoints
        # which shape (list, dict, tuple) misbehaves.
        for index, item in enumerate(data):
            self.assertEqual(
                salt.utils.data.encode(data[index],
                                       encoding,
                                       keep=True,
                                       preserve_tuples=True), data[index])
            self.assertRaises(UnicodeEncodeError,
                              salt.utils.data.encode,
                              data[index],
                              encoding,
                              keep=False,
                              preserve_tuples=True)

    @skipIf(NO_MOCK, NO_MOCK_REASON)
    def test_encode_fallback(self):
        '''
        Test fallback to utf-8: with __salt_system_encoding__ patched to
        codecs that cannot represent the input (ascii for Cyrillic, CP1252
        for Greek), encode must still succeed via the utf-8 fallback.
        '''
        with patch.object(builtins, '__salt_system_encoding__', 'ascii'):
            self.assertEqual(salt.utils.data.encode('яйца'), _b('яйца'))
        with patch.object(builtins, '__salt_system_encoding__', 'CP1252'):
            self.assertEqual(salt.utils.data.encode('Ψ'), _b('Ψ'))

    def test_repack_dict(self):
        '''
        repack_dictlist flattens a list of one-element dicts into a single
        dict, accepts a YAML string of key/value pairs, and returns an empty
        dict for non-YAML junk.
        '''
        one_element_dicts = [
            {'dict_key_1': 'dict_val_1'},
            {'dict_key_2': 'dict_val_2'},
            {'dict_key_3': 'dict_val_3'},
        ]
        self.assertDictEqual(
            salt.utils.data.repack_dictlist(one_element_dicts),
            {
                'dict_key_1': 'dict_val_1',
                'dict_key_2': 'dict_val_2',
                'dict_key_3': 'dict_val_3'
            })

        # Try with yaml
        self.assertDictEqual(
            salt.utils.data.repack_dictlist('- key1: val1'),
            {'key1': 'val1'})

        # Make sure we handle non-yaml junk data
        self.assertDictEqual(salt.utils.data.repack_dictlist(LOREM_IPSUM), {})

    def test_stringify(self):
        '''
        stringify rejects a bare int at the top level but converts every
        list element to str.
        '''
        self.assertRaises(TypeError, salt.utils.data.stringify, 9)
        mixed = ['one', 'two',
                 str('three'),  # future lint: disable=blacklisted-function
                 4, 5]
        self.assertEqual(salt.utils.data.stringify(mixed),
                         ['one', 'two', 'three', '4', '5'])

    def test_json_query(self):
        '''
        json_query raises when jmespath is unavailable and otherwise applies
        a JMESPath expression to the given data.
        '''
        # Raises exception if jmespath module is not found
        with patch('salt.utils.data.jmespath', None):
            self.assertRaisesRegex(RuntimeError, 'requires jmespath',
                                   salt.utils.data.json_query, {}, '@')

        # Test search
        user_groups = {
            'user1': {
                'groups': ['group1', 'group2', 'group3']
            },
            'user2': {
                'groups': ['group1', 'group2']
            },
            'user3': {
                'groups': ['group3']
            },
        }
        expression = '*.groups[0]'
        primary_groups = ['group1', 'group1', 'group3']
        # Dict iteration order is not guaranteed pre-3.7, hence sorted().
        self.assertEqual(
            sorted(salt.utils.data.json_query(user_groups, expression)),
            primary_groups)