Beispiel #1
0
File: cp.py Project: bryson/salt
def cache_file(path, saltenv='base'):
    '''
    Used to cache a single file on the salt-minion
    Returns the location of the new cached file on the minion

    CLI Example:

    .. code-block:: bash

        salt '*' cp.cache_file salt://path/to/file

    There are two ways of defining the fileserver environment (a.k.a.
    ``saltenv``) from which to cache the file. One is to use the ``saltenv``
    parameter, and the other is to use a querystring syntax in the ``salt://``
    URL. The below two examples are equivalent:

    .. code-block:: bash

        salt '*' cp.cache_file salt://foo/bar.conf saltenv=config
        salt '*' cp.cache_file salt://foo/bar.conf?saltenv=config

    .. note::
        It may be necessary to quote the URL when using the querystring method,
        depending on the shell being used to run the command.
    '''
    contextkey = '{0}_|-{1}_|-{2}'.format('cp.cache_file', path, saltenv)
    path_is_remote = _urlparse(path).scheme in ('http', 'https', 'ftp')
    try:
        if path_is_remote and contextkey in __context__:
            # Prevent multiple caches in the same salt run. Affects remote URLs
            # since the master won't know their hash, so the fileclient
            # wouldn't be able to prevent multiple caches if we try to cache
            # the remote URL more than once.
            if os.path.isfile(__context__[contextkey]):
                return __context__[contextkey]
            else:
                # File is in __context__ but no longer exists in the minion
                # cache, get rid of the context key and re-cache below.
                # Accounts for corner case where file is removed from minion
                # cache between cp.cache_file calls in the same salt-run.
                __context__.pop(contextkey)
    except AttributeError:
        pass

    path, senv = salt.utils.url.split_env(path)
    if senv:
        saltenv = senv

    result = _client().cache_file(path, saltenv)
    if not result:
        # Lazy %-style logging args: the message is only formatted when the
        # record is actually emitted (matches the newer cp.cache_file style).
        log.error('Unable to cache file \'%s\' from saltenv \'%s\'.', path,
                  saltenv)
    if path_is_remote:
        # Cache was successful, store the result in __context__ to prevent
        # multiple caches (see above).
        __context__[contextkey] = result
    return result
Beispiel #2
0
def _source_encode(source, saltenv):
    '''
    Resolve ``source`` to a local file and return its basename together
    with the encoded file content (via ``_file_encode``).

    Local paths (no scheme, or ``file://``) are validated with
    ``_is_dns_subdomain`` and ``_is_valid_secret_file`` before encoding;
    remote sources are first cached through ``cp.cache_file``.
    Returns ``("", "")`` when the source cannot be used.
    '''
    try:
        source_url = _urlparse(source)
    except TypeError:
        # NOTE(review): this branch returns a 3-tuple while every other
        # return path yields a 2-tuple -- confirm what callers unpack
        # before normalizing it.
        return '', {}, ('Invalid format for source parameter')

    protos = ('salt', 'http', 'https', 'ftp', 'swift', 's3', 'file')

    log.trace("parsed source looks like: {0}".format(source_url))
    if not source_url.scheme or source_url.scheme == 'file':
        # just a regular file
        filename = os.path.abspath(source_url.path)
        sname = os.path.basename(filename)
        log.debug("Source is a regular local file: {0}".format(source_url.path))
        if _is_dns_subdomain(sname) and _is_valid_secret_file(filename):
            return sname, _file_encode(filename)
    elif source_url.scheme in protos:
        # The source is a file on a server
        filename = __salt__['cp.cache_file'](source, saltenv)
        if not filename:
            # logging.warn is a deprecated alias of logging.warning
            log.warning("Source file: {0} can not be retrieved".format(source))
            return "", ""
        return os.path.basename(filename), _file_encode(filename)
    return "", ""
Beispiel #3
0
def _get_src(tree_base, source, saltenv='base'):
    '''
    Get the named sources and place them into the tree_base
    '''
    parsed = _urlparse(source)
    sbase = os.path.basename(source)
    dest = os.path.join(tree_base, sbase)
    if parsed.scheme:
        __salt__['cp.get_url'](source, dest, saltenv=saltenv)
    else:
        shutil.copy(source, dest)
Beispiel #4
0
def _add_http_basic_auth(repository, https_user=None, https_pass=None):
    if https_user is None and https_pass is None:
        return repository
    else:
        urltuple = _urlparse(repository)
        if urltuple.scheme == 'https':
            netloc = "{0}:{1}@{2}".format(https_user, https_pass,
                                          urltuple.netloc)
            urltuple = urltuple._replace(netloc=netloc)
            return _urlunparse(urltuple)
        else:
            raise ValueError('Basic Auth only supported for HTTPS scheme')
Beispiel #5
0
def _add_http_basic_auth(repository, https_user=None, https_pass=None):
    if https_user is None and https_pass is None:
        return repository
    else:
        urltuple = _urlparse(repository)
        if urltuple.scheme == 'https':
            netloc = "{0}:{1}@{2}".format(https_user, https_pass,
                                          urltuple.netloc)
            urltuple = urltuple._replace(netloc=netloc)
            return _urlunparse(urltuple)
        else:
            raise ValueError('Basic Auth only supported for HTTPS scheme')
Beispiel #6
0
 def find_file(self, path, saltenv, back=None):
     '''
     Locate ``path`` on the configured fileserver backends and return the
     ``fnd`` structure that the other backend interfaces consume.
     '''
     back = self._gen_back(back)
     kwargs = {}
     fnd = {'path': '', 'rel': ''}
     # Absolute paths and directory-traversal attempts are never served.
     if os.path.isabs(path):
         return fnd
     if '../' in path:
         return fnd
     if salt.utils.url.is_escaped(path):
         # Escaped paths carry no querystring; just unescape them.
         path = salt.utils.url.unescape(path)
     else:
         url_parts = _urlparse(path)
         path = url_parts.path
         kwargs.update(_parse_qs(url_parts.query))
     if 'env' in kwargs:
         salt.utils.warn_until(
             'Boron',
             'Passing a salt environment should be done using \'saltenv\' '
             'not \'env\'. This functionality will be removed in Salt '
             'Boron.'
         )
         saltenv = kwargs.pop('env')
     elif 'saltenv' in kwargs:
         saltenv = kwargs.pop('saltenv')
     # First backend that reports a hit wins.
     for backend in back:
         fstr = '{0}.find_file'.format(backend)
         if fstr not in self.servers:
             continue
         fnd = self.servers[fstr](path, saltenv, **kwargs)
         if fnd.get('path'):
             fnd['back'] = backend
             return fnd
     return fnd
Beispiel #7
0
def _get_deps(deps, tree_base, saltenv='base'):
    '''
    Get include string for list of dependent rpms to build package
    '''
    deps_list = ''
    if deps is None:
        return deps_list
    if not isinstance(deps, list):
        raise SaltInvocationError(
            '\'deps\' must be a Python list or comma-separated string')
    for deprpm in deps:
        parsed = _urlparse(deprpm)
        depbase = os.path.basename(deprpm)
        dest = os.path.join(tree_base, depbase)
        if parsed.scheme:
            __salt__['cp.get_url'](deprpm, dest, saltenv=saltenv)
        else:
            shutil.copy(deprpm, dest)

        deps_list += ' {0}'.format(dest)

    return deps_list
Beispiel #8
0
def _get_deps(deps, tree_base, saltenv="base"):
    """
    Get include string for list of dependent rpms to build package
    """
    deps_list = ""
    if deps is None:
        return deps_list
    if not isinstance(deps, list):
        raise SaltInvocationError(
            "'deps' must be a Python list or comma-separated string")
    for deprpm in deps:
        parsed = _urlparse(deprpm)
        depbase = os.path.basename(deprpm)
        dest = os.path.join(tree_base, depbase)
        if parsed.scheme:
            __salt__["cp.get_url"](deprpm, dest, saltenv=saltenv)
        else:
            shutil.copy(deprpm, dest)

        deps_list += " {0}".format(dest)

    return deps_list
Beispiel #9
0
def _get_source_sum(source_hash, file_path, saltenv):
    '''
    Extract the hash sum, whether it is in a remote hash file, or just a string.
    '''
    ret = dict()
    schemes = ('salt', 'http', 'https', 'ftp', 'swift', 's3', 'file')
    invalid_hash_msg = (
        "Source hash '{0}' format is invalid. It must be in the format"
        ' <hash type>=<hash>').format(source_hash)
    source_hash = str(source_hash)
    source_hash_scheme = _urlparse(source_hash).scheme

    if source_hash_scheme in schemes:
        # The source_hash is a file on a server
        cached_hash_file = __salt__['cp.cache_file'](source_hash, saltenv)

        if not cached_hash_file:
            raise CommandExecutionError(('Source hash file {0} not'
                                         ' found').format(source_hash))

        ret = __salt__['file.extract_hash'](cached_hash_file, '', file_path)
        if ret is None:
            raise SaltInvocationError(invalid_hash_msg)
    else:
        # The source_hash is a hash string
        items = source_hash.split('=', 1)

        if len(items) != 2:
            invalid_hash_msg = ('{0}, or it must be a supported protocol'
                                ': {1}').format(invalid_hash_msg,
                                                ', '.join(schemes))
            raise SaltInvocationError(invalid_hash_msg)

        ret['hash_type'], ret['hsum'] = [
            item.strip().lower() for item in items
        ]

    return ret
Beispiel #10
0
 def find_file(self, path, saltenv, back=None):
     """
     Search the configured fileserver backends for ``path`` and return the
     ``fnd`` structure handed to the other backend interfaces.
     """
     back = self._gen_back(back)
     kwargs = {}
     fnd = {"path": "", "rel": ""}
     # Refuse absolute paths and anything trying to climb out of the root.
     if os.path.isabs(path) or "../" in path:
         return fnd
     if salt.utils.url.is_escaped(path):
         # Escaped paths carry no querystring; just unescape them.
         path = salt.utils.url.unescape(path)
     else:
         parts = _urlparse(path)
         path = parts.path
         kwargs.update(_parse_qs(parts.query))
     if "env" in kwargs:
         salt.utils.warn_until(
             "Boron",
             "Passing a salt environment should be done using 'saltenv' "
             "not 'env'. This functionality will be removed in Salt "
             "Boron.",
         )
         saltenv = kwargs.pop("env")
     elif "saltenv" in kwargs:
         saltenv = kwargs.pop("saltenv")
     # Return the first backend hit; fall through to the empty fnd otherwise.
     for fsb in back:
         fstr = "{0}.find_file".format(fsb)
         if fstr not in self.servers:
             continue
         fnd = self.servers[fstr](path, saltenv, **kwargs)
         if fnd.get("path"):
             fnd["back"] = fsb
             return fnd
     return fnd
Beispiel #11
0
def _get_deps(deps, tree_base, saltenv='base'):
    '''
    Get include string for list of dependent rpms to build package
    '''
    deps_list = ""
    if deps is None:
        return deps_list
    if not isinstance(deps, list):
        raise SaltInvocationError(
            '\'deps\' must be a Python list or comma-separated string'
        )
    for deprpm in deps:
        parsed = _urlparse(deprpm)
        depbase = os.path.basename(deprpm)
        dest = os.path.join(tree_base, depbase)
        if parsed.scheme:
            __salt__['cp.get_url'](deprpm, dest, saltenv=saltenv)
        else:
            shutil.copy(deprpm, dest)

        deps_list += ' --install {0}'.format(dest)

    return deps_list
Beispiel #12
0
def mod_repo(repo, **kwargs):
    '''
    Modify one or more values for a repo. If the repo does not exist, it will
    be created, so long as the following values are specified:

    repo or alias
        alias by which the zypper refers to the repo

    url, mirrorlist or baseurl
        the URL for zypper to reference

    enabled
        enable or disable (True or False) repository,
        but do not remove if disabled.

    refresh
        enable or disable (True or False) auto-refresh of the repository.

    cache
        Enable or disable (True or False) RPM files caching.

    gpgcheck
        Enable or disable (True or False) GPG check for this repository.

    Key/Value pairs may also be removed from a repo's configuration by setting
    a key to a blank value. Bear in mind that a name cannot be deleted, and a
    url can only be deleted if a mirrorlist is specified (or vice versa).

    CLI Examples:

    .. code-block:: bash

        salt '*' pkg.mod_repo alias alias=new_alias
        salt '*' pkg.mod_repo alias url= mirrorlist=http://host.com/
    '''

    repos_cfg = _get_configured_repos()
    added = False

    # An attempt to add new one?
    if repo not in repos_cfg.sections():
        # URL precedence: url > mirrorlist > baseurl.
        url = kwargs.get('url', kwargs.get('mirrorlist', kwargs.get('baseurl')))
        if not url:
            raise CommandExecutionError(
                'Repository \'{0}\' not found and no URL passed to create one.'.format(repo))

        if not _urlparse(url).scheme:
            raise CommandExecutionError(
                'Repository \'{0}\' not found and passed URL looks wrong.'.format(repo))

        # Is there already such repo under different alias?
        for alias in repos_cfg.sections():
            repo_meta = _get_repo_info(alias, repos_cfg=repos_cfg)

            # Complete user URL, in case it is not
            # (normalize an empty path to '/' so the comparison below matches).
            new_url = _urlparse(url)
            if not new_url.path:
                # NOTE(review): this assumes _urlparse exposes ParseResult
                # (i.e. it is the urllib parse module, or re-exports the
                # class); the bare urlparse() function has no such
                # attribute -- confirm against the module's imports.
                new_url = _urlparse.ParseResult(scheme=new_url.scheme,  # pylint: disable=E1123
                                               netloc=new_url.netloc,
                                               path='/',
                                               params=new_url.params,
                                               query=new_url.query,
                                               fragment=new_url.fragment)
            base_url = _urlparse(repo_meta['baseurl'])

            if new_url == base_url:
                raise CommandExecutionError(
                    'Repository \'{0}\' already exists as \'{1}\'.'.format(repo, alias))

        # Add new repo
        doc = None
        try:
            # Try to parse the output and find the error,
            # but this not always working (depends on Zypper version)
            # NOTE(review): url and repo are interpolated into a shell
            # string; values containing single quotes would break the
            # quoting -- confirm inputs are trusted/sanitized upstream.
            doc = dom.parseString(__salt__['cmd.run'](('zypper -x ar {0} \'{1}\''.format(url, repo)),
                                                      output_loglevel='trace'))
        except Exception:
            # No XML out available, but it is still unknown the state of the result.
            pass

        if doc:
            msg_nodes = doc.getElementsByTagName('message')
            if msg_nodes:
                msg_node = msg_nodes[0]
                if msg_node.getAttribute('type') == 'error':
                    raise CommandExecutionError(msg_node.childNodes[0].nodeValue)

        # Verify the repository has been added
        repos_cfg = _get_configured_repos()
        if repo not in repos_cfg.sections():
            raise CommandExecutionError(
                'Failed add new repository \'{0}\' for unknown reason. '
                'Please look into Zypper logs.'.format(repo))
        added = True

    # Modify added or existing repo according to the options
    cmd_opt = []

    if 'enabled' in kwargs:
        cmd_opt.append(kwargs['enabled'] and '--enable' or '--disable')

    if 'refresh' in kwargs:
        cmd_opt.append(kwargs['refresh'] and '--refresh' or '--no-refresh')

    if 'cache' in kwargs:
        cmd_opt.append(kwargs['cache'] and '--keep-packages' or '--no-keep-packages')

    if 'gpgcheck' in kwargs:
        cmd_opt.append(kwargs['gpgcheck'] and '--gpgcheck' or '--no-gpgcheck')

    if cmd_opt:
        __salt__['cmd.run'](('zypper -x mr {0} \'{1}\''.format(' '.join(cmd_opt), repo)),
                            output_loglevel='trace')

    # If the repo was neither added nor modified, an error should be thrown
    if not added and not cmd_opt:
        raise CommandExecutionError(
                'Modification of the repository \'{0}\' was not specified.'.format(repo))

    return {}
Beispiel #13
0
def cache_file(path, saltenv='base', source_hash=None):
    '''
    Cache a single file on the Minion and return the location of the new
    cached file.

    source_hash
        If ``name`` is an http(s) or ftp URL and the file exists in the
        minion's file cache, this option can be passed to keep the minion from
        re-downloading the file if the cached copy matches the specified hash.

        .. versionadded:: 2018.3.0

    CLI Example:

    .. code-block:: bash

        salt '*' cp.cache_file salt://path/to/file

    The fileserver environment (a.k.a. ``saltenv``) can be given either via
    the ``saltenv`` parameter or as a querystring in the ``salt://`` URL;
    the following two examples are equivalent:

    .. code-block:: bash

        salt '*' cp.cache_file salt://foo/bar.conf saltenv=config
        salt '*' cp.cache_file salt://foo/bar.conf?saltenv=config

    If the path being cached is a ``salt://`` URI, and the path does not
    exist, then ``False`` will be returned.

    .. note::
        It may be necessary to quote the URL when using the querystring
        method, depending on the shell being used to run the command.
    '''
    path = salt.utils.locales.sdecode(path)
    saltenv = salt.utils.locales.sdecode(saltenv)

    contextkey = '{0}_|-{1}_|-{2}'.format('cp.cache_file', path, saltenv)
    path_is_remote = _urlparse(path).scheme in ('http', 'https', 'ftp')

    try:
        if path_is_remote and contextkey in __context__:
            # The master cannot know the hash of a remote URL, so the
            # fileclient cannot dedupe repeat downloads itself; reuse the
            # copy recorded for this salt run instead.
            cached = __context__[contextkey]
            if os.path.isfile(cached):
                return cached
            # Stale entry: the file vanished from the minion cache between
            # cp.cache_file calls in the same run. Drop it and re-cache.
            __context__.pop(contextkey)
    except AttributeError:
        pass

    path, senv = salt.utils.url.split_env(path)
    if senv:
        saltenv = senv

    result = _client().cache_file(path, saltenv, source_hash=source_hash)
    if not result:
        log.error('Unable to cache file \'%s\' from saltenv \'%s\'.', path,
                  saltenv)
    if path_is_remote:
        # Remember the outcome so repeat calls in this run short-circuit
        # (see above).
        __context__[contextkey] = result
    return result
Beispiel #14
0
def list_(name,
          archive_format=None,
          options=None,
          strip_components=None,
          clean=False,
          verbose=False,
          saltenv='base',
          source_hash=None):
    '''
    .. versionadded:: 2016.11.0
    .. versionchanged:: 2016.11.2
        The rarfile_ Python module is now supported for listing the contents of
        rar archives. This is necessary on minions with older releases of the
        ``rar`` CLI tool, which do not support listing the contents in a
        parsable format.

    .. _rarfile: https://pypi.python.org/pypi/rarfile

    List the files and directories in an tar, zip, or rar archive.

    .. note::
        This function will only provide results for XZ-compressed archives if
        the xz_ CLI command is available, as Python does not at this time
        natively support XZ compression in its tarfile_ module. Keep in mind
        however that most Linux distros ship with xz_ already installed.

        To check if a given minion has xz_, the following Salt command can be
        run:

        .. code-block:: bash

            salt minion_id cmd.which xz

        If ``None`` is returned, then xz_ is not present and must be installed.
        It is widely available and should be packaged as either ``xz`` or
        ``xz-utils``.

    name
        Path/URL of archive

    archive_format
        Specify the format of the archive (``tar``, ``zip``, or ``rar``). If
        this argument is omitted, the archive format will be guessed based on
        the value of the ``name`` parameter.

    options
        **For tar archives only.** This function will, by default, try to use
        the tarfile_ module from the Python standard library to get a list of
        files/directories. If this method fails, then it will fall back to
        using the shell to decompress the archive to stdout and pipe the
        results to ``tar -tf -`` to produce a list of filenames. XZ-compressed
        archives are already supported automatically, but in the event that the
        tar archive uses a different sort of compression not supported natively
        by tarfile_, this option can be used to specify a command that will
        decompress the archive to stdout. For example:

        .. code-block:: bash

            salt minion_id archive.list /path/to/foo.tar.gz options='gzip --decompress --stdout'

        .. note::
            It is not necessary to manually specify options for gzip'ed
            archives, as gzip compression is natively supported by tarfile_.

    strip_components
        This argument specifies a number of top-level directories to strip from
        the results. This is similar to the paths that would be extracted if
        ``--strip-components`` (or ``--strip``) were used when extracting tar
        archives.

        .. versionadded:: 2016.11.2

    clean : False
        Set this value to ``True`` to delete the path referred to by ``name``
        once the contents have been listed. This option should be used with
        care.

        .. note::
            If there is an error listing the archive's contents, the cached
            file will not be removed, to allow for troubleshooting.

    verbose : False
        If ``False``, this function will return a list of files/dirs in the
        archive. If ``True``, it will return a dictionary categorizing the
        paths into separate keys containing the directory names, file names,
        and also directories/files present in the top level of the archive.

        .. versionchanged:: 2016.11.2
            This option now includes symlinks in their own list. Before, they
            were included with files.

    saltenv : base
        Specifies the fileserver environment from which to retrieve
        ``archive``. This is only applicable when ``archive`` is a file from
        the ``salt://`` fileserver.

    source_hash
        If ``name`` is an http(s)/ftp URL and the file exists in the minion's
        file cache, this option can be passed to keep the minion from
        re-downloading the archive if the cached copy matches the specified
        hash.

        .. versionadded:: 2018.3.0

    .. _tarfile: https://docs.python.org/2/library/tarfile.html
    .. _xz: http://tukaani.org/xz/

    CLI Examples:

    .. code-block:: bash

            salt '*' archive.list /path/to/myfile.tar.gz
            salt '*' archive.list /path/to/myfile.tar.gz strip_components=1
            salt '*' archive.list salt://foo.tar.gz
            salt '*' archive.list https://domain.tld/myfile.zip
            salt '*' archive.list https://domain.tld/myfile.zip source_hash=f1d2d2f924e986ac86fdf7b36c94bcdf32beec15
            salt '*' archive.list ftp://10.1.2.3/foo.rar
    '''
    def _list_tar(name, cached, decompress_cmd, failhard=False):
        '''
        List the contents of a tar archive.
        '''
        dirs = []
        files = []
        links = []
        try:
            # When 'cached' is a Popen handle (second pass, see below), read
            # the tar stream from its stdout instead of opening a path.
            open_kwargs = {'name': cached} \
                if not isinstance(cached, subprocess.Popen) \
                else {'fileobj': cached.stdout, 'mode': 'r|'}
            with contextlib.closing(
                    tarfile.open(**open_kwargs)) as tar_archive:
                for member in tar_archive.getmembers():
                    if member.issym():
                        links.append(member.name)
                    elif member.isdir():
                        dirs.append(member.name + '/')
                    else:
                        files.append(member.name)
            return dirs, files, links

        except tarfile.ReadError:
            if failhard:
                if isinstance(cached, subprocess.Popen):
                    stderr = cached.communicate()[1]
                    if cached.returncode != 0:
                        raise CommandExecutionError(
                            'Failed to decompress {0}'.format(name),
                            info={'error': stderr})
            else:
                if not salt.utils.path.which('tar'):
                    raise CommandExecutionError(
                        '\'tar\' command not available')
                if decompress_cmd is not None:
                    # Guard against shell injection
                    try:
                        decompress_cmd = ' '.join(
                            [_quote(x) for x in shlex.split(decompress_cmd)])
                    except AttributeError:
                        raise CommandExecutionError('Invalid CLI options')
                else:
                    # No explicit decompress command: fall back to xz if the
                    # archive passes an 'xz -t' integrity test.
                    if salt.utils.path.which('xz') \
                            and __salt__['cmd.retcode'](['xz', '-t', cached],
                                                        python_shell=False,
                                                        ignore_retcode=True) == 0:
                        decompress_cmd = 'xz --decompress --stdout'

                if decompress_cmd:
                    decompressed = subprocess.Popen('{0} {1}'.format(
                        decompress_cmd, _quote(cached)),
                                                    shell=True,
                                                    stdout=subprocess.PIPE,
                                                    stderr=subprocess.PIPE)
                    # Retry once on the decompressed stream, with failhard
                    # set so a second failure raises instead of recursing.
                    return _list_tar(name, decompressed, None, True)

        raise CommandExecutionError(
            'Unable to list contents of {0}. If this is an XZ-compressed tar '
            'archive, install XZ Utils to enable listing its contents. If it '
            'is compressed using something other than XZ, it may be necessary '
            'to specify CLI options to decompress the archive. See the '
            'documentation for details.'.format(name))

    def _list_zip(name, cached):
        '''
        List the contents of a zip archive.
        Password-protected ZIP archives can still be listed by zipfile, so
        there is no reason to invoke the unzip command.
        '''
        dirs = set()
        files = []
        links = []
        try:
            with contextlib.closing(zipfile.ZipFile(cached)) as zip_archive:
                for member in zip_archive.infolist():
                    path = member.filename
                    if salt.utils.platform.is_windows():
                        if path.endswith('/'):
                            # zipfile.ZipInfo objects on windows use forward
                            # slash at end of the directory name.
                            dirs.add(path)
                        else:
                            files.append(path)
                    else:
                        # The upper 16 bits of external_attr hold the POSIX
                        # mode for archives created on Unix.
                        mode = member.external_attr >> 16
                        if stat.S_ISLNK(mode):
                            links.append(path)
                        elif stat.S_ISDIR(mode):
                            dirs.add(path)
                        else:
                            files.append(path)

                _files = copy.deepcopy(files)
                for path in _files:
                    # ZIP files created on Windows do not add entries
                    # to the archive for directories. So, we'll need to
                    # manually add them.
                    dirname = ''.join(path.rpartition('/')[:2])
                    if dirname:
                        dirs.add(dirname)
                        if dirname in files:
                            files.remove(dirname)
            return list(dirs), files, links
        except zipfile.BadZipfile:
            raise CommandExecutionError('{0} is not a ZIP file'.format(name))

    def _list_rar(name, cached):
        '''
        List the contents of a rar archive.
        '''
        dirs = []
        files = []
        if HAS_RARFILE:
            with rarfile.RarFile(cached) as rf:
                for member in rf.infolist():
                    path = member.filename.replace('\\', '/')
                    if member.isdir():
                        dirs.append(path + '/')
                    else:
                        files.append(path)
        else:
            # No rarfile module: scrape the output of 'rar lt' instead.
            if not salt.utils.path.which('rar'):
                raise CommandExecutionError(
                    'rar command not available, is it installed?')
            output = __salt__['cmd.run'](['rar', 'lt', name],
                                         python_shell=False,
                                         ignore_retcode=False)
            matches = re.findall(r'Name:\s*([^\n]+)\s*Type:\s*([^\n]+)',
                                 output)
            for path, type_ in matches:
                if type_ == 'Directory':
                    dirs.append(path + '/')
                else:
                    files.append(path)
            if not dirs and not files:
                raise CommandExecutionError(
                    'Failed to list {0}, is it a rar file? If so, the '
                    'installed version of rar may be too old to list data in '
                    'a parsable format. Installing the rarfile Python module '
                    'may be an easier workaround if newer rar is not readily '
                    'available.'.format(name),
                    info={'error': output})
        return dirs, files, []

    cached = __salt__['cp.cache_file'](name, saltenv, source_hash=source_hash)
    if not cached:
        raise CommandExecutionError('Failed to cache {0}'.format(name))

    try:
        if strip_components:
            # NOTE(review): a string value (e.g. '1') passes the int() probe
            # below but then hits the '<= 0' comparison un-converted, which
            # raises TypeError on Python 3 -- confirm callers always pass an
            # int here.
            try:
                int(strip_components)
            except ValueError:
                strip_components = -1

            if strip_components <= 0:
                raise CommandExecutionError(
                    '\'strip_components\' must be a positive integer')

        parsed = _urlparse(name)
        path = parsed.path or parsed.netloc

        def _unsupported_format(archive_format):
            '''
            Raise the proper exception message for the given archive format.
            '''
            if archive_format is None:
                raise CommandExecutionError(
                    'Unable to guess archive format, please pass an '
                    '\'archive_format\' argument.')
            raise CommandExecutionError(
                'Unsupported archive format \'{0}\''.format(archive_format))

        if not archive_format:
            guessed_format = salt.utils.files.guess_archive_type(path)
            if guessed_format is None:
                _unsupported_format(archive_format)
            archive_format = guessed_format

        # Dispatch to the matching nested helper (_list_tar/_list_zip/
        # _list_rar) by name; anything else is an unsupported format.
        func = locals().get('_list_' + archive_format)
        if not hasattr(func, '__call__'):
            _unsupported_format(archive_format)

        args = (options, ) if archive_format == 'tar' else ()
        try:
            dirs, files, links = func(name, cached, *args)
        except (IOError, OSError) as exc:
            raise CommandExecutionError(
                'Failed to list contents of {0}: {1}'.format(
                    name, exc.__str__()))
        except CommandExecutionError as exc:
            # Already the right exception type; re-raise unchanged so the
            # generic handler below does not re-wrap it.
            raise
        except Exception as exc:
            raise CommandExecutionError(
                'Uncaught exception \'{0}\' when listing contents of {1}'.
                format(exc, name))

        if clean:
            try:
                os.remove(cached)
                log.debug('Cleaned cached archive %s', cached)
            except OSError as exc:
                if exc.errno != errno.ENOENT:
                    log.warning('Failed to clean cached archive %s: %s',
                                cached, exc.__str__())

        if strip_components:
            for item in (dirs, files, links):
                for index, path in enumerate(item):
                    try:
                        # Strip off the specified number of directory
                        # boundaries, and grab what comes after the last
                        # stripped path separator.
                        item[index] = item[index].split(
                            os.sep, strip_components)[strip_components]
                    except IndexError:
                        # Path is excluded by strip_components because it is not
                        # deep enough. Set this to an empty string so it can
                        # be removed in the generator expression below.
                        item[index] = ''

                # Remove all paths which were excluded
                item[:] = (x for x in item if x)
                item.sort()

        if verbose:
            ret = {
                'dirs': sorted(salt.utils.data.decode_list(dirs)),
                'files': sorted(salt.utils.data.decode_list(files)),
                'links': sorted(salt.utils.data.decode_list(links))
            }
            ret['top_level_dirs'] = [
                x for x in ret['dirs'] if x.count('/') == 1
            ]
            ret['top_level_files'] = [
                x for x in ret['files'] if x.count('/') == 0
            ]
            ret['top_level_links'] = [
                x for x in ret['links'] if x.count('/') == 0
            ]
        else:
            ret = sorted(dirs + files + links)
        return ret

    except CommandExecutionError as exc:
        # Reraise with cache path in the error so that the user can examine the
        # cached archive for troubleshooting purposes.
        info = exc.info or {}
        info['archive location'] = cached
        raise CommandExecutionError(exc.error, info=info)
Beispiel #15
0
def cache_file(path, saltenv='base', env=None):
    '''
    Used to cache a single file on the salt-minion
    Returns the location of the new cached file on the minion

    CLI Example:

    .. code-block:: bash

        salt '*' cp.cache_file salt://path/to/file

    There are two ways of defining the fileserver environment (a.k.a.
    ``saltenv``) from which to cache the file. One is to use the ``saltenv``
    parameter, and the other is to use a querystring syntax in the ``salt://``
    URL. The below two examples are equivalent:

    .. code-block:: bash

        salt '*' cp.cache_file salt://foo/bar.conf saltenv=config
        salt '*' cp.cache_file salt://foo/bar.conf?saltenv=config

    .. note::
        It may be necessary to quote the URL when using the querystring method,
        depending on the shell being used to run the command.
    '''
    if env is not None:
        salt.utils.warn_until(
            'Boron',
            'Passing a salt environment should be done using \'saltenv\' '
            'not \'env\'. This functionality will be removed in Salt Boron.'
        )
        # Backwards compatibility
        saltenv = env

    contextkey = '{0}_|-{1}_|-{2}'.format('cp.cache_file', path, saltenv)
    path_is_remote = _urlparse(path).scheme in ('http', 'https', 'ftp')
    try:
        if path_is_remote and contextkey in __context__:
            # Prevent multiple caches in the same salt run. Affects remote URLs
            # since the master won't know their hash, so the fileclient
            # wouldn't be able to prevent multiple caches if we try to cache
            # the remote URL more than once.
            if os.path.isfile(__context__[contextkey]):
                return __context__[contextkey]
            else:
                # File is in __context__ but no longer exists in the minion
                # cache, get rid of the context key and re-cache below.
                # Accounts for corner case where file is removed from minion
                # cache between cp.cache_file calls in the same salt-run.
                __context__.pop(contextkey)
    except AttributeError:
        pass

    # A saltenv embedded in the URL (salt://foo?saltenv=bar) overrides the
    # saltenv argument.
    path, senv = salt.utils.url.split_env(path)
    if senv:
        saltenv = senv

    result = _client().cache_file(path, saltenv)
    if not result:
        # Lazy %-args so the repr is only built if the message is emitted
        log.error(
            'Unable to cache file %r from saltenv %r.',
            path, saltenv
        )
    elif path_is_remote:
        # Cache was successful, store the result in __context__ to prevent
        # multiple caches (see above). Only store on success: caching a falsy
        # result would make the next call in the same run attempt
        # os.path.isfile() on a non-path and mask the failure.
        __context__[contextkey] = result
    return result
Beispiel #16
0
def list_(name,
          archive_format=None,
          options=None,
          clean=False,
          verbose=False,
          saltenv='base'):
    '''
    .. versionadded:: 2016.11.0

    List the files and directories in an tar, zip, or rar archive.

    .. note::
        This function will only provide results for XZ-compressed archives if
        xz-utils_ is installed, as Python does not at this time natively
        support XZ compression in its tarfile_ module.

    name
        Path/URL of archive

    archive_format
        Specify the format of the archive (``tar``, ``zip``, or ``rar``). If
        this argument is omitted, the archive format will be guessed based on
        the value of the ``name`` parameter.

    options
        **For tar archives only.** This function will, by default, try to use
        the tarfile_ module from the Python standard library to get a list of
        files/directories. If this method fails, then it will fall back to
        using the shell to decompress the archive to stdout and pipe the
        results to ``tar -tf -`` to produce a list of filenames. XZ-compressed
        archives are already supported automatically, but in the event that the
        tar archive uses a different sort of compression not supported natively
        by tarfile_, this option can be used to specify a command that will
        decompress the archive to stdout. For example:

        .. code-block:: bash

            salt minion_id archive.list /path/to/foo.tar.gz options='gzip --decompress --stdout'

        .. note::
            It is not necessary to manually specify options for gzip'ed
            archives, as gzip compression is natively supported by tarfile_.

    clean : False
        Set this value to ``True`` to delete the path referred to by ``name``
        once the contents have been listed. This option should be used with
        care.

        .. note::
            If there is an error listing the archive's contents, the cached
            file will not be removed, to allow for troubleshooting.

    verbose : False
        If ``False``, this function will return a list of files/dirs in the
        archive. If ``True``, it will return a dictionary categorizing the
        paths into separate keys containing the directory names, file names,
        and also directories/files present in the top level of the archive.

    saltenv : base
        Specifies the fileserver environment from which to retrieve
        ``archive``. This is only applicable when ``archive`` is a file from
        the ``salt://`` fileserver.

    .. _tarfile: https://docs.python.org/2/library/tarfile.html
    .. _xz-utils: http://tukaani.org/xz/

    CLI Examples:

    .. code-block:: bash

            salt '*' archive.list /path/to/myfile.tar.gz
            salt '*' archive.list salt://foo.tar.gz
            salt '*' archive.list https://domain.tld/myfile.zip
            salt '*' archive.list ftp://10.1.2.3/foo.rar
    '''
    def _list_tar(name, cached, decompress_cmd):
        # Try Python's native tarfile support first; fall back to shelling
        # out to the tar command for compression formats tarfile can't read.
        try:
            with contextlib.closing(tarfile.open(cached)) as tar_archive:
                return [
                    x.name + '/' if x.isdir() else x.name
                    for x in tar_archive.getmembers()
                ]
        except tarfile.ReadError:
            if not salt.utils.which('tar'):
                raise CommandExecutionError('\'tar\' command not available')
            if decompress_cmd is not None:
                # Guard against shell injection
                try:
                    decompress_cmd = ' '.join(
                        [_quote(x) for x in shlex.split(decompress_cmd)])
                except AttributeError:
                    raise CommandExecutionError('Invalid CLI options')
            else:
                # Auto-detect XZ compression when no decompress command was
                # passed, since tarfile can't handle it natively.
                if salt.utils.which('xz') \
                        and __salt__['cmd.retcode'](['xz', '-l', cached],
                                                    python_shell=False,
                                                    ignore_retcode=True) == 0:
                    decompress_cmd = 'xz --decompress --stdout'

            if decompress_cmd:
                cmd = '{0} {1} | tar tf -'.format(decompress_cmd,
                                                  _quote(cached))
                result = __salt__['cmd.run_all'](cmd, python_shell=True)
                if result['retcode'] != 0:
                    raise CommandExecutionError(
                        'Failed to decompress {0}'.format(name),
                        info={'error': result['stderr']})
                ret = []
                for line in salt.utils.itertools.split(result['stdout'], '\n'):
                    line = line.strip()
                    if line:
                        ret.append(line)
                return ret

        raise CommandExecutionError(
            'Unable to list contents of {0}. If this is an XZ-compressed tar '
            'archive, install xz-utils to enable listing its contents. If it '
            'is compressed using something other than XZ, it may be necessary '
            'to specify CLI options to decompress the archive. See the '
            'documentation for details.'.format(name))

    def _list_zip(name, cached):
        # Password-protected ZIP archives can still be listed by zipfile, so
        # there is no reason to invoke the unzip command.
        try:
            with contextlib.closing(zipfile.ZipFile(cached)) as zip_archive:
                return zip_archive.namelist()
        except zipfile.BadZipfile:
            raise CommandExecutionError('{0} is not a ZIP file'.format(name))

    def _list_rar(name, cached):
        # Run 'rar lt' on the locally cached copy. Using the closure variable
        # 'path' here was a bug: for remote sources it is the URL path, not a
        # file on the minion, so the command could never find the archive.
        output = __salt__['cmd.run'](['rar', 'lt', cached],
                                     python_shell=False,
                                     ignore_retcode=False)
        matches = re.findall(r'Name:\s*([^\n]+)\s*Type:\s*([^\n]+)', output)
        ret = [x + '/' if y == 'Directory' else x for x, y in matches]
        if not ret:
            raise CommandExecutionError(
                'Failed to decompress {0}'.format(name),
                info={'error': output})
        return ret

    cached = __salt__['cp.cache_file'](name, saltenv)
    if not cached:
        raise CommandExecutionError('Failed to cache {0}'.format(name))

    try:
        parsed = _urlparse(name)
        path = parsed.path or parsed.netloc

        def _unsupported_format(archive_format):
            # Central error helper so both the guess failure and the explicit
            # bad-format failure raise consistent messages.
            if archive_format is None:
                raise CommandExecutionError(
                    'Unable to guess archive format, please pass an '
                    '\'archive_format\' argument.')
            raise CommandExecutionError(
                'Unsupported archive format \'{0}\''.format(archive_format))

        if not archive_format:
            guessed_format = salt.utils.files.guess_archive_type(path)
            if guessed_format is None:
                _unsupported_format(archive_format)
            archive_format = guessed_format

        # Dispatch to the matching _list_* helper defined above
        func = locals().get('_list_' + archive_format)
        if not hasattr(func, '__call__'):
            _unsupported_format(archive_format)

        args = (options, ) if archive_format == 'tar' else ()
        try:
            ret = func(name, cached, *args)
        except (IOError, OSError) as exc:
            raise CommandExecutionError(
                'Failed to list contents of {0}: {1}'.format(
                    name, exc.__str__()))
        except CommandExecutionError:
            # Already a well-formed error; re-raise for the handler below
            raise
        except Exception as exc:
            raise CommandExecutionError(
                'Uncaught exception \'{0}\' when listing contents of {1}'.
                format(exc, name))

        if clean:
            try:
                os.remove(cached)
                log.debug('Cleaned cached archive %s', cached)
            except OSError as exc:
                # Already-removed (ENOENT) is fine; anything else is logged
                # but does not fail the listing.
                if exc.errno != errno.ENOENT:
                    log.warning('Failed to clean cached archive %s: %s',
                                cached, exc.__str__())
        if verbose:
            verbose_ret = {
                'dirs': [],
                'files': [],
                'top_level_dirs': [],
                'top_level_files': []
            }
            for item in ret:
                if item.endswith('/'):
                    verbose_ret['dirs'].append(item)
                    if item.count('/') == 1:
                        verbose_ret['top_level_dirs'].append(item)
                else:
                    verbose_ret['files'].append(item)
                    if item.count('/') == 0:
                        verbose_ret['top_level_files'].append(item)
            ret = verbose_ret
        return ret
    except CommandExecutionError as exc:
        # Reraise with cache path in the error so that the user can examine the
        # cached archive for troubleshooting purposes.
        info = exc.info or {}
        info['archive location'] = cached
        raise CommandExecutionError(exc.error, info=info)
Beispiel #17
0
def mod_repo(repo, **kwargs):
    '''
    Modify one or more values for a repo. If the repo does not exist, it will
    be created, so long as the following values are specified:

    repo or alias
        alias by which the zypper refers to the repo

    url, mirrorlist or baseurl
        the URL for zypper to reference

    enabled
        enable or disable (True or False) repository,
        but do not remove if disabled.

    refresh
        enable or disable (True or False) auto-refresh of the repository.

    cache
        Enable or disable (True or False) RPM files caching.

    gpgcheck
        Enable or disable (True or False) GPG check for this repository.

    gpgautoimport
        Automatically trust and import new repository.

    Key/Value pairs may also be removed from a repo's configuration by setting
    a key to a blank value. Bear in mind that a name cannot be deleted, and a
    url can only be deleted if a mirrorlist is specified (or vice versa).

    CLI Examples:

    .. code-block:: bash

        salt '*' pkg.mod_repo alias alias=new_alias
        salt '*' pkg.mod_repo alias url= mirrorlist=http://host.com/
    '''

    repos_cfg = _get_configured_repos()
    added = False

    # An attempt to add new one?
    if repo not in repos_cfg.sections():
        url = kwargs.get('url', kwargs.get('mirrorlist',
                                           kwargs.get('baseurl')))
        if not url:
            raise CommandExecutionError(
                'Repository \'{0}\' not found, and neither \'baseurl\' nor '
                '\'mirrorlist\' was specified'.format(repo))

        if not _urlparse(url).scheme:
            raise CommandExecutionError(
                'Repository \'{0}\' not found and URL for baseurl/mirrorlist '
                'is malformed'.format(repo))

        # Is there already such repo under different alias?
        for alias in repos_cfg.sections():
            repo_meta = _get_repo_info(alias, repos_cfg=repos_cfg)

            # Complete user URL, in case it is not. Note: _urlparse is the
            # urlparse *function*, so the old _urlparse.ParseResult(...) call
            # was an AttributeError; ParseResult is a namedtuple, so _replace
            # builds the normalized copy.
            new_url = _urlparse(url)
            if not new_url.path:
                new_url = new_url._replace(path='/')
            base_url = _urlparse(repo_meta['baseurl'])

            if new_url == base_url:
                raise CommandExecutionError(
                    'Repository \'{0}\' already exists as \'{1}\'.'.format(
                        repo, alias))

        # Add new repo
        __zypper__.xml.call('ar', url, repo)

        # Verify the repository has been added
        repos_cfg = _get_configured_repos()
        if repo not in repos_cfg.sections():
            raise CommandExecutionError(
                'Failed add new repository \'{0}\' for unspecified reason. '
                'Please check zypper logs.'.format(repo))
        added = True

    # Modify added or existing repo according to the options
    cmd_opt = []
    global_cmd_opt = []
    call_refresh = False

    if 'enabled' in kwargs:
        cmd_opt.append(kwargs['enabled'] and '--enable' or '--disable')

    if 'refresh' in kwargs:
        cmd_opt.append(kwargs['refresh'] and '--refresh' or '--no-refresh')

    if 'cache' in kwargs:
        cmd_opt.append(kwargs['cache'] and '--keep-packages'
                       or '--no-keep-packages')

    if 'gpgcheck' in kwargs:
        cmd_opt.append(kwargs['gpgcheck'] and '--gpgcheck' or '--no-gpgcheck')

    if 'priority' in kwargs:
        cmd_opt.append("--priority={0}".format(
            kwargs.get('priority', DEFAULT_PRIORITY)))

    if 'humanname' in kwargs:
        cmd_opt.append("--name='{0}'".format(kwargs.get('humanname')))

    if kwargs.get('gpgautoimport') is True:
        global_cmd_opt.append('--gpg-auto-import-keys')
        call_refresh = True

    if cmd_opt:
        cmd_opt = global_cmd_opt + ['mr'] + cmd_opt + [repo]
        __zypper__.refreshable.xml.call(*cmd_opt)

    if call_refresh:
        # when used with "zypper ar --refresh" or "zypper mr --refresh"
        # --gpg-auto-import-keys is not doing anything
        # so we need to specifically refresh here with --gpg-auto-import-keys
        refresh_opts = global_cmd_opt + ['refresh'] + [repo]
        __zypper__.xml.call(*refresh_opts)
    elif not added and not cmd_opt:
        raise CommandExecutionError(
            'Specified arguments did not result in modification of repo')

    return get_repo(repo)
Beispiel #18
0
def extracted(name,
              source,
              archive_format,
              password=None,
              user=None,
              group=None,
              tar_options=None,
              zip_options=None,
              source_hash=None,
              if_missing=None,
              keep=False,
              trim_output=False,
              skip_verify=False,
              source_hash_update=None,
              use_cmd_unzip=False,
              **kwargs):
    '''
    .. versionadded:: 2014.1.0

    State that make sure an archive is extracted in a directory.
    The downloaded archive is erased if successfully extracted.
    The archive is downloaded only if necessary.

    .. note::

        If ``if_missing`` is not defined, this state will check for ``name``
        instead.  If ``name`` exists, it will assume the archive was previously
        extracted successfully and will not extract it again.

    Example, tar with flag for lmza compression:

    .. code-block:: yaml

        graylog2-server:
          archive.extracted:
            - name: /opt/
            - source: https://github.com/downloads/Graylog2/graylog2-server/graylog2-server-0.9.6p1.tar.lzma
            - source_hash: md5=499ae16dcae71eeb7c3a30c75ea7a1a6
            - tar_options: J
            - archive_format: tar
            - if_missing: /opt/graylog2-server-0.9.6p1/

    Example, tar with flag for verbose output:

    .. code-block:: yaml

        graylog2-server:
          archive.extracted:
            - name: /opt/
            - source: https://github.com/downloads/Graylog2/graylog2-server/graylog2-server-0.9.6p1.tar.gz
            - source_hash: md5=499ae16dcae71eeb7c3a30c75ea7a1a6
            - archive_format: tar
            - tar_options: v
            - user: root
            - group: root
            - if_missing: /opt/graylog2-server-0.9.6p1/

    Example, tar with flag for lmza compression and update based if source_hash differs from what was
    previously extracted:

    .. code-block:: yaml

        graylog2-server:
          archive.extracted:
            - name: /opt/
            - source: https://github.com/downloads/Graylog2/graylog2-server/graylog2-server-0.9.6p1.tar.lzma
            - source_hash: md5=499ae16dcae71eeb7c3a30c75ea7a1a6
            - source_hash_update: true
            - tar_options: J
            - archive_format: tar
            - if_missing: /opt/graylog2-server-0.9.6p1/

    name
        Location where archive should be extracted

    password
        Password to use with password protected zip files. Currently only zip
        files with passwords are supported.

        .. versionadded:: 2016.3.0

    source
        Archive source, same syntax as file.managed source argument.

    source_hash
        Hash of source file, or file with list of hash-to-file mappings.
        It uses the same syntax as the file.managed source_hash argument.

    source_hash_update
        Set this to ``True`` if archive should be extracted if source_hash has
        changed. This would extract regardless of the ``if_missing`` parameter.

        .. versionadded:: 2016.3.0

    skip_verify:False
        If ``True``, hash verification of remote file sources (``http://``,
        ``https://``, ``ftp://``) will be skipped, and the ``source_hash``
        argument will be ignored.

        .. versionadded:: 2016.3.4

    archive_format
        ``tar``, ``zip`` or ``rar``

    user
        The user to own each extracted file.

        .. versionadded:: 2015.8.0
        .. versionchanged:: 2016.3.0
            When used in combination with ``if_missing``, ownership will only
            be enforced if ``if_missing`` is a directory.

    group
        The group to own each extracted file.

        .. versionadded:: 2015.8.0
        .. versionchanged:: 2016.3.0
            When used in combination with ``if_missing``, ownership will only
            be enforced if ``if_missing`` is a directory.

    if_missing
        If specified, this path will be checked, and if it exists then the
        archive will not be extracted. This can be helpful if the archive
        extracts all files into a subfolder. This path can be either a
        directory or a file, so this option can also be used to check for a
        semaphore file and conditionally skip extraction.

        .. versionchanged:: 2016.3.0
            When used in combination with either ``user`` or ``group``,
            ownership will only be enforced when ``if_missing`` is a directory.

    tar_options
        If ``archive_format`` is set to ``tar``, this option can be used to
        specify a string of additional arguments to pass to the tar command. If
        ``archive_format`` is set to ``tar`` and this option is *not* used,
        then the minion will attempt to use Python's native tarfile_ support to
        extract it. Python's native tarfile_ support can only handle gzip and
        bzip2 compression, however.

        .. versionchanged:: 2015.8.11,2016.3.2
            XZ-compressed archives no longer require ``J`` to manually be set
            in the ``tar_options``, they are now detected automatically and
            Salt will extract them using ``xz-utils``. This is a more
            platform-independent solution, as not all tar implementations
            support the ``J`` argument for extracting archives.

        .. note::
            Main operators like -x, --extract, --get, -c and -f/--file **should
            not be used** here.

            Using this option means that the ``tar`` command will be used,
            which is less platform-independent, so keep this in mind when using
            this option; the options must be valid options for the ``tar``
            implementation on the minion's OS.

        .. _tarfile: https://docs.python.org/2/library/tarfile.html

    zip_options
        Optional when using ``zip`` archives, ignored when usign other archives
        files. This is mostly used to overwrite exsiting files with ``o``.
        This options are only used when ``unzip`` binary is used.

        .. versionadded:: 2016.3.1

    keep
        Keep the archive in the minion's cache

    trim_output
        The number of files we should output on success before the rest are
        trimmed, if this is set to True then it will default to 100

        .. versionadded:: 2016.3.0

    use_cmd_unzip
        When archive_format is zip, setting this flag to True will use the archive.cmd_unzip module function

        .. versionadded:: Carbon

    kwargs
        kwargs to pass to the archive.unzip or archive.unrar function

        .. versionadded:: Carbon
    '''
    ret = {'name': name, 'result': None, 'changes': {}, 'comment': ''}
    valid_archives = ('tar', 'rar', 'zip')

    if archive_format not in valid_archives:
        ret['result'] = False
        ret['comment'] = '{0} is not supported, valid formats are: {1}'.format(
            archive_format, ','.join(valid_archives))
        return ret

    # Destination is always treated as a directory
    if not name.endswith('/'):
        name += '/'

    if __opts__['test']:
        source_match = source
    else:
        try:
            source_match = __salt__['file.source_list'](source,
                                                        source_hash,
                                                        __env__)[0]
        except CommandExecutionError as exc:
            ret['result'] = False
            ret['comment'] = exc.strerror
            return ret

    urlparsed_source = _urlparse(source_match)
    source_hash_name = urlparsed_source.path or urlparsed_source.netloc

    if if_missing is None:
        if_missing = name
    if source_hash and source_hash_update:
        # Remote source_hash files are not supported with source_hash_update
        if urlparsed_source.scheme != '':
            ret['result'] = False
            ret['comment'] = (
                '\'source_hash_update\' is not yet implemented for a remote '
                'source_hash'
            )
            return ret
        else:
            try:
                hash_type, hsum = source_hash.split('=')
            except ValueError:
                ret['result'] = False
                ret['comment'] = 'Invalid source_hash format'
                return ret
            source_file = '{0}.{1}'.format(os.path.basename(source), hash_type)
            hash_fname = os.path.join(__opts__['cachedir'],
                                'files',
                                __env__,
                                source_file)
            if _compare_checksum(hash_fname, name, hsum):
                ret['result'] = True
                ret['comment'] = 'Hash {0} has not changed'.format(hsum)
                return ret
    elif (
        __salt__['file.directory_exists'](if_missing)
        or __salt__['file.file_exists'](if_missing)
    ):
        ret['result'] = True
        ret['comment'] = '{0} already exists'.format(if_missing)
        return ret

    log.debug('Input seem valid so far')
    filename = os.path.join(__opts__['cachedir'],
                            'files',
                            __env__,
                            '{0}.{1}'.format(re.sub('[:/\\\\]', '_', if_missing),
                                             archive_format))

    if not os.path.exists(filename):
        if __opts__['test']:
            ret['result'] = None
            ret['comment'] = \
                '{0} {1} would be downloaded to cache'.format(
                    'One of' if not isinstance(source_match, six.string_types)
                        else 'Archive',
                    source_match
                )
            return ret

        log.debug('%s is not in cache, downloading it', source_match)

        file_result = __salt__['state.single']('file.managed',
                                               filename,
                                               source=source_match,
                                               source_hash=source_hash,
                                               makedirs=True,
                                               skip_verify=skip_verify,
                                               saltenv=__env__,
                                               source_hash_name=source_hash_name)
        log.debug('file.managed: %s', file_result)
        # get value of first key
        try:
            file_result = file_result[next(six.iterkeys(file_result))]
        except AttributeError:
            pass

        try:
            if not file_result['result']:
                log.debug('failed to download %s', source)
                return file_result
        except TypeError:
            if not file_result:
                log.debug('failed to download %s', source)
                return file_result
    else:
        log.debug('Archive %s is already in cache', source)

    if __opts__['test']:
        ret['result'] = None
        ret['comment'] = '{0} {1} would be extracted to {2}'.format(
                'One of' if not isinstance(source_match, six.string_types)
                    else 'Archive',
                source_match,
                name
            )
        return ret

    created_destdir = False
    if __salt__['file.file_exists'](name.rstrip('/')):
        ret['result'] = False
        ret['comment'] = ('{0} exists and is not a directory'
                          .format(name.rstrip('/')))
        return ret
    elif not __salt__['file.directory_exists'](name):
        __salt__['file.makedirs'](name, user=user, group=group)
        created_destdir = True

    log.debug('Extracting %s to %s', filename, name)
    if archive_format == 'zip':
        if use_cmd_unzip:
            files = __salt__['archive.cmd_unzip'](filename, name, options=zip_options, trim_output=trim_output, **kwargs)
        else:
            files = __salt__['archive.unzip'](filename, name, options=zip_options, trim_output=trim_output, password=password, **kwargs)
    elif archive_format == 'rar':
        files = __salt__['archive.unrar'](filename, name, trim_output=trim_output, **kwargs)
    else:
        if tar_options is None:
            # No CLI options: prefer Python's native tarfile support
            try:
                with closing(tarfile.open(filename, 'r')) as tar:
                    files = tar.getnames()
                    tar.extractall(name)
            except tarfile.ReadError:
                if salt.utils.which('xz'):
                    if __salt__['cmd.retcode'](['xz', '-l', filename],
                                               python_shell=False,
                                               ignore_retcode=True) == 0:
                        # XZ-compressed data
                        log.debug(
                            'Tar file is XZ-compressed, attempting '
                            'decompression and extraction using xz-utils '
                            'and the tar command'
                        )
                        # Must use python_shell=True here because not all tar
                        # implementations support the -J flag for decompressing
                        # XZ-compressed data. We need to dump the decompressed
                        # data to stdout and pipe it to tar for extraction.
                        cmd = 'xz --decompress --stdout {0} | tar xvf -'
                        results = __salt__['cmd.run_all'](
                            cmd.format(_cmd_quote(filename)),
                            cwd=name,
                            python_shell=True)
                        if results['retcode'] != 0:
                            if created_destdir:
                                _cleanup_destdir(name)
                            ret['result'] = False
                            ret['changes'] = results
                            return ret
                        if _is_bsdtar():
                            files = results['stderr']
                        else:
                            files = results['stdout']
                    else:
                        # Failed to open tar archive and it is not
                        # XZ-compressed, gracefully fail the state
                        if created_destdir:
                            _cleanup_destdir(name)
                        ret['result'] = False
                        ret['comment'] = (
                            'Failed to read from tar archive using Python\'s '
                            'native tar file support. If archive is '
                            'compressed using something other than gzip or '
                            'bzip2, the \'tar_options\' parameter may be '
                            'required to pass the correct options to the tar '
                            'command in order to extract the archive.'
                        )
                        return ret
                else:
                    if created_destdir:
                        _cleanup_destdir(name)
                    ret['result'] = False
                    ret['comment'] = (
                        'Failed to read from tar archive. If it is '
                        'XZ-compressed, install xz-utils to attempt '
                        'extraction.'
                    )
                    return ret
        else:
            try:
                tar_opts = tar_options.split(' ')
            except AttributeError:
                tar_opts = str(tar_options).split(' ')

            tar_cmd = ['tar']
            tar_shortopts = 'x'
            tar_longopts = []

            for position, opt in enumerate(tar_opts):
                if opt.startswith('-'):
                    tar_longopts.append(opt)
                else:
                    if position > 0:
                        tar_longopts.append(opt)
                    else:
                        # First bare token is treated as short options; strip
                        # x/f since they are supplied by this state itself.
                        append_opt = opt
                        append_opt = append_opt.replace('x', '').replace('f', '')
                        tar_shortopts = tar_shortopts + append_opt

            if __grains__['os'] == 'OpenBSD':
                tar_shortopts = '-' + tar_shortopts

            tar_cmd.append(tar_shortopts)
            tar_cmd.extend(tar_longopts)
            tar_cmd.extend(['-f', filename])

            results = __salt__['cmd.run_all'](tar_cmd, cwd=name, python_shell=False)
            if results['retcode'] != 0:
                ret['result'] = False
                ret['changes'] = results
                return ret
            if _is_bsdtar():
                files = results['stderr']
            else:
                files = results['stdout']
            if not files:
                files = 'no tar output so far'

    # Recursively set user and group ownership of files after extraction.
    # Note: We do this here because we might not have access to the cachedir.
    if user or group:
        if os.path.isdir(if_missing):
            recurse = []
            if user:
                recurse.append('user')
            if group:
                recurse.append('group')
            dir_result = __salt__['state.single']('file.directory',
                                                  if_missing,
                                                  user=user,
                                                  group=group,
                                                  recurse=recurse)
            log.debug('file.directory: %s', dir_result)
        elif os.path.isfile(if_missing):
            log.debug('if_missing (%s) is a file, not enforcing user/group '
                      'permissions', if_missing)

    if len(files) > 0:
        ret['result'] = True
        ret['changes']['directories_created'] = [name]
        ret['changes']['extracted_files'] = files
        ret['comment'] = '{0} extracted to {1}'.format(source_match, name)
        if not keep:
            os.unlink(filename)
        if source_hash and source_hash_update:
            # Record the hash parsed from source_hash above. The previous code
            # indexed the *builtin* hash function (hash[1]), which raised
            # TypeError whenever this branch was reached.
            _update_checksum(hash_fname, name, hsum)

    else:
        __salt__['file.remove'](if_missing)
        ret['result'] = False
        ret['comment'] = 'Can\'t extract content of {0}'.format(source_match)
    return ret
Beispiel #19
0
def mod_repo(repo, **kwargs):
    '''
    Modify one or more values for a repo. If the repo does not exist, it will
    be created, so long as the following values are specified:

    repo or alias
        alias by which the zypper refers to the repo

    url, mirrorlist or baseurl
        the URL for zypper to reference

    enabled
        enable or disable (True or False) repository,
        but do not remove if disabled.

    refresh
        enable or disable (True or False) auto-refresh of the repository.

    cache
        Enable or disable (True or False) RPM files caching.

    gpgcheck
        Enable or disable (True or False) GPG check for this repository.

    gpgautoimport
        Automatically trust and import new repository.

    Key/Value pairs may also be removed from a repo's configuration by setting
    a key to a blank value. Bear in mind that a name cannot be deleted, and a
    url can only be deleted if a mirrorlist is specified (or vice versa).

    CLI Examples:

    .. code-block:: bash

        salt '*' pkg.mod_repo alias alias=new_alias
        salt '*' pkg.mod_repo alias url= mirrorlist=http://host.com/
    '''

    repos_cfg = _get_configured_repos()
    added = False

    # An attempt to add new one?
    if repo not in repos_cfg.sections():
        url = kwargs.get('url', kwargs.get('mirrorlist',
                                           kwargs.get('baseurl')))
        if not url:
            raise CommandExecutionError(
                'Repository \'{0}\' not found, and neither \'baseurl\' nor '
                '\'mirrorlist\' was specified'.format(repo))

        if not _urlparse(url).scheme:
            raise CommandExecutionError(
                'Repository \'{0}\' not found and URL for baseurl/mirrorlist '
                'is malformed'.format(repo))

        # Complete the user-supplied URL with a default path so it compares
        # meaningfully against configured repo URLs. This only depends on
        # ``url``, so it is computed once, outside the loop below.
        new_url = _urlparse(url)
        if not new_url.path:
            # ParseResult is a namedtuple; ``_replace`` returns a copy with
            # just the given field changed. (The previous code called
            # ``_urlparse.ParseResult(...)``, which raises AttributeError:
            # ``_urlparse`` is the urlparse *function*, not the module.)
            new_url = new_url._replace(path='/')

        # Is there already such repo under different alias?
        for alias in repos_cfg.sections():
            repo_meta = _get_repo_info(alias, repos_cfg=repos_cfg)
            base_url = _urlparse(repo_meta['baseurl'])

            if new_url == base_url:
                raise CommandExecutionError(
                    'Repository \'{0}\' already exists as \'{1}\'.'.format(
                        repo, alias))

        # Add new repo
        doc = None
        try:
            # Try to parse the output and find the error,
            # but this is not always working (depends on Zypper version)
            ret = __salt__['cmd.run'](_zypper('-x', 'ar', url, repo),
                                      output_loglevel='trace',
                                      python_shell=False)
            doc = dom.parseString(_zypper_check_result(ret, xml=True))
        except Exception:
            # No XML output available, but the result is still unknown;
            # fall through and verify the repo below instead.
            pass

        if doc:
            # Surface any <message type="error"> node zypper emitted.
            msg_nodes = doc.getElementsByTagName('message')
            if msg_nodes:
                msg_node = msg_nodes[0]
                if msg_node.getAttribute('type') == 'error':
                    raise CommandExecutionError(
                        msg_node.childNodes[0].nodeValue)

        # Verify the repository has been added
        repos_cfg = _get_configured_repos()
        if repo not in repos_cfg.sections():
            raise CommandExecutionError(
                'Failed add new repository \'{0}\' for unspecified reason. '
                'Please check zypper logs.'.format(repo))
        added = True

    # Modify added or existing repo according to the options
    cmd_opt = []

    if 'enabled' in kwargs:
        cmd_opt.append(kwargs['enabled'] and '--enable' or '--disable')

    if 'refresh' in kwargs:
        cmd_opt.append(kwargs['refresh'] and '--refresh' or '--no-refresh')

    if 'cache' in kwargs:
        cmd_opt.append(kwargs['cache'] and '--keep-packages'
                       or '--no-keep-packages')

    if 'gpgcheck' in kwargs:
        cmd_opt.append(kwargs['gpgcheck'] and '--gpgcheck' or '--no-gpgcheck')

    if kwargs.get('gpgautoimport') is True:
        cmd_opt.append('--gpg-auto-import-keys')

    if 'priority' in kwargs:
        cmd_opt.append("--priority='{0}'".format(
            kwargs.get('priority', DEFAULT_PRIORITY)))

    if 'humanname' in kwargs:
        cmd_opt.append("--name='{0}'".format(kwargs.get('humanname')))

    if cmd_opt:
        cmd_opt.append(repo)
        ret = __salt__['cmd.run_all'](_zypper('-x', 'mr', *cmd_opt),
                                      python_shell=False,
                                      output_loglevel='trace')
        _zypper_check_result(ret, xml=True)

    # If repo nor added neither modified, error should be thrown
    if not added and not cmd_opt:
        raise CommandExecutionError(
            'Specified arguments did not result in modification of repo')

    return get_repo(repo)
Beispiel #20
0
def extracted(
        name,
        source=None,
        source_hash='',
        skip_verify=False,
        archive_format=None,
        archive_compression=None,
        archive_files=None,
        archive_toplevel=None,
        makedirs=False,
        clean=True,
        include_pat=None,
        exclude_pat=None,
        include_files=None,
        exclude_files=None,
        force=False,
        user=None,
        group=None,
        mode=None,
        dir_mode=None,
        file_mode=None,
        recurse=None,
        max_depth=None,
        **kwargs):
    r'''
    Ensure that an archive file is extracted to a directory.

    This is an alternative to the upstream :mod:``archive.extracted
    <salt.states.archive.extracted>`` state. Rather than defaulting
    to libtar, this function exclusively uses system commands.

    Instead of extracting directly on top of a directory, the ``source`` file
    is first extracted to a temporary cache directory. When only a subset of
    files are needed, the ``archive_files`` and ``archive_toplevel`` arguments
    can be used to affect the extraction system command.

    A recursive diff is then performed against the cache and ``name`` to
    derive a list of files to manage. Files that do not exist in ``name`` or
    that need to be replaced are copied from cache. If ``clean`` is ``True``,
    files that do not exist in the cache are deleted from ``name``. The
    ``include_pat``, ``include_files``, ``exclude_pat``, and ``exclude_files``
    arguments affect the diff and are applied to both sides.

    name
        Directory into which the archive should be extracted.

    source
        The location of the archive file to be extracted.

    source_hash
        This can be one of the following:
            1. a source hash string
            2. the URI of a file that contains source hash strings

        The function accepts the first encountered long unbroken alphanumeric
        string of correct length as a valid hash, in order from most secure to
        least secure:

        .. code-block:: text

            Type    Length
            ======  ======
            sha512     128
            sha384      96
            sha256      64
            sha224      56
            sha1        40
            md5         32

        See the ``source_hash`` argument description in :mod:`file.managed
        <salt.states.file.managed>` for more details and examples.

    skip_verify : False
        If ``True``, hash verification of remote file sources (``http://``,
        ``https://``, ``ftp://``) will be skipped, and the ``source_hash``
        argument will be ignored.

    archive_format
        One of ``tar`` or ``zip``.

        If omitted, this will be guessed from the ``source`` argument.

    archive_compression
        Compression algorithm used with ``tar`` format; one of ``gz``,
        ``bz2``, or ``xz``.

        If omitted, this will be guessed from the ``source`` argument.
        Defaults to no compression.

    archive_files
        A list of files to extract from the archive, relative to
        ``archive_toplevel``.

        .. note::
            To ensure consistent behavior (especially with ``unzip``),
            directories should be suffixed with ``/*``.

    archive_toplevel
        The topmost subdirectory to extract, defaulting to all files.

        This is also useful when an archive extracts to a root directory named
        differently than what the archive file name might suggest.

    makedirs : False
        If set to ``True``, then the parent directories will be created to
        facilitate the creation of the named directory. If ``False``, and the
        parent directory of the extracted directory doesn't exist, the state
        will fail.

    clean : True
        If set to ``True``, remove files that exist in ``name``, but were not
        extracted from the ``source`` file.

    include_pat
        Include this pattern in the recursive diff used to determine which
        files to create, change, or remove. Relative to both ``name`` and
        ``archive_toplevel``. May be a file glob or a regex pattern (prepended
        with ``E@``).

        Appended to the ``include_files`` list of patterns.

    exclude_pat
        Exclude this pattern from the recursive diff used to determine which
        files to create, change, or remove. Relative to both ``name`` and
        ``archive_toplevel``. May be a file glob or a regex pattern (prepended
        with ``E@``).

        Appended to the ``exclude_files`` list of patterns.

    include_files
        Include this pattern in the recursive diff used to determine which
        files to create, change, or remove. Relative to both ``name`` and
        ``archive_toplevel``. May be a file glob or a regex pattern (prepended
        with ``E@``).

        Default behavior is to include all files.

    exclude_files
        Exclude this pattern from the recursive diff used to determine which
        files to create, change, or remove. Relative to both ``name`` and
        ``archive_toplevel``. May be a file glob or a regex pattern (prepended
        with ``E@``).

    force : False
        If set to ``True``, remove ``name`` prior to extraction.

    user
        The user to own the directory. This defaults to the user salt is
        running as on the minion.

    group
        The group ownership set for the directory. This defaults to the group
        salt is running as on the minion.

    dir_mode / mode
        The permissions mode to set on any directories created.

    file_mode
        The permissions mode to set on any files created.

    recurse
        Enforce user/group ownership and mode of directory recursively.

        See the ``recurse`` argument description in :mod:`file.directory
        <salt.states.file.directory>` for more details and examples.

    max_depth
        Limit the ``recurse`` depth. The default is no limit.
    '''
    ret = {'name': name, 'result': False, 'changes': {}, 'comment': ''}
    kwargs = salt.utils.args.clean_kwargs(**kwargs)  # remove __* keys
    if salt.utils.platform.is_windows():
        ret['comment'] = 'file.extracted does not support Windows'
        return ret
    try:
        source_match = __salt__['file.source_list'](
            source,
            source_hash,
            __env__
        )[0]
    except CommandExecutionError as exc:
        ret['comment'] = exc.strerror
        return ret
    urlparsed_source = _urlparse(source_match)
    source_basename = urlparsed_source.path or urlparsed_source.netloc
    source_is_local = urlparsed_source.scheme in ('', 'file')
    if source_is_local:
        # trim "file://" from start of source_match
        source_match = urlparsed_source.path
        if not os.path.isfile(source_match):
            ret['comment'] = (
                'Source file \'{0}\' does not exist'.format(source_match)
            )
            return ret
        cached_source = source_match
    else:
        cached_source = os.path.join(
            __opts__['cachedir'],
            'files',
            __env__,
            re.sub(r'[:/\\]', '_', source_basename),
        )
        if os.path.isdir(cached_source):
            # cache is not a file, so clear it to avoid traceback
            salt.utils.files.rm_rf(cached_source)
        if os.path.exists(cached_source):
            if source_hash:
                # Invalidate the cache when its checksum no longer matches
                # the expected source hash.
                try:
                    res = __salt__['file.get_source_sum'](
                        source_hash=source_hash
                    )
                except CommandExecutionError as exc:
                    ret['comment'] = exc.strerror
                    return ret
                hash_type = res['hash_type']
                hsum = res['hsum']
                try:
                    res = __salt__['file.get_sum'](
                        cached_source,
                        form=hash_type
                    )
                except CommandExecutionError as exc:
                    ret['comment'] = exc.strerror
                    return ret
                cached_hsum = res
                if hsum != cached_hsum:
                    salt.utils.files.rm_rf(cached_source)
            else:
                # No hash to validate against, so always re-download.
                salt.utils.files.rm_rf(cached_source)
    if not os.path.exists(cached_source):
        # Copy __opts__ before overriding 'test': mutating the shared
        # __opts__ dict in place would force test=False for every state
        # executed after this one in the same run.
        opts_force = dict(__opts__)
        opts_force['test'] = False
        res = __states__['file.managed'](
            cached_source,
            source=source,
            source_hash=source_hash,
            skip_verify=skip_verify,
            makedirs=True,
            __opts__=opts_force,
        )
        log.debug('file.managed: %s', res)
        if not res['result'] and res['result'] is not None:
            return res
    try:
        # Returns a diff string describing what was (or would be) changed.
        res = __salt__['file.extract'](
            name,
            cached_source,
            archive_format=archive_format,
            archive_compression=archive_compression,
            archive_files=archive_files,
            archive_toplevel=archive_toplevel,
            makedirs=makedirs,
            clean=clean,
            include_pat=include_pat,
            exclude_pat=exclude_pat,
            include_files=include_files,
            exclude_files=exclude_files,
            force=force,
        )
    except CommandExecutionError as exc:
        ret['comment'] = exc.strerror
        return ret
    if __opts__['test']:
        ret['result'] = None
        if res:
            ret['comment'] = ['Changes would have been made:', '  diff:']
            for line in res.split('\n'):
                ret['comment'].append(re.sub('^', '    ', line))
            ret['comment'] = '\n'.join(ret['comment'])
            return ret
        else:
            ret['comment'] = ['The extracted archive {0} is in the correct state.'.format(name)]
            return ret
    if res:
        ret['changes']['diff'] = res
    if os.path.isdir(name):
        res = __states__['file.directory'](
            name=name,
            makedirs=makedirs,
            clean=False,
            user=user,
            group=group,
            mode=mode,
            dir_mode=dir_mode,
            file_mode=file_mode,
            recurse=recurse,
            max_depth=max_depth,
        )
        log.debug('file.directory: %s', res)
        for attr in ('user', 'group', 'mode'):
            if attr in res['changes']:
                ret['changes'][attr] = res['changes'][attr]
        # Check the result only inside this branch: outside it, ``res``
        # still holds the diff *string* from file.extract, and indexing it
        # with 'result' would raise TypeError.
        if not res['result']:
            ret['comment'] = res['comment']
            return ret
    ret['result'] = True
    ret['comment'] = 'Archive has been extracted to {0}'.format(name)
    return ret
Beispiel #21
0
def list_(name,
          archive_format=None,
          options=None,
          clean=False,
          verbose=False,
          saltenv='base'):
    '''
    .. versionadded:: 2016.11.0

    List the files and directories in a tar, zip, or rar archive.

    .. note::
        This function will only provide results for XZ-compressed archives if
        xz-utils_ is installed, as Python does not at this time natively
        support XZ compression in its tarfile_ module.

    name
        Path/URL of archive

    archive_format
        Specify the format of the archive (``tar``, ``zip``, or ``rar``). If
        this argument is omitted, the archive format will be guessed based on
        the value of the ``name`` parameter.

    options
        **For tar archives only.** This function will, by default, try to use
        the tarfile_ module from the Python standard library to get a list of
        files/directories. If this method fails, then it will fall back to
        using the shell to decompress the archive to stdout and pipe the
        results to ``tar -tf -`` to produce a list of filenames. XZ-compressed
        archives are already supported automatically, but in the event that the
        tar archive uses a different sort of compression not supported natively
        by tarfile_, this option can be used to specify a command that will
        decompress the archive to stdout. For example:

        .. code-block:: bash

            salt minion_id archive.list /path/to/foo.tar.gz options='gzip --decompress --stdout'

        .. note::
            It is not necessary to manually specify options for gzip'ed
            archives, as gzip compression is natively supported by tarfile_.

    clean : False
        Set this value to ``True`` to delete the path referred to by ``name``
        once the contents have been listed. This option should be used with
        care.

        .. note::
            If there is an error listing the archive's contents, the cached
            file will not be removed, to allow for troubleshooting.

    verbose : False
        If ``False``, this function will return a list of files/dirs in the
        archive. If ``True``, it will return a dictionary categorizing the
        paths into separate keys containing the directory names, file names,
        and also directories/files present in the top level of the archive.

    saltenv : base
        Specifies the fileserver environment from which to retrieve
        ``archive``. This is only applicable when ``archive`` is a file from
        the ``salt://`` fileserver.

    .. _tarfile: https://docs.python.org/2/library/tarfile.html
    .. _xz-utils: http://tukaani.org/xz/

    CLI Examples:

    .. code-block:: bash

            salt '*' archive.list /path/to/myfile.tar.gz
            salt '*' archive.list salt://foo.tar.gz
            salt '*' archive.list https://domain.tld/myfile.zip
            salt '*' archive.list ftp://10.1.2.3/foo.rar
    '''
    def _list_tar(name, cached, decompress_cmd):
        '''
        List the contents of a tar archive, preferring the tarfile module
        and falling back to shelling out through a decompressor.
        '''
        try:
            with contextlib.closing(tarfile.open(cached)) as tar_archive:
                # Suffix directories with '/' to match tar CLI output.
                return [
                    x.name + '/' if x.isdir() else x.name
                    for x in tar_archive.getmembers()
                ]
        except tarfile.ReadError:
            if not salt.utils.which('tar'):
                raise CommandExecutionError('\'tar\' command not available')
            if decompress_cmd is not None:
                # Guard against shell injection
                try:
                    decompress_cmd = ' '.join(
                        [_quote(x) for x in shlex.split(decompress_cmd)]
                    )
                except AttributeError:
                    raise CommandExecutionError('Invalid CLI options')
            else:
                # No explicit decompressor: auto-detect XZ via 'xz -l'.
                if salt.utils.which('xz') \
                        and __salt__['cmd.retcode'](['xz', '-l', cached],
                                                    python_shell=False,
                                                    ignore_retcode=True) == 0:
                    decompress_cmd = 'xz --decompress --stdout'

            if decompress_cmd:
                cmd = '{0} {1} | tar tf -'.format(decompress_cmd, _quote(cached))
                result = __salt__['cmd.run_all'](cmd, python_shell=True)
                if result['retcode'] != 0:
                    raise CommandExecutionError(
                        'Failed to decompress {0}'.format(name),
                        info={'error': result['stderr']}
                    )
                ret = []
                for line in salt.utils.itertools.split(result['stdout'], '\n'):
                    line = line.strip()
                    if line:
                        ret.append(line)
                return ret

        raise CommandExecutionError(
            'Unable to list contents of {0}. If this is an XZ-compressed tar '
            'archive, install xz-utils to enable listing its contents. If it '
            'is compressed using something other than XZ, it may be necessary '
            'to specify CLI options to decompress the archive. See the '
            'documentation for details.'.format(name)
        )

    def _list_zip(name, cached):
        '''
        List the contents of a zip archive.
        '''
        # Password-protected ZIP archives can still be listed by zipfile, so
        # there is no reason to invoke the unzip command.
        try:
            with contextlib.closing(zipfile.ZipFile(cached)) as zip_archive:
                return zip_archive.namelist()
        except zipfile.BadZipfile:
            raise CommandExecutionError('{0} is not a ZIP file'.format(name))

    def _list_rar(name, cached):
        '''
        List the contents of a rar archive.
        '''
        # Operate on the locally-cached copy, not the (possibly remote)
        # original path parsed from ``name``.
        output = __salt__['cmd.run'](
            ['rar', 'lt', cached],
            python_shell=False,
            ignore_retcode=False)
        matches = re.findall(r'Name:\s*([^\n]+)\s*Type:\s*([^\n]+)', output)
        ret = [x + '/' if y == 'Directory' else x for x, y in matches]
        if not ret:
            raise CommandExecutionError(
                'Failed to decompress {0}'.format(name),
                info={'error': output}
            )
        return ret

    cached = __salt__['cp.cache_file'](name, saltenv)
    if not cached:
        raise CommandExecutionError('Failed to cache {0}'.format(name))

    try:
        parsed = _urlparse(name)
        path = parsed.path or parsed.netloc

        def _unsupported_format(archive_format):
            '''
            Raise the proper exception message for the given archive format.
            '''
            if archive_format is None:
                raise CommandExecutionError(
                    'Unable to guess archive format, please pass an '
                    '\'archive_format\' argument.'
                )
            raise CommandExecutionError(
                'Unsupported archive format \'{0}\''.format(archive_format)
            )

        if not archive_format:
            guessed_format = salt.utils.files.guess_archive_type(path)
            if guessed_format is None:
                _unsupported_format(archive_format)
            archive_format = guessed_format

        # Dispatch to the matching nested helper (_list_tar/_list_zip/...).
        func = locals().get('_list_' + archive_format)
        if not callable(func):
            _unsupported_format(archive_format)

        # Only the tar helper takes the extra ``options`` argument.
        args = (options,) if archive_format == 'tar' else ()
        try:
            ret = func(name, cached, *args)
        except (IOError, OSError) as exc:
            raise CommandExecutionError(
                'Failed to list contents of {0}: {1}'.format(
                    name, exc.__str__()
                )
            )
        except CommandExecutionError:
            # Re-raise as-is; the generic handler below would obscure it.
            raise
        except Exception as exc:
            raise CommandExecutionError(
                'Uncaught exception \'{0}\' when listing contents of {1}'
                .format(exc, name)
            )

        if clean:
            try:
                os.remove(cached)
                log.debug('Cleaned cached archive %s', cached)
            except OSError as exc:
                # Already gone is fine; anything else is worth a warning.
                if exc.errno != errno.ENOENT:
                    log.warning(
                        'Failed to clean cached archive %s: %s',
                        cached, exc.__str__()
                    )
        if verbose:
            verbose_ret = {'dirs': [],
                           'files': [],
                           'top_level_dirs': [],
                           'top_level_files': []}
            for item in ret:
                if item.endswith('/'):
                    verbose_ret['dirs'].append(item)
                    if item.count('/') == 1:
                        verbose_ret['top_level_dirs'].append(item)
                else:
                    verbose_ret['files'].append(item)
                    if item.count('/') == 0:
                        verbose_ret['top_level_files'].append(item)
            ret = verbose_ret
        return ret
    except CommandExecutionError as exc:
        # Reraise with cache path in the error so that the user can examine the
        # cached archive for troubleshooting purposes.
        info = exc.info or {}
        info['archive location'] = cached
        raise CommandExecutionError(exc.error, info=info)
Beispiel #22
0
def extracted(name,
              source,
              source_hash=None,
              source_hash_update=False,
              skip_verify=False,
              password=None,
              options=None,
              list_options=None,
              force=False,
              user=None,
              group=None,
              if_missing=None,
              keep=False,
              trim_output=False,
              use_cmd_unzip=None,
              extract_perms=True,
              enforce_toplevel=True,
              enforce_ownership_on=None,
              archive_format=None,
              **kwargs):
    '''
    .. versionadded:: 2014.1.0
    .. versionchanged:: 2016.11.0
        This state has been rewritten. Some arguments are new to this release
        and will not be available in the 2016.3 release cycle (and earlier).
        Additionally, the **ZIP Archive Handling** section below applies
        specifically to the 2016.11.0 release (and newer).

    Ensure that an archive is extracted to a specific directory.

    .. important::
        **ZIP Archive Handling**

        Salt has two different functions for extracting ZIP archives:

        1. :py:func:`archive.unzip <salt.modules.archive.unzip>`, which uses
           Python's zipfile_ module to extract ZIP files.
        2. :py:func:`archive.cmd_unzip <salt.modules.archive.cmd_unzip>`, which
           uses the ``unzip`` CLI command to extract ZIP files.

        Salt will prefer the use of :py:func:`archive.cmd_unzip
        <salt.modules.archive.cmd_unzip>` when CLI options are specified (via
        the ``options`` argument), and will otherwise prefer the
        :py:func:`archive.unzip <salt.modules.archive.unzip>` function. Use
        of :py:func:`archive.cmd_unzip <salt.modules.archive.cmd_unzip>` can be
        forced however by setting the ``use_cmd_unzip`` argument to ``True``.
        By contrast, setting this argument to ``False`` will force usage of
        :py:func:`archive.unzip <salt.modules.archive.unzip>`. For example:

        .. code-block:: yaml

            /var/www:
              archive.extracted:
                - source: salt://foo/bar/myapp.zip
                - use_cmd_unzip: True

        When ``use_cmd_unzip`` is omitted, Salt will choose which extraction
        function to use based on the source archive and the arguments passed to
        the state. When in doubt, simply do not set this argument; it is
        provided as a means of overriding the logic Salt uses to decide which
        function to use.

        There are differences in the features available in both extraction
        functions. These are detailed below.

        - *Command-line options* (only supported by :py:func:`archive.cmd_unzip
          <salt.modules.archive.cmd_unzip>`) - When the ``options`` argument is
          used, :py:func:`archive.cmd_unzip <salt.modules.archive.cmd_unzip>`
          is the only function that can be used to extract the archive.
          Therefore, if ``use_cmd_unzip`` is specified and set to ``False``,
          and ``options`` is also set, the state will not proceed.

        - *Password-protected ZIP Archives* (only supported by
          :py:func:`archive.unzip <salt.modules.archive.unzip>`) -
          :py:func:`archive.cmd_unzip <salt.modules.archive.cmd_unzip>` is not
          permitted to extract password-protected ZIP archives, as
          attempting to do so will cause the unzip command to block on user
          input. The :py:func:`archive.is_encrypted
          <salt.modules.archive.unzip>` function will be used to determine if
          the archive is password-protected. If it is, then the ``password``
          argument will be required for the state to proceed. If
          ``use_cmd_unzip`` is specified and set to ``True``, then the state
          will not proceed.

        - *Permissions* - Due to an `upstream bug in Python`_, permissions are
          not preserved when the zipfile_ module is used to extract an archive.
          As of the 2016.11.0 release, :py:func:`archive.unzip
          <salt.modules.archive.unzip>` (as well as this state) has an
          ``extract_perms`` argument which, when set to ``True`` (the default),
          will attempt to match the permissions of the extracted
          files/directories to those defined within the archive. To disable
          this functionality and have the state not attempt to preserve the
          permissions from the ZIP archive, set ``extract_perms`` to ``False``:

          .. code-block:: yaml

              /var/www:
                archive.extracted:
                  - source: salt://foo/bar/myapp.zip
                  - extract_perms: False

    .. _`upstream bug in Python`: https://bugs.python.org/issue15795

    name
        Directory into which the archive should be extracted

    source
        Archive to be extracted

        .. note::
            This argument uses the same syntax as its counterpart in the
            :py:func:`file.managed <salt.states.file.managed>` state.

    source_hash
        Hash of source file, or file with list of hash-to-file mappings

        .. note::
            This argument uses the same syntax as its counterpart in the
            :py:func:`file.managed <salt.states.file.managed>` state.

    source_hash_update
        Set this to ``True`` if archive should be extracted if source_hash has
        changed. This would extract regardless of the ``if_missing`` parameter.

        .. versionadded:: 2016.3.0

    skip_verify : False
        If ``True``, hash verification of remote file sources (``http://``,
        ``https://``, ``ftp://``) will be skipped, and the ``source_hash``
        argument will be ignored.

        .. versionadded:: 2016.3.4

    password
        **For ZIP archives only.** Password used for extraction.

        .. versionadded:: 2016.3.0

    options
        **For tar and zip archives only.**  This option can be used to specify
        a string of additional arguments to pass to the tar/zip command.

        If this argument is not used, then the minion will attempt to use
        Python's native tarfile_/zipfile_ support to extract it. For zip
        archives, this argument is mostly used to overwrite existing files with
        ``o``.

        Using this argument means that the ``tar`` or ``unzip`` command will be
        used, which is less platform-independent, so keep this in mind when
        using this option; the CLI options must be valid options for the
        ``tar``/``unzip`` implementation on the minion's OS.

        .. versionadded:: 2016.11.0
            The ``tar_options`` and ``zip_options`` parameters have been
            deprecated in favor of a single argument name.
        .. versionchanged:: 2015.8.11,2016.3.2
            XZ-compressed tar archives no longer require ``J`` to manually be
            set in the ``options``, they are now detected automatically and
            decompressed using xz-utils_ and extracted using ``tar xvf``. This
            is a more platform-independent solution, as not all tar
            implementations support the ``J`` argument for extracting archives.

        .. note::
            For tar archives, main operators like ``-x``, ``--extract``,
            ``--get``, ``-c`` and ``-f``/``--file`` should *not* be used here.

    tar_options
        .. deprecated:: 2016.11.0
            Use ``options`` instead.

    zip_options
        .. versionadded:: 2016.3.1
        .. deprecated:: 2016.11.0
            Use ``options`` instead.

    list_options
        **For tar archives only.** This state uses :py:func:`archive.list
        <salt.modules.archive.list_>` to discover the contents of the source
        archive so that it knows which file paths should exist on the minion if
        the archive has already been extracted. For the vast majority of tar
        archives, :py:func:`archive.list <salt.modules.archive.list_>` "just
        works". Archives compressed using gzip, bzip2, and xz/lzma (with the
        help of xz-utils_) are supported automatically. However, for archives
        compressed using other compression types, CLI options must be passed to
        :py:func:`archive.list <salt.modules.archive.list_>`.

        This argument will be passed through to :py:func:`archive.list
        <salt.modules.archive.list_>` as its ``options`` argument, to allow it
        to successfully list the archive's contents. For the vast majority of
        archives, this argument should not need to be used, it should only be
        needed in cases where the state fails with an error stating that the
        archive's contents could not be listed.

        .. versionadded:: 2016.11.0

    force : False
        If a path that should be occupied by a file in the extracted result is
        instead a directory (or vice-versa), the state will fail. Set this
        argument to ``True`` to force these paths to be removed in order to
        allow the archive to be extracted.

        .. warning::
            Use this option *very* carefully.

        .. versionadded:: 2016.11.0

    user
        The user to own each extracted file. Not available on Windows.

        .. versionadded:: 2015.8.0
        .. versionchanged:: 2016.3.0
            When used in combination with ``if_missing``, ownership will only
            be enforced if ``if_missing`` is a directory.
        .. versionchanged:: 2016.11.0
            Ownership will be enforced only on the file/directory paths found
            by running :py:func:`archive.list <salt.modules.archive.list_>` on
            the source archive. An alternative root directory on which to
            enforce ownership can be specified using the
            ``enforce_ownership_on`` argument.

    group
        The group to own each extracted file. Not available on Windows.

        .. versionadded:: 2015.8.0
        .. versionchanged:: 2016.3.0
            When used in combination with ``if_missing``, ownership will only
            be enforced if ``if_missing`` is a directory.
        .. versionchanged:: 2016.11.0
            Ownership will be enforced only on the file/directory paths found
            by running :py:func:`archive.list <salt.modules.archive.list_>` on
            the source archive. An alternative root directory on which to
            enforce ownership can be specified using the
            ``enforce_ownership_on`` argument.

    if_missing
        If specified, this path will be checked, and if it exists then the
        archive will not be extracted. This path can be either a directory or a
        file, so this option can also be used to check for a semaphore file and
        conditionally skip extraction.

        .. versionchanged:: 2016.3.0
            When used in combination with either ``user`` or ``group``,
            ownership will only be enforced when ``if_missing`` is a directory.
        .. versionchanged:: 2016.11.0
            Ownership enforcement is no longer tied to this argument, it is
            simply checked for existence and extraction will be skipped if
            it is present.

    keep : False
        For ``source`` archives not local to the minion (i.e. from the Salt
        fileserver or a remote source such as ``http(s)`` or ``ftp``), Salt
        will need to download the archive to the minion cache before they can
        be extracted. After extraction, these source archives will be removed
        unless this argument is set to ``True``.

    trim_output : False
        Useful for archives with many files in them. This can either be set to
        ``True`` (in which case only the first 100 files extracted will be
        in the state results), or it can be set to an integer for more exact
        control over the max number of files to include in the state results.

        .. versionadded:: 2016.3.0

    use_cmd_unzip : False
        Set to ``True`` for zip files to force usage of the
        :py:func:`archive.cmd_unzip <salt.modules.archive.cmd_unzip>` function
        to extract.

        .. versionadded:: 2016.11.0

    extract_perms : True
        **For ZIP archives only.** When using :py:func:`archive.unzip
        <salt.modules.archive.unzip>` to extract ZIP archives, Salt works
        around an `upstream bug in Python`_ to set the permissions on extracted
        files/directories to match those encoded into the ZIP archive. Set this
        argument to ``False`` to skip this workaround.

        .. versionadded:: 2016.11.0

    enforce_toplevel : True
        This option will enforce a single directory at the top level of the
        source archive, to prevent extracting a 'tar-bomb'. Set this argument
        to ``False`` to allow archives with files (or multiple directories) at
        the top level to be extracted.

        .. versionadded:: 2016.11.0

    enforce_ownership_on
        When ``user`` or ``group`` is specified, Salt will default to enforcing
        permissions on the file/directory paths detected by running
        :py:func:`archive.list <salt.modules.archive.list_>` on the source
        archive. Use this argument to specify an alternate directory on which
        ownership should be enforced.

        .. note::
            This path must be within the path specified by the ``name``
            argument.

        .. versionadded:: 2016.11.0

    archive_format
        One of ``tar``, ``zip``, or ``rar``.

        .. versionchanged:: 2016.11.0
            If omitted, the archive format will be guessed based on the value
            of the ``source`` argument.

    .. _tarfile: https://docs.python.org/2/library/tarfile.html
    .. _zipfile: https://docs.python.org/2/library/zipfile.html
    .. _xz-utils: http://tukaani.org/xz/

    **Examples**

    1. tar with lzma (i.e. xz) compression:

       .. code-block:: yaml

           graylog2-server:
             archive.extracted:
               - name: /opt/
               - source: https://github.com/downloads/Graylog2/graylog2-server/graylog2-server-0.9.6p1.tar.lzma
               - source_hash: md5=499ae16dcae71eeb7c3a30c75ea7a1a6

    2. tar archive with flag for verbose output, and enforcement of user/group
       ownership:

       .. code-block:: yaml

           graylog2-server:
             archive.extracted:
               - name: /opt/
               - source: https://github.com/downloads/Graylog2/graylog2-server/graylog2-server-0.9.6p1.tar.gz
               - source_hash: md5=499ae16dcae71eeb7c3a30c75ea7a1a6
               - tar_options: v
               - user: foo
               - group: foo

    3. tar archive, with ``source_hash_update`` set to ``True`` to prevent
       state from attempting extraction unless the ``source_hash`` differs
       from the previous time the archive was extracted:

       .. code-block:: yaml

           graylog2-server:
             archive.extracted:
               - name: /opt/
               - source: https://github.com/downloads/Graylog2/graylog2-server/graylog2-server-0.9.6p1.tar.lzma
               - source_hash: md5=499ae16dcae71eeb7c3a30c75ea7a1a6
               - source_hash_update: True
    '''
    ret = {'name': name, 'result': False, 'changes': {}, 'comment': ''}

    # Remove pub kwargs as they're irrelevant here.
    kwargs = salt.utils.clean_kwargs(**kwargs)

    if not _path_is_abs(name):
        ret['comment'] = '{0} is not an absolute path'.format(name)
        return ret
    else:
        if name is None:
            # Only way this happens is if some doofus specifies "- name: None"
            # in their SLS file. Prevent tracebacks by failing gracefully.
            ret['comment'] = 'None is not a valid directory path'
            return ret
        # os.path.isfile() returns False when there is a trailing slash, hence
        # our need for first stripping the slash and then adding it back later.
        # Otherwise, we can't properly check if the extraction location both a)
        # exists and b) is a file.
        #
        # >>> os.path.isfile('/tmp/foo.txt')
        # True
        # >>> os.path.isfile('/tmp/foo.txt/')
        # False
        name = name.rstrip('/')
        if os.path.isfile(name):
            ret['comment'] = '{0} exists and is not a directory'.format(name)
            return ret
        # Add back the slash so that file.makedirs properly creates the
        # destdir if it needs to be created. file.makedirs expects a trailing
        # slash in the directory path.
        name += '/'
    if not _path_is_abs(if_missing):
        ret['comment'] = 'Value for \'if_missing\' is not an absolute path'
        return ret
    if not _path_is_abs(enforce_ownership_on):
        ret['comment'] = ('Value for \'enforce_ownership_on\' is not an '
                          'absolute path')
        return ret
    else:
        if enforce_ownership_on is not None:
            try:
                not_rel = os.path.relpath(enforce_ownership_on,
                                          name).startswith('..' + os.sep)
            except Exception:
                # A ValueError is raised on Windows when the paths passed to
                # os.path.relpath are not on the same drive letter. Using a
                # generic Exception here to keep other possible exception types
                # from making this state blow up with a traceback.
                not_rel = True
            if not_rel:
                ret['comment'] = (
                    'Value for \'enforce_ownership_on\' must be within {0}'.
                    format(name))
                return ret

    if user or group:
        if salt.utils.is_windows():
            ret['comment'] = \
                'User/group ownership cannot be enforced on Windows minions'
            return ret

        if user:
            uid = __salt__['file.user_to_uid'](user)
            if not uid:
                ret['comment'] = 'User {0} does not exist'.format(user)
                return ret
        else:
            uid = -1

        if group:
            gid = __salt__['file.group_to_gid'](group)
            if not gid:
                ret['comment'] = 'Group {0} does not exist'.format(group)
                return ret
        else:
            gid = -1
    else:
        # We should never hit the ownership enforcement code unless user or
        # group was specified, but just in case, set uid/gid to -1 to make the
        # os.chown() a no-op and avoid a NameError.
        uid = gid = -1

    if source_hash_update and not source_hash:
        ret.setdefault('warnings', []).append(
            'The \'source_hash_update\' argument is ignored when '
            '\'source_hash\' is not also specified.')

    try:
        source_match = __salt__['file.source_list'](source, source_hash,
                                                    __env__)[0]
    except CommandExecutionError as exc:
        ret['result'] = False
        ret['comment'] = exc.strerror
        return ret

    urlparsed_source = _urlparse(source_match)
    source_hash_name = urlparsed_source.path or urlparsed_source.netloc

    valid_archive_formats = ('tar', 'rar', 'zip')
    if not archive_format:
        archive_format = salt.utils.files.guess_archive_type(source_hash_name)
        if archive_format is None:
            ret['comment'] = (
                'Could not guess archive_format from the value of the '
                '\'source\' argument. Please set this archive_format to one '
                'of the following: {0}'.format(
                    ', '.join(valid_archive_formats)))
            return ret
    try:
        archive_format = archive_format.lower()
    except AttributeError:
        pass
    if archive_format not in valid_archive_formats:
        ret['comment'] = (
            'Invalid archive_format \'{0}\'. Either set it to a supported '
            'value ({1}) or remove this argument and the archive format will '
            'be guesseed based on file extension.'.format(
                archive_format,
                ', '.join(valid_archive_formats),
            ))
        return ret

    tar_options = kwargs.pop('tar_options', None)
    zip_options = kwargs.pop('zip_options', None)
    if tar_options:
        msg = ('The \'tar_options\' argument has been deprecated, please use '
               '\'options\' instead.')
        salt.utils.warn_until('Oxygen', msg)
        ret.setdefault('warnings', []).append(msg)
        options = tar_options
    elif zip_options:
        msg = ('The \'zip_options\' argument has been deprecated, please use '
               '\'options\' instead.')
        salt.utils.warn_until('Oxygen', msg)
        ret.setdefault('warnings', []).append(msg)
        options = zip_options

    if archive_format == 'zip':
        if options:
            if use_cmd_unzip is None:
                log.info(
                    'Presence of CLI options in archive.extracted state for '
                    '\'%s\' implies that use_cmd_unzip is set to True.', name)
                use_cmd_unzip = True
            elif not use_cmd_unzip:
                # use_cmd_unzip explicitly disabled
                ret['comment'] = (
                    '\'use_cmd_unzip\' cannot be set to False if CLI options '
                    'are being specified (via the \'options\' argument). '
                    'Either remove \'use_cmd_unzip\', or set it to True.')
                return ret
        if password:
            if use_cmd_unzip is None:
                log.info(
                    'Presence of a password in archive.extracted state for '
                    '\'%s\' implies that use_cmd_unzip is set to False.', name)
                use_cmd_unzip = False
            elif use_cmd_unzip:
                ret.setdefault('warnings', []).append(
                    'Using a password in combination with setting '
                    '\'use_cmd_unzip\' to True is considered insecure. It is '
                    'recommended to remove the \'use_cmd_unzip\' argument (or '
                    'set it to False) and allow Salt to extract the archive '
                    'using Python\'s built-in ZIP file support.')
    else:
        if password:
            ret['comment'] = \
                'The \'password\' argument is only supported for zip archives'
            return ret

    supports_options = ('tar', 'zip')
    if options and archive_format not in supports_options:
        ret['comment'] = (
            'The \'options\' argument is only compatible with the following '
            'archive formats: {0}'.format(', '.join(supports_options)))
        return ret

    if trim_output and not isinstance(trim_output, (bool, six.integer_types)):
        try:
            # Try to handle cases where trim_output was passed as a
            # string-ified integer.
            trim_output = int(trim_output)
        except TypeError:
            ret['comment'] = (
                'Invalid value for trim_output, must be True/False or an '
                'integer')
            return ret

    cached_source = os.path.join(
        __opts__['cachedir'],
        'files',
        __env__,
        re.sub(r'[:/\\]', '_', source_hash_name),
    )

    if os.path.isdir(cached_source):
        # Prevent a traceback from attempting to read from a directory path
        salt.utils.rm_rf(cached_source)

    if source_hash:
        try:
            source_sum = __salt__['file.get_source_sum'](source_hash_name,
                                                         source_hash, __env__)
        except CommandExecutionError as exc:
            ret['comment'] = exc.strerror
            return ret

        if source_hash_update:
            if _compare_checksum(cached_source, source_sum):
                ret['result'] = True
                ret['comment'] = \
                    'Hash {0} has not changed'.format(source_sum['hsum'])
                return ret
    else:
        source_sum = {}

    if not os.path.isfile(cached_source):
        if __opts__['test']:
            ret['result'] = None
            ret['comment'] = \
                'Archive {0} would be downloaded to cache'.format(source_match)
            return ret

        log.debug('%s is not in cache, downloading it', source_match)

        file_result = __salt__['state.single'](
            'file.managed',
            cached_source,
            source=source_match,
            source_hash=source_hash,
            makedirs=True,
            skip_verify=skip_verify,
            saltenv=__env__,
            source_hash_name=source_hash_name)
        log.debug('file.managed: {0}'.format(file_result))

        # Get actual state result. The state.single return is a single-element
        # dictionary with the state's unique ID at the top level, and its value
        # being the state's return dictionary. next(iter(dict_name)) will give
        # us the value of the first key, so
        # file_result[next(iter(file_result))] will give us the results of the
        # state.single we just ran.
        try:
            file_result = file_result[next(iter(file_result))]
        except AttributeError:
            pass

        try:
            if not file_result['result']:
                log.debug('failed to download {0}'.format(source_match))
                return file_result
        except TypeError:
            if not file_result:
                log.debug('failed to download {0}'.format(source_match))
                return file_result
    else:
        log.debug('Archive %s is already in cache', source_match)

    if source_hash:
        _update_checksum(cached_source, source_sum)

    if archive_format == 'zip' and not password:
        log.debug('Checking %s to see if it is password-protected',
                  source_match)
        # Either use_cmd_unzip was explicitly set to True, or was
        # implicitly enabled by setting the "options" argument.
        try:
            encrypted_zip = __salt__['archive.is_encrypted'](cached_source,
                                                             clean=False,
                                                             saltenv=__env__)
        except CommandExecutionError:
            # This would happen if archive_format=zip and the source archive is
            # not actually a zip file.
            pass
        else:
            if encrypted_zip:
                ret['comment'] = (
                    'Archive {0} is password-protected, but no password was '
                    'specified. Please set the \'password\' argument.'.format(
                        source_match))
                return ret

    try:
        contents = __salt__['archive.list'](cached_source,
                                            archive_format=archive_format,
                                            options=list_options,
                                            clean=False,
                                            verbose=True)
    except CommandExecutionError as exc:
        contents = None
        errors = []
        if not if_missing:
            errors.append('\'if_missing\' must be set')
        if not enforce_ownership_on and (user or group):
            errors.append('Ownership cannot be managed without setting '
                          '\'enforce_ownership_on\'.')
        msg = exc.strerror
        if errors:
            msg += '\n\n'
            if archive_format == 'tar':
                msg += (
                    'If the source archive is a tar archive compressed using '
                    'a compression type not natively supported by the tar '
                    'command, then setting the \'list_options\' argument may '
                    'allow the contents to be listed. Otherwise, if Salt is '
                    'unable to determine the files/directories in the '
                    'archive, the following workaround(s) would need to be '
                    'used for this state to proceed')
            else:
                msg += (
                    'The following workarounds must be used for this state to '
                    'proceed')
            msg += (' (assuming the source file is a valid {0} archive):\n'.
                    format(archive_format))

            for error in errors:
                msg += '\n- {0}'.format(error)
        ret['comment'] = msg
        return ret

    if enforce_toplevel and contents is not None \
            and (len(contents['top_level_dirs']) > 1
                 or len(contents['top_level_files']) > 0):
        ret['comment'] = (
            'Archive does not have a single top-level directory. '
            'To allow this archive to be extracted, set '
            '\'enforce_toplevel\' to False. To avoid a '
            '\'{0}-bomb\' it may also be advisable to set a '
            'top-level directory by adding it to the \'name\' '
            'value (for example, setting \'name\' to {1} '
            'instead of {2}).'.format(
                archive_format,
                os.path.join(name, 'some_dir'),
                name,
            ))
        return ret

    # Check to see if we need to extract the archive. Using os.stat() in a
    # try/except is considerably faster than using os.path.exists(), and we
    # already need to catch an OSError to cover edge cases where the minion is
    # running as a non-privileged user and is trying to check for the existence
    # of a path to which it does not have permission.
    extraction_needed = False
    try:
        if_missing_path_exists = os.path.exists(if_missing)
    except TypeError:
        if_missing_path_exists = False

    if not if_missing_path_exists:
        if contents is None:
            try:
                os.stat(if_missing)
                extraction_needed = False
            except OSError as exc:
                if exc.errno == errno.ENOENT:
                    extraction_needed = True
                else:
                    ret['comment'] = (
                        'Failed to check for existence of if_missing path '
                        '({0}): {1}'.format(if_missing, exc.__str__()))
                    return ret
        else:
            incorrect_type = []
            extraction_needed = False
            for path_list, func in ((contents['dirs'], stat.S_ISDIR),
                                    (contents['files'], stat.S_ISREG)):
                for path in path_list:
                    full_path = os.path.join(name, path)
                    try:
                        path_mode = os.stat(full_path).st_mode
                        if not func(path_mode):
                            incorrect_type.append(path)
                    except OSError as exc:
                        if exc.errno == errno.ENOENT:
                            extraction_needed = True
                        else:
                            ret['comment'] = exc.__str__()
                            return ret

            if incorrect_type:
                if not force:
                    msg = (
                        'The below paths (relative to {0}) exist, but are the '
                        'incorrect type (i.e. file instead of directory or '
                        'vice-versa). To proceed with extraction, set '
                        '\'force\' to True.\n'.format(name))
                    for path in incorrect_type:
                        msg += '\n- {0}'.format(path)
                    ret['comment'] = msg
                else:
                    errors = []
                    for path in incorrect_type:
                        full_path = os.path.join(name, path)
                        try:
                            salt.utils.rm_rf(full_path)
                            ret['changes'].setdefault('removed',
                                                      []).append(full_path)
                        except OSError as exc:
                            if exc.errno != errno.ENOENT:
                                errors.append(exc.__str__())
                    if errors:
                        msg = (
                            'One or more paths existed by were the incorrect '
                            'type (i.e. file instead of directory or '
                            'vice-versa), but could not be removed. The '
                            'following errors were observed:\n')
                        for error in errors:
                            msg += '\n- {0}'.format(error)
                        ret['comment'] = msg
                        return ret

    created_destdir = False

    if extraction_needed:
        if __opts__['test']:
            ret['result'] = None
            ret['comment'] = \
                'Archive {0} would be extracted to {1}'.format(
                    source_match,
                    name
                )
            return ret

        if not os.path.isdir(name):
            __salt__['file.makedirs'](name, user=user)
            created_destdir = True

        log.debug('Extracting {0} to {1}'.format(cached_source, name))
        try:
            if archive_format == 'zip':
                if use_cmd_unzip:
                    files = __salt__['archive.cmd_unzip'](
                        cached_source,
                        name,
                        options=options,
                        trim_output=trim_output,
                        password=password,
                        **kwargs)
                else:
                    files = __salt__['archive.unzip'](cached_source,
                                                      name,
                                                      options=options,
                                                      trim_output=trim_output,
                                                      password=password,
                                                      **kwargs)
            elif archive_format == 'rar':
                files = __salt__['archive.unrar'](cached_source,
                                                  name,
                                                  trim_output=trim_output,
                                                  **kwargs)
            else:
                if options is None:
                    try:
                        with closing(tarfile.open(cached_source, 'r')) as tar:
                            tar.extractall(name)
                            files = tar.getnames()
                    except tarfile.ReadError:
                        if salt.utils.which('xz'):
                            if __salt__['cmd.retcode'](
                                ['xz', '-l', cached_source],
                                    python_shell=False,
                                    ignore_retcode=True) == 0:
                                # XZ-compressed data
                                log.debug(
                                    'Tar file is XZ-compressed, attempting '
                                    'decompression and extraction using xz-utils '
                                    'and the tar command')
                                # Must use python_shell=True here because not
                                # all tar implementations support the -J flag
                                # for decompressing XZ-compressed data. We need
                                # to dump the decompressed data to stdout and
                                # pipe it to tar for extraction.
                                cmd = 'xz --decompress --stdout {0} | tar xvf -'
                                results = __salt__['cmd.run_all'](
                                    cmd.format(_cmd_quote(cached_source)),
                                    cwd=name,
                                    python_shell=True)
                                if results['retcode'] != 0:
                                    if created_destdir:
                                        _cleanup_destdir(name)
                                    ret['result'] = False
                                    ret['changes'] = results
                                    return ret
                                if _is_bsdtar():
                                    files = results['stderr']
                                else:
                                    files = results['stdout']
                            else:
                                # Failed to open tar archive and it is not
                                # XZ-compressed, gracefully fail the state
                                if created_destdir:
                                    _cleanup_destdir(name)
                                ret['result'] = False
                                ret['comment'] = (
                                    'Failed to read from tar archive using '
                                    'Python\'s native tar file support. If '
                                    'archive is compressed using something '
                                    'other than gzip or bzip2, the '
                                    '\'options\' argument may be required to '
                                    'pass the correct options to the tar '
                                    'command in order to extract the archive.')
                                return ret
                        else:
                            if created_destdir:
                                _cleanup_destdir(name)
                            ret['result'] = False
                            ret['comment'] = (
                                'Failed to read from tar archive. If it is '
                                'XZ-compressed, install xz-utils to attempt '
                                'extraction.')
                            return ret
                else:
                    try:
                        tar_opts = shlex.split(options)
                    except AttributeError:
                        tar_opts = shlex.split(str(options))

                    tar_cmd = ['tar']
                    tar_shortopts = 'x'
                    tar_longopts = []

                    for position, opt in enumerate(tar_opts):
                        if opt.startswith('-'):
                            tar_longopts.append(opt)
                        else:
                            if position > 0:
                                tar_longopts.append(opt)
                            else:
                                append_opt = opt
                                append_opt = append_opt.replace('x', '')
                                append_opt = append_opt.replace('f', '')
                                tar_shortopts = tar_shortopts + append_opt

                    if __grains__['os'].lower() == 'openbsd':
                        tar_shortopts = '-' + tar_shortopts

                    tar_cmd.append(tar_shortopts)
                    tar_cmd.extend(tar_longopts)
                    tar_cmd.extend(['-f', cached_source])

                    results = __salt__['cmd.run_all'](tar_cmd,
                                                      cwd=name,
                                                      python_shell=False)
                    if results['retcode'] != 0:
                        ret['result'] = False
                        ret['changes'] = results
                        return ret
                    if _is_bsdtar():
                        files = results['stderr']
                    else:
                        files = results['stdout']
                    if not files:
                        files = 'no tar output so far'
        except CommandExecutionError as exc:
            ret['comment'] = exc.strerror
            return ret

    # Recursively set user and group ownership of files
    enforce_missing = []
    enforce_failed = []
    if user or group:
        if enforce_ownership_on:
            enforce_dirs = [enforce_ownership_on]
            enforce_files = []
        else:
            if contents is not None:
                enforce_dirs = contents['top_level_dirs']
                enforce_files = contents['top_level_files']

        recurse = []
        if user:
            recurse.append('user')
        if group:
            recurse.append('group')
        recurse_str = ', '.join(recurse)

        owner_changes = dict([(x, y)
                              for x, y in (('user', user), ('group', group))
                              if y])
        for dirname in enforce_dirs:
            full_path = os.path.join(name, dirname)
            if not os.path.isdir(full_path):
                if not __opts__['test']:
                    enforce_missing.append(full_path)
            else:
                log.debug(
                    'Enforcing %s ownership on %s using a file.directory state%s',
                    recurse_str, dirname,
                    ' (dry-run only)' if __opts__['test'] else '')
                dir_result = __salt__['state.single']('file.directory',
                                                      full_path,
                                                      user=user,
                                                      group=group,
                                                      recurse=recurse,
                                                      test=__opts__['test'])
                try:
                    dir_result = dir_result[next(iter(dir_result))]
                except AttributeError:
                    pass
                log.debug('file.directory: %s', dir_result)

                if __opts__['test']:
                    if dir_result.get('pchanges'):
                        ret['changes']['updated ownership'] = True
                else:
                    try:
                        if dir_result['result']:
                            if dir_result['changes']:
                                ret['changes']['updated ownership'] = True
                        else:
                            enforce_failed.append(full_path)
                    except (KeyError, TypeError):
                        log.warning(
                            'Bad state return %s for file.directory state on %s',
                            dir_result, dirname)

        for filename in enforce_files:
            full_path = os.path.join(name, filename)
            try:
                # Using os.stat instead of calling out to
                # __salt__['file.stats'], since we may be doing this for a lot
                # of files, and simply calling os.stat directly will speed
                # things up a bit.
                file_stat = os.stat(full_path)
            except OSError as exc:
                if not __opts__['test']:
                    if exc.errno == errno.ENOENT:
                        enforce_missing.append(full_path)
                    enforce_failed.append(full_path)
            else:
                # Earlier we set uid, gid to -1 if we're not enforcing
                # ownership on user, group, as passing -1 to os.chown will tell
                # it not to change that ownership. Since we've done that, we
                # can selectively compare the uid/gid from the values in
                # file_stat, _only if_ the "desired" uid/gid is something other
                # than -1.
                if (uid != -1 and uid != file_stat.st_uid) \
                        or (gid != -1 and gid != file_stat.st_gid):
                    if __opts__['test']:
                        ret['changes']['updated ownership'] = True
                    else:
                        try:
                            os.chown(full_path, uid, gid)
                            ret['changes']['updated ownership'] = True
                        except OSError:
                            enforce_failed.append(filename)

    if extraction_needed:
        if len(files) > 0:
            if created_destdir:
                ret['changes']['directories_created'] = [name]
            ret['changes']['extracted_files'] = files
            ret['comment'] = '{0} extracted to {1}'.format(source_match, name)
            if not keep:
                log.debug('Cleaning cached source file %s', cached_source)
                try:
                    os.remove(cached_source)
                except OSError as exc:
                    if exc.errno != errno.ENOENT:
                        log.error('Failed to clean cached source file %s: %s',
                                  cached_source, exc.__str__())
            ret['result'] = True

        else:
            ret['result'] = False
            ret['comment'] = 'Can\'t extract content of {0}'.format(
                source_match)

    else:
        ret['result'] = True
        if if_missing_path_exists:
            ret['comment'] = '{0} exists'.format(if_missing)
        else:
            ret['comment'] = 'All files in archive are already present'
        if __opts__['test']:
            if ret['changes'].get('updated ownership'):
                ret['result'] = None
                ret['comment'] += (
                    '. Ownership would be updated on one or more '
                    'files/directories.')

    if enforce_missing:
        if not if_missing:
            # If is_missing was used, and both a) the archive had never been
            # extracted, and b) the path referred to by if_missing exists, then
            # enforce_missing would contain paths of top_levle dirs/files that
            # _would_ have been extracted. Since if_missing can be used as a
            # semaphore to conditionally extract, we don't want to make this a
            # case where the state fails, so we only fail the state if
            # is_missing is not used.
            ret['result'] = False
        ret['comment'] += (
            '\n\nWhile trying to enforce user/group ownership, the following '
            'paths were missing:\n')
        for item in enforce_missing:
            ret['comment'] += '\n- {0}'.format(item)

    if enforce_failed:
        ret['result'] = False
        ret['comment'] += (
            '\n\nWhile trying to enforce user/group ownership, Salt was '
            'unable to change ownership on the following paths:\n')
        for item in enforce_failed:
            ret['comment'] += '\n- {0}'.format(item)

    return ret
Beispiel #23
0
def mod_repo(repo, **kwargs):
    """
    Modify one or more values for a repo. If the repo does not exist, it will
    be created, so long as the following values are specified:

    repo or alias
        alias by which the zypper refers to the repo

    url, mirrorlist or baseurl
        the URL for zypper to reference

    enabled
        enable or disable (True or False) repository,
        but do not remove if disabled.

    refresh
        enable or disable (True or False) auto-refresh of the repository.

    cache
        Enable or disable (True or False) RPM files caching.

    gpgcheck
        Enable or disable (True or False) GPG check for this repository.

    gpgautoimport
        Automatically trust and import new repository.

    Key/Value pairs may also be removed from a repo's configuration by setting
    a key to a blank value. Bear in mind that a name cannot be deleted, and a
    url can only be deleted if a mirrorlist is specified (or vice versa).

    CLI Examples:

    .. code-block:: bash

        salt '*' pkg.mod_repo alias alias=new_alias
        salt '*' pkg.mod_repo alias url= mirrorlist=http://host.com/
    """

    repos_cfg = _get_configured_repos()
    added = False

    # An attempt to add a new repo?
    if repo not in repos_cfg.sections():
        # Prefer 'url', then 'mirrorlist', then 'baseurl' as the repo URL.
        url = kwargs.get("url", kwargs.get("mirrorlist", kwargs.get("baseurl")))
        if not url:
            raise CommandExecutionError("Repository '{0}' not found and no URL passed to create one.".format(repo))

        if not _urlparse(url).scheme:
            raise CommandExecutionError("Repository '{0}' not found and passed URL looks wrong.".format(repo))

        # Is there already such repo under different alias?
        for alias in repos_cfg.sections():
            repo_meta = _get_repo_info(alias, repos_cfg=repos_cfg)

            # Complete the user URL, in case it has no path. ParseResult is a
            # namedtuple, so _replace() gives us an equal result with only the
            # path normalized to '/'. (The previous code called
            # _urlparse.ParseResult(...), but _urlparse is the urlparse
            # *function* — see its other uses in this file — which has no
            # ParseResult attribute and would raise AttributeError here.)
            new_url = _urlparse(url)
            if not new_url.path:
                new_url = new_url._replace(path="/")
            base_url = _urlparse(repo_meta["baseurl"])

            if new_url == base_url:
                raise CommandExecutionError("Repository '{0}' already exists as '{1}'.".format(repo, alias))

        # Add new repo
        doc = None
        try:
            # Try to parse the output and find the error,
            # but this not always working (depends on Zypper version)
            # NOTE(review): url and repo are interpolated into a shell command
            # string; the single-quoting assumes neither value contains a
            # single quote. Consider passing a list with python_shell=False.
            doc = dom.parseString(
                __salt__["cmd.run"](("zypper -x ar {0} '{1}'".format(url, repo)), output_loglevel="trace")
            )
        except Exception:
            # No XML out available, but it is still unknown the state of the result.
            pass

        if doc:
            # Zypper reports failures as <message type="error"> nodes.
            msg_nodes = doc.getElementsByTagName("message")
            if msg_nodes:
                msg_node = msg_nodes[0]
                if msg_node.getAttribute("type") == "error":
                    raise CommandExecutionError(msg_node.childNodes[0].nodeValue)

        # Verify the repository has been added
        repos_cfg = _get_configured_repos()
        if repo not in repos_cfg.sections():
            raise CommandExecutionError(
                "Failed add new repository '{0}' for unknown reason. " "Please look into Zypper logs.".format(repo)
            )
        added = True

    # Modify added or existing repo according to the options
    cmd_opt = []

    if "enabled" in kwargs:
        cmd_opt.append(kwargs["enabled"] and "--enable" or "--disable")

    if "refresh" in kwargs:
        cmd_opt.append(kwargs["refresh"] and "--refresh" or "--no-refresh")

    if "cache" in kwargs:
        cmd_opt.append(kwargs["cache"] and "--keep-packages" or "--no-keep-packages")

    if "gpgcheck" in kwargs:
        cmd_opt.append(kwargs["gpgcheck"] and "--gpgcheck" or "--no-gpgcheck")

    if kwargs.get("gpgautoimport") is True:
        cmd_opt.append("--gpg-auto-import-keys")

    if cmd_opt:
        __salt__["cmd.run"](("zypper -x mr {0} '{1}'".format(" ".join(cmd_opt), repo)), output_loglevel="trace")

    # If the repo was neither added nor modified, an error should be thrown
    if not added and not cmd_opt:
        raise CommandExecutionError("Modification of the repository '{0}' was not specified.".format(repo))

    return get_repo(repo)
Beispiel #24
0
 def extract_name(queue_url):
     """Return the queue name component of an SQS queue URL.

     The URL path looks like ``/<account_id>/<queue_name>``, so the third
     element of the '/'-split path is the queue name.
     (Note: this logic taken from boto, so should be safe.)
     """
     path_components = _urlparse(queue_url).path.split('/')
     return path_components[2]
Beispiel #25
0
def extracted(name,
              source,
              archive_format,
              archive_user=None,
              password=None,
              user=None,
              group=None,
              tar_options=None,
              source_hash=None,
              if_missing=None,
              keep=False,
              trim_output=False,
              skip_verify=False,
              source_hash_update=None):
    '''
    .. versionadded:: 2014.1.0

    State that make sure an archive is extracted in a directory.
    The downloaded archive is erased if successfully extracted.
    The archive is downloaded only if necessary.

    .. note::

        If ``if_missing`` is not defined, this state will check for ``name``
        instead.  If ``name`` exists, it will assume the archive was previously
        extracted successfully and will not extract it again.

    Example, tar with flag for lmza compression:

    .. code-block:: yaml

        graylog2-server:
          archive.extracted:
            - name: /opt/
            - source: https://github.com/downloads/Graylog2/graylog2-server/graylog2-server-0.9.6p1.tar.lzma
            - source_hash: md5=499ae16dcae71eeb7c3a30c75ea7a1a6
            - tar_options: J
            - archive_format: tar
            - if_missing: /opt/graylog2-server-0.9.6p1/

    Example, tar with flag for verbose output:

    .. code-block:: yaml

        graylog2-server:
          archive.extracted:
            - name: /opt/
            - source: https://github.com/downloads/Graylog2/graylog2-server/graylog2-server-0.9.6p1.tar.gz
            - source_hash: md5=499ae16dcae71eeb7c3a30c75ea7a1a6
            - archive_format: tar
            - tar_options: v
            - user: root
            - group: root
            - if_missing: /opt/graylog2-server-0.9.6p1/

    Example, tar with flag for lmza compression and update based if source_hash differs from what was
    previously extracted:

    .. code-block:: yaml

        graylog2-server:
          archive.extracted:
            - name: /opt/
            - source: https://github.com/downloads/Graylog2/graylog2-server/graylog2-server-0.9.6p1.tar.lzma
            - source_hash: md5=499ae16dcae71eeb7c3a30c75ea7a1a6
            - source_hash_update: true
            - tar_options: J
            - archive_format: tar
            - if_missing: /opt/graylog2-server-0.9.6p1/

    name
        Location where archive should be extracted

    password
        Password to use with password protected zip files. Currently only zip
        files with passwords are supported.

        .. versionadded:: 2016.3.0

    source
        Archive source, same syntax as file.managed source argument.

    source_hash
        Hash of source file, or file with list of hash-to-file mappings.
        It uses the same syntax as the file.managed source_hash argument.

    source_hash_update
        Set this to ``True`` if archive should be extracted if source_hash has
        changed. This would extract regardless of the ``if_missing`` parameter.

        .. versionadded:: 2016.3.0

    skip_verify:False
        If ``True``, hash verification of remote file sources (``http://``,
        ``https://``, ``ftp://``) will be skipped, and the ``source_hash``
        argument will be ignored.

        .. versionadded:: 2016.3.4

    archive_format
        ``tar``, ``zip`` or ``rar``

    archive_user
        The user to own each extracted file.

        .. deprecated:: 2014.7.2
            Replaced by ``user`` parameter

    user
        The user to own each extracted file.

        .. versionadded:: 2015.8.0
        .. versionchanged:: 2016.3.0
            When used in combination with ``if_missing``, ownership will only
            be enforced if ``if_missing`` is a directory.

    group
        The group to own each extracted file.

        .. versionadded:: 2015.8.0
        .. versionchanged:: 2016.3.0
            When used in combination with ``if_missing``, ownership will only
            be enforced if ``if_missing`` is a directory.

    if_missing
        If specified, this path will be checked, and if it exists then the
        archive will not be extracted. This can be helpful if the archive
        extracts all files into a subfolder. This path can be either a
        directory or a file, so this option can also be used to check for a
        semaphore file and conditionally skip extraction.

        .. versionchanged:: 2016.3.0
            When used in combination with either ``user`` or ``group``,
            ownership will only be enforced when ``if_missing`` is a directory.

    tar_options
        If ``archive_format`` is set to ``tar``, this option can be used to
        specify a string of additional arguments to pass to the tar command. If
        ``archive_format`` is set to ``tar`` and this option is *not* used,
        then the minion will attempt to use Python's native tarfile_ support to
        extract it. Python's native tarfile_ support can only handle gzip and
        bzip2 compression, however.

        .. versionchanged:: 2015.8.11,2016.3.2
            XZ-compressed archives no longer require ``J`` to manually be set
            in the ``tar_options``, they are now detected automatically and
            Salt will extract them using ``xz-utils``. This is a more
            platform-independent solution, as not all tar implementations
            support the ``J`` argument for extracting archives.

        .. note::
            Main operators like -x, --extract, --get, -c and -f/--file **should
            not be used** here.

            Using this option means that the ``tar`` command will be used,
            which is less platform-independent, so keep this in mind when using
            this option; the options must be valid options for the ``tar``
            implementation on the minion's OS.

        .. _tarfile: https://docs.python.org/2/library/tarfile.html

    keep
        Keep the archive in the minion's cache

    trim_output
        The number of files we should output on success before the rest are
        trimmed, if this is set to True then it will default to 100

        .. versionadded:: 2016.3.0
    '''
    ret = {'name': name, 'result': None, 'changes': {}, 'comment': ''}
    valid_archives = ('tar', 'rar', 'zip')

    if archive_format not in valid_archives:
        ret['result'] = False
        ret['comment'] = '{0} is not supported, valid formats are: {1}'.format(
            archive_format, ','.join(valid_archives))
        return ret

    # remove this whole block after formal deprecation.
    if archive_user is not None:
        warn_until(
          'Carbon',
          'Passing \'archive_user\' is deprecated.'
          'Pass \'user\' instead.'
        )
        if user is None:
            user = archive_user

    if not name.endswith('/'):
        name += '/'

    if __opts__['test']:
        source_match = source
    else:
        try:
            source_match = __salt__['file.source_list'](source,
                                                        source_hash,
                                                        __env__)[0]
        except CommandExecutionError as exc:
            ret['result'] = False
            ret['comment'] = exc.strerror
            return ret

    urlparsed_source = _urlparse(source_match)
    source_hash_name = urlparsed_source.path or urlparsed_source.netloc

    source_is_local = urlparsed_source.scheme in ('', 'file')
    if source_is_local:
        # Get rid of "file://" from start of source_match
        source_match = urlparsed_source.path
        if not os.path.isfile(source_match):
            ret['comment'] = 'Source file \'{0}\' does not exist'.format(source_match)
            return ret

    if if_missing is None:
        if_missing = name
    if source_hash and source_hash_update:
        if urlparsed_source.scheme != '':
            ret['result'] = False
            ret['comment'] = (
                '\'source_hash_update\' is not yet implemented for a remote '
                'source_hash'
            )
            return ret
        else:
            try:
                hash_type, hsum = source_hash.split('=')
            except ValueError:
                ret['result'] = False
                ret['comment'] = 'Invalid source_hash format'
                return ret
            source_file = '{0}.{1}'.format(os.path.basename(source), hash_type)
            hash_fname = os.path.join(__opts__['cachedir'],
                                'files',
                                __env__,
                                source_file)
            if _compare_checksum(hash_fname, name, hsum):
                ret['result'] = True
                ret['comment'] = 'Hash {0} has not changed'.format(hsum)
                return ret
    elif (
        __salt__['file.directory_exists'](if_missing)
        or __salt__['file.file_exists'](if_missing)
    ):
        ret['result'] = True
        ret['comment'] = '{0} already exists'.format(if_missing)
        return ret

    log.debug('Input seem valid so far')
    if source_is_local:
        filename = source_match
    else:
        filename = os.path.join(
            __opts__['cachedir'],
            'files',
            __env__,
            '{0}.{1}'.format(re.sub('[:/\\\\]', '_', if_missing), archive_format))

    if not source_is_local and not os.path.isfile(filename):
        if __opts__['test']:
            ret['result'] = None
            ret['comment'] = \
                '{0} {1} would be downloaded to cache'.format(
                    'One of' if not isinstance(source_match, six.string_types)
                        else 'Archive',
                    source_match
                )
            return ret

        log.debug('%s is not in cache, downloading it', source_match)

        file_result = __states__['file.managed'](filename,
                                                 source=source_match,
                                                 source_hash=source_hash,
                                                 makedirs=True,
                                                 skip_verify=skip_verify,
                                                 source_hash_name=source_hash_name)
        log.debug('file.managed: {0}'.format(file_result))
        # get value of first key
        try:
            file_result = file_result[next(six.iterkeys(file_result))]
        except AttributeError:
            pass

        try:
            if not file_result['result']:
                log.debug('failed to download {0}'.format(source))
                return file_result
        except TypeError:
            if not file_result:
                log.debug('failed to download {0}'.format(source))
                return file_result
    else:
        log.debug('Archive %s is already in cache', source)

    if __opts__['test']:
        ret['result'] = None
        # Dry-run summary: report what WOULD be extracted and bail out.
        # NOTE(review): the guarding condition sits above this chunk --
        # presumably ``__opts__['test']``; confirm against the full source.
        ret['comment'] = '{0} {1} would be extracted to {2}'.format(
                'One of' if not isinstance(source_match, six.string_types)
                    else 'Archive',
                source_match,
                name
            )
        return ret

    # Ensure the destination exists and is a directory. If we create it here,
    # remember that fact so a failed extraction below can remove it again via
    # _cleanup_destdir() instead of leaving an empty directory behind.
    created_destdir = False
    if __salt__['file.file_exists'](name.rstrip('/')):
        # A regular file is squatting on the destination path -- fail early.
        ret['result'] = False
        ret['comment'] = ('{0} exists and is not a directory'
                          .format(name.rstrip('/')))
        return ret
    elif not __salt__['file.directory_exists'](name):
        __salt__['file.makedirs'](name, user=archive_user)
        created_destdir = True

    log.debug('Extracting {0} to {1}'.format(filename, name))
    if archive_format == 'zip':
        # Prefer the external ``unzip`` binary via archive.cmd_unzip.
        # Fall back to archive.unzip (Python zipfile) when a password was
        # supplied or ``unzip`` is not installed -- see the CPython issue
        # linked below for why the external tool is preferred.
        if password is None and salt.utils.which('unzip'):
            files = __salt__['archive.cmd_unzip'](filename, name, trim_output=trim_output)
        else:
            # https://bugs.python.org/issue15795
            if password is not None:
                log.warning('Password supplied: using archive.unzip')
            if not salt.utils.which('unzip'):
                log.warning('Cannot find unzip command for archive.cmd_unzip:'
                            ' using archive.unzip instead')
            files = __salt__['archive.unzip'](filename, name, trim_output=trim_output, password=password)
    elif archive_format == 'rar':
        files = __salt__['archive.unrar'](filename, name, trim_output=trim_output)
    else:
        # Tar archive. With no explicit tar_options, first try Python's
        # tarfile (handles gzip/bzip2 transparently); if that fails, probe
        # for XZ compression and shell out to xz + tar as a fallback.
        if tar_options is None:
            try:
                with closing(tarfile.open(filename, 'r')) as tar:
                    # Collect the member names BEFORE extraction so they can
                    # be reported in ret['changes'] below.
                    files = tar.getnames()
                    tar.extractall(name)
            except tarfile.ReadError:
                if salt.utils.which('xz'):
                    # ``xz -l`` exits 0 only for valid XZ data, so it serves
                    # as a cheap format probe (retcode ignored for logging).
                    if __salt__['cmd.retcode'](['xz', '-l', filename],
                                               python_shell=False,
                                               ignore_retcode=True) == 0:
                        # XZ-compressed data
                        log.debug(
                            'Tar file is XZ-compressed, attempting '
                            'decompression and extraction using xz-utils '
                            'and the tar command'
                        )
                        # Must use python_shell=True here because not all tar
                        # implementations support the -J flag for decompressing
                        # XZ-compressed data. We need to dump the decompressed
                        # data to stdout and pipe it to tar for extraction.
                        cmd = 'xz --decompress --stdout {0} | tar xvf -'
                        results = __salt__['cmd.run_all'](
                            cmd.format(_cmd_quote(filename)),
                            cwd=name,
                            python_shell=True)
                        if results['retcode'] != 0:
                            # Extraction failed: undo the directory we
                            # created (if any) so the state leaves no trace.
                            if created_destdir:
                                _cleanup_destdir(name)
                            ret['result'] = False
                            ret['changes'] = results
                            return ret
                        # bsdtar writes the verbose file listing to stderr,
                        # GNU tar writes it to stdout.
                        if _is_bsdtar():
                            files = results['stderr']
                        else:
                            files = results['stdout']
                    else:
                        # Failed to open tar archive and it is not
                        # XZ-compressed, gracefully fail the state
                        if created_destdir:
                            _cleanup_destdir(name)
                        ret['result'] = False
                        ret['comment'] = (
                            'Failed to read from tar archive using Python\'s '
                            'native tar file support. If archive is '
                            'compressed using something other than gzip or '
                            'bzip2, the \'tar_options\' parameter may be '
                            'required to pass the correct options to the tar '
                            'command in order to extract the archive.'
                        )
                        return ret
                else:
                    # No xz binary available to attempt the fallback.
                    if created_destdir:
                        _cleanup_destdir(name)
                    ret['result'] = False
                    ret['comment'] = (
                        'Failed to read from tar archive. If it is '
                        'XZ-compressed, install xz-utils to attempt '
                        'extraction.'
                    )
                    return ret
        else:
            # Explicit tar_options supplied: build the tar command line by
            # hand and run it directly.
            try:
                tar_opts = tar_options.split(' ')
            except AttributeError:
                # tar_options may arrive as a non-string (e.g. YAML parsed a
                # bare value as int); coerce to str before splitting.
                tar_opts = str(tar_options).split(' ')

            tar_cmd = ['tar']
            tar_shortopts = 'x'
            tar_longopts = []

            # The first whitespace-delimited token (position 0, no leading
            # '-') is treated as bundled short options; 'x' and 'f' are
            # stripped from it because they are added unconditionally below.
            # Every other token is passed through to tar verbatim.
            for position, opt in enumerate(tar_opts):
                if opt.startswith('-'):
                    tar_longopts.append(opt)
                else:
                    if position > 0:
                        tar_longopts.append(opt)
                    else:
                        append_opt = opt
                        append_opt = append_opt.replace('x', '').replace('f', '')
                        tar_shortopts = tar_shortopts + append_opt

            tar_cmd.append(tar_shortopts)
            tar_cmd.extend(tar_longopts)
            tar_cmd.extend(['-f', filename])

            results = __salt__['cmd.run_all'](tar_cmd, cwd=name, python_shell=False)
            if results['retcode'] != 0:
                # NOTE(review): unlike the xz fallback above, this failure
                # path does not call _cleanup_destdir() -- confirm whether
                # that asymmetry is intentional.
                ret['result'] = False
                ret['changes'] = results
                return ret
            # bsdtar writes the verbose file listing to stderr, GNU tar to
            # stdout.
            if _is_bsdtar():
                files = results['stderr']
            else:
                files = results['stdout']
            if not files:
                # Placeholder so the truthiness check below still reports
                # success even when tar produced no listing.
                files = 'no tar output so far'

    # Recursively set user and group ownership of files after extraction.
    # Note: We do this here because we might not have access to the cachedir.
    if user or group:
        # ``if_missing`` is bound above this chunk; presumably it defaults to
        # ``name`` when not set by the caller -- TODO confirm in full source.
        if os.path.isdir(if_missing):
            recurse = []
            if user:
                recurse.append('user')
            if group:
                recurse.append('group')
            # Delegate recursive chown to the file.directory state.
            dir_result = __states__['file.directory'](if_missing,
                                                      user=user,
                                                      group=group,
                                                      recurse=recurse)
            log.debug('file.directory: %s', dir_result)
        elif os.path.isfile(if_missing):
            log.debug('if_missing (%s) is a file, not enforcing user/group '
                      'permissions', if_missing)

    if len(files) > 0:
        ret['result'] = True
        # NOTE(review): directories_created is reported even when the
        # destination pre-existed (created_destdir is not consulted here).
        ret['changes']['directories_created'] = [name]
        ret['changes']['extracted_files'] = files
        ret['comment'] = '{0} extracted to {1}'.format(source_match, name)
        # Remove the cached archive unless the source is a local file or the
        # caller asked to keep it.
        if not source_is_local and not keep:
            os.unlink(filename)
        if source_hash and source_hash_update:
            # ``hash`` here shadows the builtin; it is bound above this chunk
            # (presumably a (type, digest) pair -- confirm in full source).
            _update_checksum(hash_fname, name, hash[1])

    else:
        # Nothing was extracted: remove the (empty) target and fail.
        __salt__['file.remove'](if_missing)
        ret['result'] = False
        ret['comment'] = 'Can\'t extract content of {0}'.format(source_match)
    return ret