Example #1
def _get_file_from_s3(creds, metadata, saltenv, bucket, path,
                      cached_file_path):
    '''
    Checks the local cache for the file, if it's old or missing go grab the
    file from S3 and update the cache
    '''

    # check the local cache...
    if os.path.isfile(cached_file_path):
        file_meta = _find_file_meta(metadata, bucket, saltenv, path)
        file_md5 = "".join(list(filter(str.isalnum, file_meta['ETag']))) \
            if file_meta else None

        cached_md5 = salt.utils.get_hash(cached_file_path, 'md5')

        log.debug("Cached file: path={0}, md5={1}, etag={2}".format(
            cached_file_path, cached_md5, file_md5))

        # hashes match we have a cache hit
        log.debug("Cached file: path={0}, md5={1}, etag={2}".format(
            cached_file_path, cached_md5, file_md5))
        if cached_md5 == file_md5:
            return

    # ... or get the file from S3
    __utils__['s3.query'](key=creds.key,
                          keyid=creds.keyid,
                          kms_keyid=creds.kms_keyid,
                          bucket=bucket,
                          service_url=creds.service_url,
                          path=_quote(path),
                          local_file=cached_file_path,
                          verify_ssl=creds.verify_ssl,
                          location=creds.location)
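The cache check in this example relies on the fact that, for non-multipart uploads, the S3 ETag is the object's MD5 digest wrapped in double quotes. Below is a minimal, self-contained sketch (not taken from Salt; names and paths are hypothetical) of that normalise-and-compare step, including the "".join(...) that Python 3's lazy filter() makes necessary:

# Hypothetical sketch of the ETag / local MD5 comparison used above.
import hashlib

def etag_matches_local_md5(etag, local_path):
    # For non-multipart uploads S3 returns the object's MD5 wrapped in
    # quotes, e.g. '"9e107d9d372bb6826bd81d3542a419d6"'.  Keeping only the
    # alphanumeric characters drops the quotes; on Python 3 filter()
    # returns an iterator, so it has to be joined back into a string.
    normalized_etag = "".join(filter(str.isalnum, etag))

    md5 = hashlib.md5()
    with open(local_path, "rb") as handle:
        for chunk in iter(lambda: handle.read(65536), b""):
            md5.update(chunk)
    return md5.hexdigest() == normalized_etag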
Example #2
def _get_file_from_s3(creds, metadata, saltenv, bucket, path,
                      cached_file_path):
    '''
    Checks the local cache for the file, if it's old or missing go grab the
    file from S3 and update the cache
    '''

    # check the local cache...
    if os.path.isfile(cached_file_path):
        file_meta = _find_file_meta(metadata, bucket, saltenv, path)
        # On Python 3 filter() returns an iterator; join it back into a
        # string so the comparison with the md5 hex digest below can match
        file_md5 = "".join(filter(str.isalnum, file_meta['ETag'])) \
            if file_meta else None

        cached_md5 = salt.utils.get_hash(cached_file_path, 'md5')

        # hashes match we have a cache hit
        if cached_md5 == file_md5:
            return

    # ... or get the file from S3
    s3.query(
        key=creds.key,
        keyid=creds.keyid,
        bucket=bucket,
        service_url=creds.service_url,
        path=_quote(path),
        local_file=cached_file_path,
        verify_ssl=creds.verify_ssl
    )
Example #3
def _get_file_from_s3(creds, metadata, saltenv, bucket, path,
                      cached_file_path):
    '''
    Checks the local cache for the file, if it's old or missing go grab the
    file from S3 and update the cache
    '''

    # check the local cache...
    if os.path.isfile(cached_file_path):
        file_meta = _find_file_meta(metadata, bucket, saltenv, path)
        # Join the filtered characters back into a string; a bare list of
        # characters would never compare equal to the md5 hex digest below
        file_md5 = "".join(filter(str.isalnum, file_meta['ETag'])) \
            if file_meta else None

        cached_md5 = salt.utils.get_hash(cached_file_path, 'md5')

        # hashes match we have a cache hit
        if cached_md5 == file_md5:
            return

    # ... or get the file from S3
    s3.query(key=creds.key,
             keyid=creds.keyid,
             bucket=bucket,
             service_url=creds.service_url,
             path=urllib.quote(path),
             local_file=cached_file_path,
             verify_ssl=creds.verify_ssl)
Example #4
def list_installed():
    '''
    Return a list of all installed kernels.

    CLI Example:

    .. code-block:: bash

        salt '*' kernelpkg.list_installed
    '''
    pkg_re = re.compile(r'^{0}-[\d.-]+-{1}$'.format(
        _package_prefix(), _kernel_type()))
    pkgs = __salt__['pkg.list_pkgs'](versions_as_list=True)
    if pkgs is None:
        pkgs = []

    result = list(filter(pkg_re.match, pkgs))
    if result is None:
        return []

    prefix_len = len(_package_prefix()) + 1

    if six.PY2:
        return sorted([pkg[prefix_len:] for pkg in result], cmp=_cmp_version)
    else:
        return sorted([pkg[prefix_len:] for pkg in result], key=functools.cmp_to_key(_cmp_version))
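The six.PY2 branch exists because Python 3 removed the cmp= argument of sorted(); a comparison function has to be wrapped with functools.cmp_to_key() instead. A small runnable sketch of that pattern with a hypothetical version comparator (the real _cmp_version lives elsewhere in the module):

# Hypothetical stand-in for _cmp_version: compare dash/dot separated
# kernel versions numerically instead of lexicographically.
import functools

def _cmp_version_sketch(left, right):
    left_parts = [int(p) for p in left.replace('-', '.').split('.') if p.isdigit()]
    right_parts = [int(p) for p in right.replace('-', '.').split('.') if p.isdigit()]
    return (left_parts > right_parts) - (left_parts < right_parts)

versions = ['4.15.0-20', '4.4.0-141', '4.15.0-112']
# Python 2 accepted sorted(versions, cmp=_cmp_version_sketch); on
# Python 3 the comparator is adapted with functools.cmp_to_key().
print(sorted(versions, key=functools.cmp_to_key(_cmp_version_sketch)))
# ['4.4.0-141', '4.15.0-20', '4.15.0-112']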
Example #5
def _get_file_from_s3(creds, metadata, saltenv, bucket, path,
                      cached_file_path):
    '''
    Checks the local cache for the file, if it's old or missing go grab the
    file from S3 and update the cache
    '''

    # check the local cache...
    if os.path.isfile(cached_file_path):
        file_meta = _find_file_meta(metadata, bucket, saltenv, path)
        file_md5 = "".join(list(filter(str.isalnum, file_meta['ETag']))) \
            if file_meta else None

        cached_md5 = salt.utils.get_hash(cached_file_path, 'md5')

        log.debug("Cached file: path={0}, md5={1}, etag={2}".format(cached_file_path, cached_md5, file_md5))

        # hashes match we have a cache hit
        log.debug("Cached file: path={0}, md5={1}, etag={2}".format(cached_file_path, cached_md5, file_md5))
        if cached_md5 == file_md5:
            return

    # ... or get the file from S3
    __utils__['s3.query'](
        key=creds.key,
        keyid=creds.keyid,
        kms_keyid=creds.kms_keyid,
        bucket=bucket,
        service_url=creds.service_url,
        path=_quote(path),
        local_file=cached_file_path,
        verify_ssl=creds.verify_ssl,
        location=creds.location
    )
Example #6
def _get_file_from_blob(connection_string, metadata, saltenv, container, path,
                        cached_file_path):
    """
    .. versionadded:: 3001

    Downloads a single file from an Azure Blob Container to the local filesystem.

    :param connection_string: The connection string to use to access the specified Azure Blob Container.

    :param metadata: The metadata for the container files.

    :param saltenv: Specifies which environment the container represents when in single environment mode. This is
        ignored if multiple_env is set as True.

    :param container: The name of the target Azure Blob Container.

    :param path: The path of the file in the container.

    :param cached_file_path: The path of where the file will be cached.

    """
    # check the local cache...
    if os.path.isfile(cached_file_path):
        file_meta = _find_file_meta(metadata, container, saltenv, path)
        file_md5 = ("".join(list(filter(str.isalnum, file_meta["etag"])))
                    if file_meta else None)

        cached_md5 = salt.utils.hashutils.get_hash(cached_file_path, "md5")

        # hashes match we have a cache hit
        log.debug(
            "Cached file: path=%s, md5=%s, etag=%s",
            cached_file_path,
            cached_md5,
            file_md5,
        )
        if cached_md5 == file_md5:
            return

    try:
        # Create the BlobServiceClient object which will be used to create a container client
        blob_service_client = BlobServiceClient.from_connection_string(
            connection_string)

        # Create the ContainerClient object
        container_client = blob_service_client.get_container_client(container)

        # Create the BlobClient object
        blob_client = container_client.get_blob_client(path)
    except Exception as exc:  # pylint: disable=broad-except
        log.error("Exception: %s", exc)
        return False

    with salt.utils.files.fopen(cached_file_path, "wb") as outfile:
        outfile.write(blob_client.download_blob().readall())

    return
Example #7
def _find_file_meta(metadata, bucket, saltenv, path):
    '''
    Looks for a file's metadata in the S3 bucket cache file
    '''

    env_meta = metadata[saltenv] if saltenv in metadata else {}
    bucket_meta = env_meta[bucket] if bucket in env_meta else {}
    files_meta = list(list(filter((lambda k: 'Key' in k), bucket_meta)))

    for item_meta in files_meta:
        if 'Key' in item_meta and item_meta['Key'] == path:
            return item_meta
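Here filter() keeps only the entries of the cached bucket metadata that actually describe files, i.e. the dicts carrying a 'Key' field (the outer of the two list() wrappers is redundant but harmless). A tiny illustration with hypothetical metadata:

# Hypothetical cached bucket metadata: file entries carry a 'Key',
# other S3 response fields do not.
bucket_meta = [
    {'Name': 'my-bucket'},
    {'Key': 'base/top.sls', 'ETag': '"abc123"'},
    {'Key': 'base/init.sls', 'ETag': '"def456"'},
]
files_meta = list(filter(lambda k: 'Key' in k, bucket_meta))
print([item['Key'] for item in files_meta])   # ['base/top.sls', 'base/init.sls']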
Example #8
def _find_file_meta(metadata, bucket_name, saltenv, path):
    '''
    Looks for a file's metadata in the S3 bucket cache file
    '''

    env_meta = metadata[saltenv] if saltenv in metadata else {}
    bucket_meta = env_meta[bucket_name] if bucket_name in env_meta else {}
    files_meta = list(list(filter((lambda k: 'Key' in k), bucket_meta)))

    for item_meta in files_meta:
        if 'Key' in item_meta and item_meta['Key'] == path:
            return item_meta
Example #9
def _find_file_meta(metadata, bucket_name, saltenv, path):
    '''
    Looks for a file's metadata in the S3 bucket cache file
    '''
    env_meta = metadata[saltenv] if saltenv in metadata else {}
    bucket_meta = env_meta[bucket_name] if bucket_name in env_meta else {}
    files_meta = list(list(filter((lambda k: 'Key' in k), bucket_meta)))

    for item_meta in files_meta:
        if 'Key' in item_meta and item_meta['Key'] == path:
            try:
                # Get rid of quotes surrounding md5
                item_meta['ETag'] = item_meta['ETag'].strip('"')
            except KeyError:
                pass
            return item_meta
Example #10
def _find_file_meta(metadata, bucket_name, saltenv, path):
    '''
    Looks for a file's metadata in the S3 bucket cache file
    '''
    env_meta = metadata[saltenv] if saltenv in metadata else {}
    bucket_meta = env_meta[bucket_name] if bucket_name in env_meta else {}
    files_meta = list(list(filter((lambda k: 'Key' in k), bucket_meta)))

    for item_meta in files_meta:
        if 'Key' in item_meta and item_meta['Key'] == path:
            try:
                # Get rid of quotes surrounding md5
                item_meta['ETag'] = item_meta['ETag'].strip('"')
            except KeyError:
                pass
            return item_meta
Example #11
def _upstart_is_disabled(name):
    '''
    An Upstart service is assumed disabled if a manual stanza is
    placed in /etc/init/[name].override.
    NOTE: An Upstart service can also be disabled by placing "manual"
    in /etc/init/[name].conf.
    '''
    files = [
        '/etc/init/{0}.conf'.format(name),
        '/etc/init/{0}.override'.format(name)
    ]
    for file_name in filter(os.path.isfile, files):
        with salt.utils.files.fopen(file_name) as fp_:
            if re.search(r'^\s*manual',
                         salt.utils.stringutils.to_unicode(fp_.read()),
                         re.MULTILINE):
                return True
    return False
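The filter(os.path.isfile, files) idiom above simply skips candidate paths that do not exist, so the function never tries to open a missing .conf or .override file. A short sketch with made-up paths:

# Hypothetical demonstration of filtering a path list down to the
# files that actually exist before opening them.
import os

candidates = ['/etc/init/ssh.conf', '/etc/init/ssh.override', '/no/such/file']
for existing in filter(os.path.isfile, candidates):
    print('would scan for a "manual" stanza in', existing)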
Example #12
def _find_file_meta(metadata, bucket_name, saltenv, path):
    """
    Looks for a file's metadata in the S3 bucket cache file
    """
    env_meta = metadata[saltenv] if saltenv in metadata else {}
    bucket_meta = {}
    for bucket in env_meta:
        if bucket_name in bucket:
            bucket_meta = bucket[bucket_name]
    files_meta = list(list(filter((lambda k: "Key" in k), bucket_meta)))

    for item_meta in files_meta:
        if "Key" in item_meta and item_meta["Key"] == path:
            try:
                # Get rid of quotes surrounding md5
                item_meta["ETag"] = item_meta["ETag"].strip('"')
            except KeyError:
                pass
            return item_meta
Example #13
def _upstart_enable(name):
    '''
    Enable an Upstart service.
    '''
    if _upstart_is_enabled(name):
        return _upstart_is_enabled(name)
    override = '/etc/init/{0}.override'.format(name)
    files = ['/etc/init/{0}.conf'.format(name), override]
    for file_name in filter(os.path.isfile, files):
        with salt.utils.files.fopen(file_name, 'r+') as fp_:
            new_text = re.sub(r'^\s*manual\n?', '',
                              salt.utils.stringutils.to_unicode(fp_.read()), 0,
                              re.MULTILINE)
            fp_.seek(0)
            fp_.write(salt.utils.stringutils.to_str(new_text))
            fp_.truncate()
    if os.access(override, os.R_OK) and os.path.getsize(override) == 0:
        os.unlink(override)
    return _upstart_is_enabled(name)
Example #14
def _get_file_from_s3(creds, metadata, saltenv, bucket, path,
                      cached_file_path):
    """
    Checks the local cache for the file, if it's old or missing go grab the
    file from S3 and update the cache
    """

    # check the local cache...
    if os.path.isfile(cached_file_path):
        file_meta = _find_file_meta(metadata, bucket, saltenv, path)
        file_md5 = ("".join(list(filter(str.isalnum, file_meta["ETag"])))
                    if file_meta else None)

        cached_md5 = salt.utils.hashutils.get_hash(cached_file_path, "md5")

        # hashes match we have a cache hit
        log.debug(
            "Cached file: path=%s, md5=%s, etag=%s",
            cached_file_path,
            cached_md5,
            file_md5,
        )
        if cached_md5 == file_md5:
            return

    # ... or get the file from S3
    __utils__["s3.query"](
        key=creds.key,
        keyid=creds.keyid,
        kms_keyid=creds.kms_keyid,
        bucket=bucket,
        service_url=creds.service_url,
        path=_quote(path),
        local_file=cached_file_path,
        verify_ssl=creds.verify_ssl,
        location=creds.location,
        path_style=creds.path_style,
        https_enable=creds.https_enable,
    )
Example #15
def set_fstab(
        name,
        device,
        fstype,
        opts='defaults',
        dump=0,
        pass_num=0,
        config='/etc/fstab',
        test=False,
        match_on='auto',
        **kwargs):
    '''
    Verify that this mount is represented in the fstab, change the mount
    to match the data passed, or add the mount if it is not present.

    CLI Example:

    .. code-block:: bash

        salt '*' mount.set_fstab /mnt/foo /dev/sdz1 ext4
    '''

    # Fix the opts type if it is a list
    if isinstance(opts, list):
        opts = ','.join(opts)

    # preserve arguments for updating
    entry_args = {
        'name': name,
        'device': device,
        'fstype': fstype,
        'opts': opts,
        'dump': dump,
        'pass_num': pass_num,
    }

    lines = []
    ret = None

    # Transform match_on into list--items will be checked later
    if isinstance(match_on, list):
        pass
    elif not isinstance(match_on, six.string_types):
        msg = 'match_on must be a string or list of strings'
        raise CommandExecutionError(msg)
    elif match_on == 'auto':
        # Try to guess right criteria for auto....
        # NOTE: missing some special fstypes here
        specialFSes = frozenset([
            'none',
            'tmpfs',
            'sysfs',
            'proc',
            'fusectl',
            'debugfs',
            'securityfs',
            'devtmpfs',
            'cgroup'])

        if fstype in specialFSes:
            match_on = ['name']
        else:
            match_on = ['device']
    else:
        match_on = [match_on]

    # generate entry and criteria objects, handle invalid keys in match_on
    entry = _fstab_entry(**entry_args)
    try:
        criteria = entry.pick(match_on)

    except KeyError:
        filterFn = lambda key: key not in _fstab_entry.fstab_keys
        invalid_keys = filter(filterFn, match_on)

        msg = 'Unrecognized keys in match_on: "{0}"'.format(invalid_keys)
        raise CommandExecutionError(msg)

    # parse file, use ret to cache status
    if not os.path.isfile(config):
        raise CommandExecutionError('Bad config file "{0}"'.format(config))

    try:
        with salt.utils.fopen(config, 'r') as ifile:
            for line in ifile:
                try:
                    if criteria.match(line):
                        # Note: If ret isn't None here,
                        # we've matched multiple lines
                        ret = 'present'
                        if entry.match(line):
                            lines.append(line)
                        else:
                            ret = 'change'
                            lines.append(str(entry))
                    else:
                        lines.append(line)

                except _fstab_entry.ParseError:
                    lines.append(line)

    except (IOError, OSError) as exc:
        msg = 'Couldn\'t read from {0}: {1}'
        raise CommandExecutionError(msg.format(config, str(exc)))

    # add line if not present or changed
    if ret is None:
        lines.append(str(entry))
        ret = 'new'

    if ret != 'present':  # ret in ['new', 'change']:
        if not salt.utils.test_mode(test=test, **kwargs):
            try:
                with salt.utils.fopen(config, 'w+') as ofile:
                    # The line was changed, commit it!
                    ofile.writelines(lines)
            except (IOError, OSError):
                msg = 'File not writable {0}'
                raise CommandExecutionError(msg.format(config))

    return ret
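One caveat in the except KeyError branch above: on Python 3, filter() returns a lazy iterator, so interpolating invalid_keys into the error message produces something like <filter object at 0x...> rather than the offending key names. A minimal sketch of the difference (hypothetical keys, not from the Salt source):

match_on = ['device', 'bogus_key']
fstab_keys = frozenset(['name', 'device', 'fstype', 'opts', 'dump', 'pass_num'])

# Python 3: the filter object's repr ends up in the message
invalid_keys = filter(lambda key: key not in fstab_keys, match_on)
print('Unrecognized keys in match_on: "{0}"'.format(invalid_keys))

# Materialising the filter (or using a list comprehension) shows the keys
invalid_keys = [key for key in match_on if key not in fstab_keys]
print('Unrecognized keys in match_on: "{0}"'.format(invalid_keys))   # ['bogus_key']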
Example #16
    def _install(self, args):
        '''
        Install a package from a repo
        '''
        if len(args) < 2:
            raise SPMInvocationError('A package must be specified')

        caller_opts = self.opts.copy()
        caller_opts['file_client'] = 'local'
        self.caller = salt.client.Caller(mopts=caller_opts)
        self.client = salt.client.get_local_client(self.opts['conf_file'])
        cache = salt.cache.Cache(self.opts)

        packages = args[1:]
        file_map = {}
        optional = []
        recommended = []
        to_install = []
        for pkg in packages:
            if pkg.endswith('.spm'):
                if self._pkgfiles_fun('path_exists', pkg):
                    comps = pkg.split('-')
                    comps = '-'.join(comps[:-2]).split('/')
                    pkg_name = comps[-1]

                    formula_tar = tarfile.open(pkg, 'r:bz2')
                    formula_ref = formula_tar.extractfile('{0}/FORMULA'.format(pkg_name))
                    formula_def = yaml.safe_load(formula_ref)

                    file_map[pkg_name] = pkg
                    to_, op_, re_ = self._check_all_deps(
                        pkg_name=pkg_name,
                        pkg_file=pkg,
                        formula_def=formula_def
                    )
                    to_install.extend(to_)
                    optional.extend(op_)
                    recommended.extend(re_)
                else:
                    raise SPMInvocationError('Package file {0} not found'.format(pkg))
            else:
                to_, op_, re_ = self._check_all_deps(pkg_name=pkg)
                to_install.extend(to_)
                optional.extend(op_)
                recommended.extend(re_)

        optional = set(filter(len, optional))
        if optional:
            self.ui.status('The following dependencies are optional:\n\t{0}\n'.format(
                '\n\t'.join(optional)
            ))
        recommended = set(filter(len, recommended))
        if recommended:
            self.ui.status('The following dependencies are recommended:\n\t{0}\n'.format(
                '\n\t'.join(recommended)
            ))

        to_install = set(filter(len, to_install))
        msg = 'Installing packages:\n\t{0}\n'.format('\n\t'.join(to_install))
        if not self.opts['assume_yes']:
            self.ui.confirm(msg)

        repo_metadata = self._get_repo_metadata()

        dl_list = {}
        for package in to_install:
            if package in file_map:
                self._install_indv_pkg(package, file_map[package])
            else:
                for repo in repo_metadata:
                    repo_info = repo_metadata[repo]
                    if package in repo_info['packages']:
                        dl_package = False
                        repo_ver = repo_info['packages'][package]['info']['version']
                        repo_rel = repo_info['packages'][package]['info']['release']
                        repo_url = repo_info['info']['url']
                        if package in dl_list:
                            # Check package version, replace if newer version
                            if repo_ver == dl_list[package]['version']:
                                # Version is the same, check release
                                if repo_rel > dl_list[package]['release']:
                                    dl_package = True
                                elif repo_rel == dl_list[package]['release']:
                                    # Version and release are the same, give
                                    # preference to local (file://) repos
                                    if dl_list[package]['source'].startswith('file://'):
                                        if not repo_url.startswith('file://'):
                                            dl_package = True
                            elif repo_ver > dl_list[package]['version']:
                                dl_package = True
                        else:
                            dl_package = True

                        if dl_package is True:
                            # Put together download directory
                            cache_path = os.path.join(
                                self.opts['spm_cache_dir'],
                                repo
                            )

                            # Put together download paths
                            dl_url = '{0}/{1}'.format(
                                repo_info['info']['url'],
                                repo_info['packages'][package]['filename']
                            )
                            out_file = os.path.join(
                                cache_path,
                                repo_info['packages'][package]['filename']
                            )
                            dl_list[package] = {
                                'version': repo_ver,
                                'release': repo_rel,
                                'source': dl_url,
                                'dest_dir': cache_path,
                                'dest_file': out_file,
                            }

        for package in dl_list:
            dl_url = dl_list[package]['source']
            cache_path = dl_list[package]['dest_dir']
            out_file = dl_list[package]['dest_file']

            # Make sure download directory exists
            if not os.path.exists(cache_path):
                os.makedirs(cache_path)

            # Download the package
            if dl_url.startswith('file://'):
                dl_url = dl_url.replace('file://', '')
                shutil.copyfile(dl_url, out_file)
            else:
                with salt.utils.fopen(out_file, 'w') as outf:
                    outf.write(self._query_http(dl_url, repo_info['info']))

        # First we download everything, then we install
        for package in dl_list:
            # Use each package's own downloaded file, not the out_file
            # left over from the last iteration of the download loop
            out_file = dl_list[package]['dest_file']
            # Kick off the install
            self._install_indv_pkg(package, out_file)
        return
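The set(filter(len, ...)) calls above drop empty strings from the dependency lists and de-duplicate whatever remains before anything is printed or installed. A one-line illustration with hypothetical package names:

# Empty strings are removed, duplicates collapse into the set.
deps = ['python-requests', '', 'python-yaml', 'python-requests', '']
print(set(filter(len, deps)))   # {'python-requests', 'python-yaml'}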
Example #17
    def _install(self, args):
        '''
        Install a package from a repo
        '''
        if len(args) < 2:
            raise SPMInvocationError('A package must be specified')

        cache = salt.cache.Cache(self.opts)

        packages = args[1:]
        file_map = {}
        optional = []
        recommended = []
        to_install = []
        for pkg in packages:
            if pkg.endswith('.spm'):
                if self._pkgfiles_fun('path_exists', pkg):
                    comps = pkg.split('-')
                    comps = '-'.join(comps[:-2]).split('/')
                    pkg_name = comps[-1]

                    formula_tar = tarfile.open(pkg, 'r:bz2')
                    formula_ref = formula_tar.extractfile(
                        '{0}/FORMULA'.format(pkg_name))
                    formula_def = yaml.safe_load(formula_ref)

                    file_map[pkg_name] = pkg
                    to_, op_, re_ = self._check_all_deps(
                        pkg_name=pkg_name,
                        pkg_file=pkg,
                        formula_def=formula_def)
                    to_install.extend(to_)
                    optional.extend(op_)
                    recommended.extend(re_)
                else:
                    raise SPMInvocationError(
                        'Package file {0} not found'.format(pkg))
            else:
                to_, op_, re_ = self._check_all_deps(pkg_name=pkg)
                to_install.extend(to_)
                optional.extend(op_)
                recommended.extend(re_)

        optional = set(filter(len, optional))
        if optional:
            self.ui.status(
                'The following dependencies are optional:\n\t{0}\n'.format(
                    '\n\t'.join(optional)))
        recommended = set(filter(len, recommended))
        if recommended:
            self.ui.status(
                'The following dependencies are recommended:\n\t{0}\n'.format(
                    '\n\t'.join(recommended)))

        to_install = set(filter(len, to_install))
        msg = 'Installing packages:\n\t{0}\n'.format('\n\t'.join(to_install))
        if not self.opts['assume_yes']:
            self.ui.confirm(msg)

        repo_metadata = self._get_repo_metadata()

        for package in to_install:
            if package in file_map:
                self._install_indv_pkg(package, file_map[package])
            else:
                for repo in repo_metadata:
                    repo_info = repo_metadata[repo]
                    if package in repo_metadata[repo]['packages']:
                        cache_path = '{0}/{1}'.format(
                            self.opts['spm_cache_dir'], repo)
                        # Download the package
                        dl_path = '{0}/{1}'.format(
                            repo_info['info']['url'],
                            repo_info['packages'][package]['filename'])
                        out_file = '{0}/{1}'.format(
                            cache_path,
                            repo_info['packages'][package]['filename'])
                        if not os.path.exists(cache_path):
                            os.makedirs(cache_path)

                        if dl_path.startswith('file://'):
                            dl_path = dl_path.replace('file://', '')
                            shutil.copyfile(dl_path, out_file)
                        else:
                            response = http.query(dl_path, text=True)
                            with salt.utils.fopen(out_file, 'w') as outf:
                                outf.write(response.get("text"))

                        # Kick off the install
                        self._install_indv_pkg(package, out_file)
        return
Example #18
    def _install(self, args):
        """
        Install a package from a repo
        """
        if len(args) < 2:
            raise SPMInvocationError("A package must be specified")

        caller_opts = self.opts.copy()
        caller_opts["file_client"] = "local"
        self.caller = salt.client.Caller(mopts=caller_opts)
        self.client = salt.client.get_local_client(self.opts["conf_file"])
        cache = salt.cache.Cache(self.opts)

        packages = args[1:]
        file_map = {}
        optional = []
        recommended = []
        to_install = []
        for pkg in packages:
            if pkg.endswith(".spm"):
                if self._pkgfiles_fun("path_exists", pkg):
                    comps = pkg.split("-")
                    comps = os.path.split("-".join(comps[:-2]))
                    pkg_name = comps[-1]

                    formula_tar = tarfile.open(pkg, "r:bz2")
                    formula_ref = formula_tar.extractfile(
                        "{0}/FORMULA".format(pkg_name))
                    formula_def = salt.utils.yaml.safe_load(formula_ref)

                    file_map[pkg_name] = pkg
                    to_, op_, re_ = self._check_all_deps(
                        pkg_name=pkg_name,
                        pkg_file=pkg,
                        formula_def=formula_def)
                    to_install.extend(to_)
                    optional.extend(op_)
                    recommended.extend(re_)
                    formula_tar.close()
                else:
                    raise SPMInvocationError(
                        "Package file {0} not found".format(pkg))
            else:
                to_, op_, re_ = self._check_all_deps(pkg_name=pkg)
                to_install.extend(to_)
                optional.extend(op_)
                recommended.extend(re_)

        optional = set(filter(len, optional))
        if optional:
            self.ui.status(
                "The following dependencies are optional:\n\t{0}\n".format(
                    "\n\t".join(optional)))
        recommended = set(filter(len, recommended))
        if recommended:
            self.ui.status(
                "The following dependencies are recommended:\n\t{0}\n".format(
                    "\n\t".join(recommended)))

        to_install = set(filter(len, to_install))
        msg = "Installing packages:\n\t{0}\n".format("\n\t".join(to_install))
        if not self.opts["assume_yes"]:
            self.ui.confirm(msg)

        repo_metadata = self._get_repo_metadata()

        dl_list = {}
        for package in to_install:
            if package in file_map:
                self._install_indv_pkg(package, file_map[package])
            else:
                for repo in repo_metadata:
                    repo_info = repo_metadata[repo]
                    if package in repo_info["packages"]:
                        dl_package = False
                        repo_ver = repo_info["packages"][package]["info"][
                            "version"]
                        repo_rel = repo_info["packages"][package]["info"][
                            "release"]
                        repo_url = repo_info["info"]["url"]
                        if package in dl_list:
                            # Check package version, replace if newer version
                            if repo_ver == dl_list[package]["version"]:
                                # Version is the same, check release
                                if repo_rel > dl_list[package]["release"]:
                                    dl_package = True
                                elif repo_rel == dl_list[package]["release"]:
                                    # Version and release are the same, give
                                    # preference to local (file://) repos
                                    if dl_list[package]["source"].startswith(
                                            "file://"):
                                        if not repo_url.startswith("file://"):
                                            dl_package = True
                            elif repo_ver > dl_list[package]["version"]:
                                dl_package = True
                        else:
                            dl_package = True

                        if dl_package is True:
                            # Put together download directory
                            cache_path = os.path.join(
                                self.opts["spm_cache_dir"], repo)

                            # Put together download paths
                            dl_url = "{0}/{1}".format(
                                repo_info["info"]["url"],
                                repo_info["packages"][package]["filename"],
                            )
                            out_file = os.path.join(
                                cache_path,
                                repo_info["packages"][package]["filename"])
                            dl_list[package] = {
                                "version": repo_ver,
                                "release": repo_rel,
                                "source": dl_url,
                                "dest_dir": cache_path,
                                "dest_file": out_file,
                            }

        for package in dl_list:
            dl_url = dl_list[package]["source"]
            cache_path = dl_list[package]["dest_dir"]
            out_file = dl_list[package]["dest_file"]

            # Make sure download directory exists
            if not os.path.exists(cache_path):
                os.makedirs(cache_path)

            # Download the package
            if dl_url.startswith("file://"):
                dl_url = dl_url.replace("file://", "")
                shutil.copyfile(dl_url, out_file)
            else:
                with salt.utils.files.fopen(out_file, "w") as outf:
                    outf.write(self._query_http(dl_url, repo_info["info"]))

        # First we download everything, then we install
        for package in dl_list:
            out_file = dl_list[package]["dest_file"]
            # Kick off the install
            self._install_indv_pkg(package, out_file)
        return
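One thing to keep in mind about the version-selection loop above: the > comparisons use whatever type the repo metadata supplies, and if versions arrive as plain strings the comparison is lexicographic, so '10.0' sorts before '9.0'. A quick hypothetical illustration:

print('10.0' > '9.0')                        # False: string comparison is lexicographic
print(tuple(map(int, '10.0'.split('.'))) >
      tuple(map(int, '9.0'.split('.'))))     # True: numeric comparison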
Example #19
def set_fstab(
    name,
    device,
    fstype,
    opts="defaults",
    dump=0,
    pass_num=0,
    config="/etc/fstab",
    test=False,
    match_on="auto",
    **kwargs
):
    """
    Verify that this mount is represented in the fstab, change the mount
    to match the data passed, or add the mount if it is not present.

    CLI Example:

    .. code-block:: bash

        salt '*' mount.set_fstab /mnt/foo /dev/sdz1 ext4
    """

    # Fix the opts type if it is a list
    if isinstance(opts, list):
        opts = ",".join(opts)

    # preserve arguments for updating
    entry_args = {"name": name, "device": device, "fstype": fstype, "opts": opts, "dump": dump, "pass_num": pass_num}

    lines = []
    ret = None

    # Transform match_on into list--items will be checked later
    if isinstance(match_on, list):
        pass
    elif not isinstance(match_on, six.string_types):
        msg = "match_on must be a string or list of strings"
        raise CommandExecutionError(msg)
    elif match_on == "auto":
        # Try to guess right criteria for auto....
        # NOTE: missing some special fstypes here
        specialFSes = frozenset(
            ["none", "tmpfs", "sysfs", "proc", "fusectl", "debugfs", "securityfs", "devtmpfs", "cgroup"]
        )

        if fstype in specialFSes:
            match_on = ["name"]
        else:
            match_on = ["device"]
    else:
        match_on = [match_on]

    # generate entry and criteria objects, handle invalid keys in match_on
    entry = _fstab_entry(**entry_args)
    try:
        criteria = entry.pick(match_on)

    except KeyError:
        filterFn = lambda key: key not in _fstab_entry.fstab_keys
        invalid_keys = filter(filterFn, match_on)

        msg = 'Unrecognized keys in match_on: "{0}"'.format(invalid_keys)
        raise CommandExecutionError(msg)

    # parse file, use ret to cache status
    if not os.path.isfile(config):
        raise CommandExecutionError('Bad config file "{0}"'.format(config))

    try:
        with salt.utils.fopen(config, "r") as ifile:
            for line in ifile:
                try:
                    if criteria.match(line):
                        # Note: If ret isn't None here,
                        # we've matched multiple lines
                        ret = "present"
                        if entry.match(line):
                            lines.append(line)
                        else:
                            ret = "change"
                            lines.append(str(entry))
                    else:
                        lines.append(line)

                except _fstab_entry.ParseError:
                    lines.append(line)

    except (IOError, OSError) as exc:
        msg = "Couldn't read from {0}: {1}"
        raise CommandExecutionError(msg.format(config, str(exc)))

    # add line if not present or changed
    if ret is None:
        lines.append(str(entry))
        ret = "new"

    if ret != "present":  # ret in ['new', 'change']:
        if not salt.utils.test_mode(test=test, **kwargs):
            try:
                with salt.utils.fopen(config, "w+") as ofile:
                    # The line was changed, commit it!
                    ofile.writelines(lines)
            except (IOError, OSError):
                msg = "File not writable {0}"
                raise CommandExecutionError(msg.format(config))

    return ret
Example #20
def set_fstab(
        name,
        device,
        fstype,
        opts='defaults',
        dump=0,
        pass_num=0,
        config='/etc/fstab',
        test=False,
        match_on='auto',
        **kwargs):
    '''
    Verify that this mount is represented in the fstab, change the mount
    to match the data passed, or add the mount if it is not present.

    CLI Example:

    .. code-block:: bash

        salt '*' mount.set_fstab /mnt/foo /dev/sdz1 ext4
    '''

    # Fix the opts type if it is a list
    if isinstance(opts, list):
        opts = ','.join(opts)

    # preserve arguments for updating
    entry_args = {
        'name': name,
        'device': device,
        'fstype': fstype,
        'opts': opts,
        'dump': dump,
        'pass_num': pass_num,
    }

    lines = []
    ret = None

    # Transform match_on into list--items will be checked later
    if isinstance(match_on, list):
        pass
    elif not isinstance(match_on, six.string_types):
        msg = 'match_on must be a string or list of strings'
        raise CommandExecutionError(msg)
    elif match_on == 'auto':
        # Try to guess right criteria for auto....
        # NOTE: missing some special fstypes here
        specialFSes = frozenset([
            'none',
            'tmpfs',
            'sysfs',
            'proc',
            'fusectl',
            'debugfs',
            'securityfs',
            'devtmpfs',
            'cgroup'])

        if fstype in specialFSes:
            match_on = ['name']
        else:
            match_on = ['device']
    else:
        match_on = [match_on]

    # generate entry and criteria objects, handle invalid keys in match_on
    entry = _fstab_entry(**entry_args)
    try:
        criteria = entry.pick(match_on)

    except KeyError:
        filterFn = lambda key: key not in _fstab_entry.fstab_keys
        invalid_keys = filter(filterFn, match_on)

        msg = 'Unrecognized keys in match_on: "{0}"'.format(invalid_keys)
        raise CommandExecutionError(msg)

    # parse file, use ret to cache status
    if not os.path.isfile(config):
        raise CommandExecutionError('Bad config file "{0}"'.format(config))

    try:
        with salt.utils.fopen(config, 'r') as ifile:
            for line in ifile:
                try:
                    if criteria.match(line):
                        # Note: If ret isn't None here,
                        # we've matched multiple lines
                        ret = 'present'
                        if entry.match(line):
                            lines.append(line)
                        else:
                            ret = 'change'
                            lines.append(str(entry))
                    else:
                        lines.append(line)

                except _fstab_entry.ParseError:
                    lines.append(line)

    except (IOError, OSError) as exc:
        msg = 'Couldn\'t read from {0}: {1}'
        raise CommandExecutionError(msg.format(config, str(exc)))

    # add line if not present or changed
    if ret is None:
        lines.append(str(entry))
        ret = 'new'

    if ret != 'present':  # ret in ['new', 'change']:
        if not salt.utils.test_mode(test=test, **kwargs):
            try:
                with salt.utils.fopen(config, 'w+') as ofile:
                    # The line was changed, commit it!
                    ofile.writelines(lines)
            except (IOError, OSError):
                msg = 'File not writable {0}'
                raise CommandExecutionError(msg.format(config))

    return ret
Example #21
    def _install(self, args):
        '''
        Install a package from a repo
        '''
        if len(args) < 2:
            raise SPMInvocationError('A package must be specified')

        packages = args[1:]
        file_map = {}
        optional = []
        recommended = []
        to_install = []
        for pkg in packages:
            if pkg.endswith('.spm'):
                if self._pkgfiles_fun('path_exists', pkg):
                    comps = pkg.split('-')
                    comps = '-'.join(comps[:-2]).split('/')
                    pkg_name = comps[-1]

                    formula_tar = tarfile.open(pkg, 'r:bz2')
                    formula_ref = formula_tar.extractfile('{0}/FORMULA'.format(pkg_name))
                    formula_def = yaml.safe_load(formula_ref)

                    file_map[pkg_name] = pkg
                    to_, op_, re_ = self._check_all_deps(
                        pkg_name=pkg_name,
                        pkg_file=pkg,
                        formula_def=formula_def
                    )
                    to_install.extend(to_)
                    optional.extend(op_)
                    recommended.extend(re_)
                else:
                    raise SPMInvocationError('Package file {0} not found'.format(pkg))
            else:
                to_, op_, re_ = self._check_all_deps(pkg_name=pkg)
                to_install.extend(to_)
                optional.extend(op_)
                recommended.extend(re_)

        optional = set(filter(len, optional))
        self.ui.status('The following dependencies are optional:\n\t{0}\n'.format(
            '\n\t'.join(optional)
        ))
        recommended = set(filter(len, recommended))
        self.ui.status('The following dependencies are recommended:\n\t{0}\n'.format(
            '\n\t'.join(recommended)
        ))

        to_install = set(filter(len, to_install))
        msg = 'Installing packages:\n\t{0}\n'.format('\n\t'.join(to_install))
        if not self.opts['assume_yes']:
            self.ui.confirm(msg)

        repo_metadata = self._get_repo_metadata()

        for package in to_install:
            if package in file_map:
                self._install_indv_pkg(package, file_map[package])
            else:
                for repo in repo_metadata:
                    repo_info = repo_metadata[repo]
                    if package in repo_metadata[repo]['packages']:
                        cache_path = '{0}/{1}'.format(
                            self.opts['spm_cache_dir'],
                            repo
                        )
                        # Download the package
                        dl_path = '{0}/{1}'.format(
                            repo_info['info']['url'],
                            repo_info['packages'][package]['filename']
                        )
                        out_file = '{0}/{1}'.format(
                            cache_path,
                            repo_info['packages'][package]['filename']
                        )
                        if not os.path.exists(cache_path):
                            os.makedirs(cache_path)

                        if dl_path.startswith('file://'):
                            dl_path = dl_path.replace('file://', '')
                            shutil.copyfile(dl_path, out_file)
                        else:
                            response = http.query(dl_path, text=True)
                            with salt.utils.fopen(out_file, 'w') as outf:
                                outf.write(response.get("text"))

                        # Kick off the install
                        self._install_indv_pkg(package, out_file)
        return
Example #22
def set_crypttab(name,
                 device,
                 password='******',
                 options='',
                 config='/etc/crypttab',
                 test=False,
                 match_on='name'):
    '''
    Verify that this device is represented in the crypttab, change the device to
    match the name passed, or add the name if it is not present.

    CLI Example:

    .. code-block:: bash

        salt '*' cryptdev.set_crypttab foo /dev/sdz1 mypassword swap,size=256
    '''

    # Fix the options type if it is not a string
    if options is None:
        options = ''
    elif isinstance(options, six.string_types):
        pass
    elif isinstance(options, list):
        options = ','.join(options)
    else:
        msg = 'options must be a string or list of strings'
        raise CommandExecutionError(msg)

    # preserve arguments for updating
    entry_args = {
        'name': name,
        'device': device,
        'password': password if password is not None else 'none',
        'options': options,
    }

    lines = []
    ret = None

    # Transform match_on into list--items will be checked later
    if isinstance(match_on, list):
        pass
    elif not isinstance(match_on, six.string_types):
        msg = 'match_on must be a string or list of strings'
        raise CommandExecutionError(msg)
    else:
        match_on = [match_on]

    # generate entry and criteria objects, handle invalid keys in match_on
    entry = _crypttab_entry(**entry_args)
    try:
        criteria = entry.pick(match_on)

    except KeyError:
        filterFn = lambda key: key not in _crypttab_entry.crypttab_keys
        invalid_keys = filter(filterFn, match_on)

        msg = 'Unrecognized keys in match_on: "{0}"'.format(invalid_keys)
        raise CommandExecutionError(msg)

    # parse file, use ret to cache status
    if not os.path.isfile(config):
        raise CommandExecutionError('Bad config file "{0}"'.format(config))

    try:
        with salt.utils.files.fopen(config, 'r') as ifile:
            for line in ifile:
                try:
                    if criteria.match(line):
                        # Note: If ret isn't None here,
                        # we've matched multiple lines
                        ret = 'present'
                        if entry.match(line):
                            lines.append(line)
                        else:
                            ret = 'change'
                            lines.append(str(entry))
                    else:
                        lines.append(line)

                except _crypttab_entry.ParseError:
                    lines.append(line)

    except (IOError, OSError) as exc:
        msg = 'Couldn\'t read from {0}: {1}'
        raise CommandExecutionError(msg.format(config, str(exc)))

    # add line if not present or changed
    if ret is None:
        lines.append(str(entry))
        ret = 'new'

    if ret != 'present':  # ret in ['new', 'change']:
        if not test:
            try:
                with salt.utils.files.fopen(config, 'w+') as ofile:
                    # The line was changed, commit it!
                    ofile.writelines(lines)
            except (IOError, OSError):
                msg = 'File not writable {0}'
                raise CommandExecutionError(msg.format(config))

    return ret