Code example #1
0
def fips_status():
    """
    Returns the status of fips on the currently running system.

    Returns a `str` of "enabled" if FIPS is enabled. Otherwise,
    returns a `str` of "disabled".

    CLI Example:

    .. code-block:: bash

        salt '*' ash.fips_status
    """
    try:
        # The kernel exposes '1' in this proc file when running in FIPS
        # mode; any other content means FIPS is off.
        with fopen('/proc/sys/crypto/fips_enabled', 'r') as fle:
            return 'enabled' if fle.read().strip() == '1' else 'disabled'
    except OSError:
        # In Python 3, IOError is an alias of OSError and
        # FileNotFoundError is a subclass of it, so a single OSError
        # handler covers both a missing and an unreadable proc file
        # (e.g. non-Linux systems).
        return 'disabled'
Code example #2
0
def _parse_oratab(sid):
    '''
    Return ORACLE_HOME for a given SID found in oratab.

    Note: only works with Unix-like minions.

    sid
        The Oracle SID whose home directory should be looked up.

    Returns the ORACLE_HOME field as a ``str``, or ``None`` when the SID
    is not present in the oratab file.

    Raises ``CommandExecutionError`` on platforms without an oratab file
    (e.g. Windows).
    '''
    kernel = __grains__.get('kernel')
    if kernel in ('Linux', 'AIX', 'FreeBSD', 'OpenBSD', 'NetBSD'):
        oratab = '/etc/oratab'
    elif kernel == 'SunOS':
        # The previous code used ``kernel in 'SunOS'`` — a substring test
        # that raises TypeError when the kernel grain is missing; an
        # equality check is what was intended.
        oratab = '/var/opt/oracle/oratab'
    else:
        # Windows has no oratab file
        raise CommandExecutionError(
            'No uri defined for {0} and oratab not available in this OS'.format(sid))
    with fopen(oratab, 'r') as f:
        # oratab lines are of the form SID:ORACLE_HOME:<Y|N>; '#' starts
        # a comment line.
        for line in f:
            if line.startswith('#'):
                continue
            fields = line.split(':')
            # Intentionally kept as a (loose) substring match on the SID
            # field for backwards compatibility with existing callers.
            if sid in fields[0]:
                return fields[1]
    return None
Code example #3
0
def extract(template_path, raw_text=None, raw_text_file=None, saltenv='base'):
    r'''
    Extracts the data entities from the unstructured
    raw text sent as input and returns the data
    mapping, processing using the TextFSM template.

    template_path
        The path to the TextFSM template.
        This can be specified using the absolute path
        to the file, or using one of the following URL schemes:

        - ``salt://``, to fetch the template from the Salt fileserver.
        - ``http://`` or ``https://``
        - ``ftp://``
        - ``s3://``
        - ``swift://``

    raw_text: ``None``
        The unstructured text to be parsed.

    raw_text_file: ``None``
        Text file to read, having the raw text to be parsed using the TextFSM template.
        Supports the same URL schemes as the ``template_path`` argument.

    saltenv: ``base``
        Salt fileserver environment from which to retrieve the file.
        Ignored if ``template_path`` is not a ``salt://`` URL.

    CLI Example:

    .. code-block:: bash

        salt '*' textfsm.extract salt://textfsm/juniper_version_template raw_text_file=s3://junos_ver.txt
        salt '*' textfsm.extract http://some-server/textfsm/juniper_version_template raw_text='Hostname: router.abc ... snip ...'

    Jinja template example:

    .. code-block:: jinja

        {%- set raw_text = 'Hostname: router.abc ... snip ...' -%}
        {%- set textfsm_extract = salt.textfsm.extract('https://some-server/textfsm/juniper_version_template', raw_text) -%}

    Raw text example:

    .. code-block:: text

        Hostname: router.abc
        Model: mx960
        JUNOS Base OS boot [9.1S3.5]
        JUNOS Base OS Software Suite [9.1S3.5]
        JUNOS Kernel Software Suite [9.1S3.5]
        JUNOS Crypto Software Suite [9.1S3.5]
        JUNOS Packet Forwarding Engine Support (M/T Common) [9.1S3.5]
        JUNOS Packet Forwarding Engine Support (MX Common) [9.1S3.5]
        JUNOS Online Documentation [9.1S3.5]
        JUNOS Routing Software Suite [9.1S3.5]

    TextFSM Example:

    .. code-block:: text

        Value Chassis (\S+)
        Value Required Model (\S+)
        Value Boot (.*)
        Value Base (.*)
        Value Kernel (.*)
        Value Crypto (.*)
        Value Documentation (.*)
        Value Routing (.*)

        Start
        # Support multiple chassis systems.
          ^\S+:$$ -> Continue.Record
          ^${Chassis}:$$
          ^Model: ${Model}
          ^JUNOS Base OS boot \[${Boot}\]
          ^JUNOS Software Release \[${Base}\]
          ^JUNOS Base OS Software Suite \[${Base}\]
          ^JUNOS Kernel Software Suite \[${Kernel}\]
          ^JUNOS Crypto Software Suite \[${Crypto}\]
          ^JUNOS Online Documentation \[${Documentation}\]
          ^JUNOS Routing Software Suite \[${Routing}\]

    Output example:

    .. code-block:: json

        {
            "comment": "",
            "result": true,
            "out": [
                {
                    "kernel": "9.1S3.5",
                    "documentation": "9.1S3.5",
                    "boot": "9.1S3.5",
                    "crypto": "9.1S3.5",
                    "chassis": "",
                    "routing": "9.1S3.5",
                    "base": "9.1S3.5",
                    "model": "mx960"
                }
            ]
        }
    '''
    ret = {'result': False, 'comment': '', 'out': None}
    log.debug('Using the saltenv: %s', saltenv)
    log.debug('Caching %s using the Salt fileserver', template_path)
    tpl_cached_path = __salt__['cp.cache_file'](template_path, saltenv=saltenv)
    if tpl_cached_path is False:
        # cp.cache_file returns False when the file could not be cached.
        ret['comment'] = 'Unable to read the TextFSM template from {}'.format(
            template_path)
        log.error(ret['comment'])
        return ret
    try:
        log.debug('Reading TextFSM template from cache path: %s',
                  tpl_cached_path)
        # Disabling pylint W8470 to not complain about fopen.
        # Unfortunately textFSM needs the file handle rather than the content...
        # pylint: disable=W8470
        # TextFSM consumes the template inside its constructor, so the
        # handle can be closed right after fsm_handler is built; the
        # previous version of this code never closed it (fd leak).
        with fopen(tpl_cached_path, 'r') as tpl_file_handle:
            # pylint: enable=W8470
            log.debug(tpl_file_handle.read())
            # The debug read above consumed the whole file; rewind so
            # TextFSM can read the template from the start.
            tpl_file_handle.seek(0)
            fsm_handler = textfsm.TextFSM(tpl_file_handle)
    except textfsm.TextFSMTemplateError as tfte:
        log.error('Unable to parse the TextFSM template', exc_info=True)
        ret['comment'] = 'Unable to parse the TextFSM template from {}: {}. Please check the logs.'.format(
            template_path, tfte)
        return ret
    if not raw_text and raw_text_file:
        # Fall back to fetching the raw input from a (possibly remote) file.
        log.debug('Trying to read the raw input from %s', raw_text_file)
        raw_text = __salt__['cp.get_file_str'](raw_text_file, saltenv=saltenv)
        if raw_text is False:
            ret['comment'] = 'Unable to read from {}. Please specify a valid input file or text.'.format(
                raw_text_file)
            log.error(ret['comment'])
            return ret
    if not raw_text:
        ret['comment'] = 'Please specify a valid input file or text.'
        log.error(ret['comment'])
        return ret
    log.debug('Processing the raw text:')
    log.debug(raw_text)
    objects = fsm_handler.ParseText(raw_text)
    ret['out'] = _clitable_to_dict(objects, fsm_handler)
    ret['result'] = True
    return ret
Code example #4
0
def restore(name=None, clean=False, **kwargs):
    '''
    Restore the packages and repositories recorded in a frozen state.

    The freeze files are read back and compared against the current
    system: repositories and packages missing from the system are
    installed/added, while packages and repositories absent from the
    frozen state are removed.

    Because this module is built on top of the pkg module, any extra
    keyword arguments are forwarded to the underlying pkg functions:
    ``pkg.list_repos``, ``pkg.mod_repo``, ``pkg.list_pkgs``,
    ``pkg.install``, ``pkg.remove`` and ``pkg.del_repo``.

    name
        Name of the frozen state. Optional.

    clean
        If True remove the frozen information YAML from the cache

        .. versionadded:: 3000

    CLI Example:

    .. code-block:: bash

        salt '*' freezer.restore
        salt '*' freezer.restore root=/chroot

    '''
    if not status(name):
        raise CommandExecutionError('Frozen state not found.')

    frozen_pkgs, frozen_repos = {}, {}
    for path, store in zip(_paths(name), (frozen_pkgs, frozen_repos)):
        with fopen(path) as source:
            store.update(json.load(source))

    # Ordering matters: a missing package may come from a repo that is
    # itself missing (so the repo must be added first), or from a repo
    # that is present now but scheduled for removal.  Hence:
    #   1. add missing repos
    #   2. add missing packages
    #   3. remove extra packages
    #   4. remove extra repos
    safe_kwargs = clean_kwargs(**kwargs)

    # The information stored by list_XXX is expected to match its
    # mod_XXX counterpart; when it does not, recovery is only partial.
    ret = {
        'pkgs': {'add': [], 'remove': []},
        'repos': {'add': [], 'remove': []},
        'comment': [],
    }

    _add_missing_repositories(frozen_repos, ret, **safe_kwargs)
    _add_missing_packages(frozen_pkgs, ret, **safe_kwargs)
    _remove_extra_packages(frozen_pkgs, ret, **safe_kwargs)
    _remove_extra_repositories(frozen_repos, ret, **safe_kwargs)

    # Drop the cached freeze files, but only after a fully clean run.
    if clean and not ret['comment']:
        for path in _paths(name):
            os.remove(path)

    return ret
Code example #5
0
    def run(self):
        '''
        Execute salt-run.

        Entry point for the salt-sproxy CLI: parses the command line,
        handles the informational sub-commands (config dump, install
        path, file_roots helpers), optionally syncs the bundled Runner /
        grain / module / roster / proxy / executor directories via
        ``saltutil.sync_*``, and finally invokes the ``proxy.execute``
        Runner with the user's target, function and CLI options packed
        as keyword arguments.
        '''
        self.parse_args()

        # --config-dump: print the resolved configuration as YAML and stop.
        if self.config.get('config_dump'):
            sys.stdout.write(safe_dump(self.config, default_flow_style=False))
            return self.config

        # Setup file logging!
        self.setup_logfile_logger()
        verify_log(self.config)
        profiling_enabled = self.options.profiling_enabled
        # Directory where salt-sproxy is installed; used below to expose
        # the bundled _runners/_modules/_roster/_proxy/_executors dirs.
        curpath = os.path.dirname(os.path.realpath(__file__))
        # A saltenv given on the CLI wins over the config-file one; fall
        # back to 'base' when neither is set.
        saltenv = self.config.get('saltenv_cli', self.config.get('saltenv'))
        if not saltenv:
            saltenv = 'base'
        self.config['saltenv'] = saltenv
        if self.config.get('pillar_root'):
            log.info(
                'Setting and using %s as the Pillar root', self.config['pillar_root']
            )
            self.config['pillar_roots'] = {saltenv: self.config['pillar_root']}
        if self.config.get('file_root'):
            log.info(
                'Setting and using %s as the Salt file root', self.config['file_root']
            )
            # NOTE(review): this assigns a dict to the singular 'file_root'
            # key, while Salt's standard option is 'file_roots' (compare the
            # 'pillar_roots' branch above) — confirm whether 'file_roots'
            # was intended here.
            self.config['file_root'] = {saltenv: self.config['file_root']}
        # --installation-path: just print where salt-sproxy lives.
        if self.config.get('installation_path'):
            salt.utils.stringutils.print_cli(curpath)
            return
        # --display-file-roots: print copy/paste-able config snippets for
        # wiring this installation into the Master's file_roots.
        if self.config.get('display_file_roots'):
            salt.utils.stringutils.print_cli(
                'salt-sproxy is installed at: {}'.format(curpath)
            )
            salt.utils.stringutils.print_cli(
                '\nYou can configure the file_roots on the Master, e.g.,\n'
            )
            salt.utils.stringutils.print_cli(
                'file_roots:\n  {0}:\n    - {1}'.format(saltenv, curpath)
            )
            salt.utils.stringutils.print_cli('\n\nOr only for the Runners:\n')
            salt.utils.stringutils.print_cli(
                'runner_dirs:\n  - {}/_runners'.format(curpath)
            )
            return
        # --save-file-roots: rewrite the Master config file in place, adding
        # this installation to file_roots and runner_dirs, then sync.
        if self.config.get('save_file_roots'):
            updated = False
            with fopen(self.config['conf_file'], 'r+') as master_fp:
                master_cfg = safe_load(master_fp)
                if not master_cfg:
                    master_cfg = {}
                file_roots = master_cfg.get('file_roots', {saltenv: []}).get(
                    saltenv, []
                )
                runner_dirs = master_cfg.get('runner_dirs', [])
                sproxy_runners = os.path.join(curpath, '_runners')
                if curpath not in file_roots:
                    file_roots.append(curpath)
                    master_cfg['file_roots'] = {saltenv: file_roots}
                    updated = True
                    salt.utils.stringutils.print_cli(
                        '{} added to the file_roots:\n'.format(curpath)
                    )
                    salt.utils.stringutils.print_cli(
                        'file_roots:\n  {0}\n    - {1}\n'.format(
                            saltenv, '\n    -'.join(file_roots)
                        )
                    )
                if sproxy_runners not in runner_dirs:
                    runner_dirs.append(sproxy_runners)
                    master_cfg['runner_dirs'] = runner_dirs
                    updated = True
                    salt.utils.stringutils.print_cli(
                        '{} added to runner_dirs:\n'.format(sproxy_runners)
                    )
                    salt.utils.stringutils.print_cli(
                        'runner_dirs:\n  - {0}'.format('\n  - '.join(runner_dirs))
                    )
                if updated:
                    # Rewind and overwrite the config with the merged
                    # settings.  NOTE(review): the file is not truncated
                    # first, so a dump shorter than the original would leave
                    # trailing bytes — confirm this is acceptable here.
                    master_fp.seek(0)
                    safe_dump(master_cfg, master_fp, default_flow_style=False)
                    log.debug('Syncing Runners on the Master')
                    runner_client = salt.runner.RunnerClient(self.config)
                    sync_runners = runner_client.cmd(
                        'saltutil.sync_all',
                        kwarg={'saltenv': saltenv},
                        print_event=False,
                    )
                    log.debug('saltutil.sync_all output:')
                    log.debug(sync_runners)
                else:
                    salt.utils.stringutils.print_cli(
                        'The {} path is already included into the file_roots and runner_dirs'.format(
                            curpath
                        )
                    )
                salt.utils.stringutils.print_cli(
                    '\nNow you can start using salt-sproxy for '
                    'event-driven automation, and the Salt REST API.\n'
                    'See https://salt-sproxy.readthedocs.io/en/latest/salt_api.html'
                    '\nand https://salt-sproxy.readthedocs.io/en/latest/events.html '
                    'for more details.'
                )
            return
        # The code below executes the Runner sequence, but it swaps the function
        # to be invoked, and instead call ``napalm.execute``, passing the
        # function requested by the user from the CLI, as an argument.
        # The same goes with the CLI options that are sent as kwargs to the
        # proxy Runner.
        tgt = self.config['tgt']
        fun = self.config['fun']
        args = self.config['arg']
        kwargs = {}
        # Default the outputter to 'highstate' for state functions, unless
        # the user already chose one.
        if 'output' not in self.config and fun in (
            'state.sls',
            'state.apply',
            'state.highstate',
        ):
            self.config['output'] = 'highstate'
        kwargs['progress'] = self.config.pop('progress', False)
        # To be able to reuse the proxy Runner (which is not yet available
        # natively in Salt), we can override the ``runner_dirs`` configuration
        # option to tell Salt to load that Runner too. This way, we can also
        # load other types of modules that may be required or we provide fixes
        # or backports - for example the Ansible Roster which doesn't work fine
        # pre Salt 2018.3 (in case anyone would like to use it).
        file_roots = self.config.get('file_roots', {saltenv: []})
        if saltenv not in file_roots:
            file_roots[saltenv] = []
        file_roots[saltenv].append(curpath)
        self.config['file_roots'] = file_roots
        runner_dirs = self.config.get('runner_dirs', [])
        runner_path = os.path.join(curpath, '_runners')
        runner_dirs.append(runner_path)
        self.config['runner_dirs'] = runner_dirs
        runner_client = None
        # Which bundled extension-module types to sync before running:
        # grains/modules/roster default to on, the rest are opt-in.
        sync_all = self.config.get('sync_all', False)
        sync_grains = self.config.get('sync_grains', True)
        sync_modules = self.config.get('sync_modules', True)
        sync_roster = self.config.get('sync_roster', True)
        sync_proxy = self.config.get('sync_proxy', False)
        sync_executors = self.config.get('sync_executors', False)
        kwargs.update(
            {
                'sync_all': sync_all,
                'sync_roster': sync_roster,
                'sync_modules': sync_modules,
            }
        )
        # A RunnerClient is only needed when at least one sync is requested.
        if any(
            [
                sync_all,
                sync_grains,
                sync_modules,
                sync_roster,
                sync_proxy,
                sync_executors,
            ]
        ):
            runner_client = salt.runner.RunnerClient(self.config)
        if sync_all:
            log.debug('Sync all')
            sync_all_ret = runner_client.cmd(
                'saltutil.sync_all', kwarg={'saltenv': saltenv}, print_event=False
            )
            log.debug(sync_all_ret)
        if sync_grains and not sync_all:
            log.debug('Syncing grains')
            sync_grains_ret = runner_client.cmd(
                'saltutil.sync_grains',
                kwarg={
                    'saltenv': saltenv,
                    'extmod_whitelist': ','.join(
                        self.config.get('whitelist_grains', [])
                    ),
                    'extmod_blacklist': ','.join(self.config.get('disable_grains', [])),
                },
                print_event=False,
            )
            log.debug(sync_grains_ret)
        if self.config.get('module_dirs_cli'):
            log.debug(
                'Loading execution modules from the dirs provided via --module-dirs'
            )
            module_dirs = self.config.get('module_dirs', [])
            module_dirs.extend(self.config['module_dirs_cli'])
            self.config['module_dirs'] = module_dirs
        if sync_modules and not sync_all:
            # Don't sync modules by default
            log.debug('Syncing modules')
            module_dirs = self.config.get('module_dirs', [])
            module_path = os.path.join(curpath, '_modules')
            module_dirs.append(module_path)
            self.config['module_dirs'] = module_dirs
            # No need to explicitly load the modules here, as during runtime,
            # Salt is anyway going to load the modules on the fly.
            sync_modules_ret = runner_client.cmd(
                'saltutil.sync_modules',
                kwarg={
                    'saltenv': saltenv,
                    'extmod_whitelist': ','.join(
                        self.config.get('whitelist_modules', [])
                    ),
                    'extmod_blacklist': ','.join(
                        self.config.get('disable_modules', [])
                    ),
                },
                print_event=False,
            )
            log.debug(sync_modules_ret)
        # Resync Roster module to load the ones we have here in the library, and
        # potentially others provided by the user in their environment
        if sync_roster and not sync_all and self.config.get('roster'):
            # Sync Rosters by default
            log.debug('Syncing roster')
            roster_dirs = self.config.get('roster_dirs', [])
            roster_path = os.path.join(curpath, '_roster')
            roster_dirs.append(roster_path)
            self.config['roster_dirs'] = roster_dirs
            sync_roster_ret = runner_client.cmd(
                'saltutil.sync_roster',
                kwarg={'saltenv': saltenv, 'extmod_whitelist': self.config['roster']},
                print_event=False,
            )
            log.debug(sync_roster_ret)
        if sync_proxy and not sync_all:
            log.debug('Syncing Proxy modules')
            proxy_dirs = self.config.get('proxy_dirs', [])
            proxy_path = os.path.join(curpath, '_proxy')
            proxy_dirs.append(proxy_path)
            self.config['proxy_dirs'] = proxy_dirs
            sync_proxy_ret = runner_client.cmd(
                'saltutil.sync_proxymodules',
                kwarg={
                    'saltenv': saltenv,
                    'extmod_whitelist': ','.join(
                        self.config.get('whitelist_proxys', [])
                    ),
                    'extmod_blacklist': ','.join(self.config.get('disable_proxys', [])),
                },
                print_event=False,
            )
            log.debug(sync_proxy_ret)
        if sync_executors and not sync_all:
            log.debug('Syncing Executors modules')
            executor_dirs = self.config.get('executor_dirs', [])
            executor_path = os.path.join(curpath, '_executors')
            executor_dirs.append(executor_path)
            self.config['executor_dirs'] = executor_dirs
            sync_executors_ret = runner_client.cmd(
                'saltutil.sync_executors',
                kwarg={
                    'saltenv': saltenv,
                    'extmod_whitelist': ','.join(
                        self.config.get('whitelist_executors', [])
                    ),
                    'extmod_blacklist': ','.join(
                        self.config.get('disable_executors', [])
                    ),
                },
                print_event=False,
            )
            log.debug(sync_executors_ret)
        if self.config.get('states_dir'):
            states_dirs = self.config.get('states_dirs', [])
            states_dirs.append(self.config['states_dir'])
            self.config['states_dirs'] = states_dirs
        # Swap the requested function for the proxy Runner entry point; the
        # original function name travels below as a positional argument.
        self.config['fun'] = 'proxy.execute'
        tmp_args = args[:]
        # Pull a CLI-provided kwargs dict (marked with __kwarg__) out of the
        # positional args and make it the kwargs base.
        # NOTE(review): popping from ``args`` while enumerating the copy
        # uses pre-pop indices, and a later __kwarg__ dict replaces (not
        # merges) the 'progress' key set above — presumably at most one such
        # dict is ever present; confirm.
        for index, arg in enumerate(tmp_args):
            if isinstance(arg, dict) and '__kwarg__' in arg:
                args.pop(index)
                kwargs = arg
        kwargs['__kwarg__'] = True
        # Map the CLI target-type flags onto tgt_type; 'glob' when none set.
        tgt_types = (
            'compound',
            'list',
            'grain',
            'pcre',
            'grain_pcre',
            'pillar_pcre',
            'pillar_target',
            'nodegroup',
        )
        kwargs['tgt_type'] = 'glob'
        for tgt_type in tgt_types:
            if hasattr(self.options, tgt_type) and getattr(self.options, tgt_type):
                kwargs['tgt_type'] = tgt_type
        # Plain pass-through CLI options, copied only when explicitly set
        # (None means "not provided").
        kwargs_opts = (
            'preview_target',
            'batch_size',
            'batch_wait',
            'roster',
            'timeout',
            'static',
            'no_connect',
            'failhard',
            'summary',
            'verbose',
            'show_jid',
            'hide_timeout',
            'progress',
            'returner',
            'target_cache',
            'returner_config',
            'returner_kwargs',
        )
        for kwargs_opt in kwargs_opts:
            if getattr(self.options, kwargs_opt) is not None:
                kwargs[kwargs_opt] = getattr(self.options, kwargs_opt)
        # Negated CLI flags: a truthy --no-X option turns the corresponding
        # positive Runner kwarg off.
        reverse_opts = {
            # option_name: runner_kwarg
            'no_cached_grains': 'use_cached_grains',
            'no_cached_pillar': 'use_cached_pillar',
            'no_grains': 'with_grains',
            'no_pillar': 'with_pillar',
            'dont_cache_grains': 'cache_grains',
            'dont_cache_pillar': 'cache_pillar',
        }
        for opt, kwarg in six.iteritems(reverse_opts):
            if getattr(self.options, opt):
                kwargs[kwarg] = False
        kwargs['events'] = self.config.get('events', False)
        kwargs['use_existing_proxy'] = self.config.get('use_existing_proxy', False)
        kwargs['test_ping'] = self.config.get('test_ping', False)
        kwargs['target_cache_timeout'] = self.config.get(
            'target_cache_timeout', 60
        )  # seconds
        kwargs['args'] = args
        kwargs['default_grains'] = self.config.get(
            'sproxy_grains',
            self.config.get('default_grains', self.config.get('grains')),
        )
        kwargs['default_pillar'] = self.config.get(
            'sproxy_pillar',
            self.config.get('default_pillar', self.config.get('pillar')),
        )
        kwargs['preload_targeting'] = self.config.get('preload_targeting', False)
        kwargs['invasive_targeting'] = self.config.get('invasive_targeting', False)
        kwargs['failhard'] = self.config.get('failhard', False)
        # The Runner will effectively execute proxy.execute(tgt, fun, kwargs).
        self.config['arg'] = [tgt, fun, kwargs]
        runner = salt.runner.Runner(self.config)

        # NOTE(review): 'doc' defaults to True here, so this branch also
        # runs when the key is absent — presumably the arg parser always
        # sets it; confirm.
        if self.config.get('doc', True):
            # late import as salt.loader adds up some execution time, and we
            # don't want that, but only when displaying docs.
            from salt.loader import utils, grains, minion_mods

            runner.opts['fun'] = fun
            runner.opts['grains'] = grains(runner.opts)
            runner._functions = minion_mods(runner.opts, utils=utils(runner.opts))

        # Run this here so SystemExit isn't raised anywhere else when
        # someone tries to use the runners via the python API
        try:
            if check_user(self.config['user']):
                pr = activate_profile(profiling_enabled)
                try:
                    ret = runner.run()
                    # In older versions ret['data']['retcode'] was used
                    # for signaling the return code. This has been
                    # changed for the orchestrate runner, but external
                    # runners might still use it. For this reason, we
                    # also check ret['data']['retcode'] if
                    # ret['retcode'] is not available.
                    if 'retcode' in runner.context:
                        self.exit(runner.context['retcode'])
                    if isinstance(ret, dict) and 'retcode' in ret:
                        self.exit(ret['retcode'])
                    elif isinstance(ret, dict) and 'retcode' in ret.get('data', {}):
                        self.exit(ret['data']['retcode'])
                finally:
                    # Always flush/stop the profiler, even when exiting early.
                    output_profile(
                        pr, stats_path=self.options.profiling_path, stop=True
                    )

        except SaltClientError as exc:
            raise SystemExit from exc
Code example #6
0
def restore(name=None, **kwargs):
    '''
    Make sure that the system contains the packages and repos from a
    frozen state.

    Read the list of packages and repositories from the freeze file,
    and compare it with the current list of packages and repos. If
    there is any difference, all the missing packages and repos will
    be installed, and all the extra packages and repos will be
    removed.

    As this module is built on top of the pkg module, the user can
    send extra attributes to the underlying pkg module via kwargs.
    This function will call ``pkg.list_repos``, ``pkg.mod_repo``,
    ``pkg.list_pkgs``, ``pkg.install``, ``pkg.remove`` and
    ``pkg.del_repo``, and any additional arguments will be passed
    through to those functions.

    name
        Name of the frozen state. Optional.

    CLI Example:

    .. code-block:: bash

        salt '*' freezer.restore
        salt '*' freezer.restore root=/chroot

    '''
    if not status(name):
        raise CommandExecutionError('Frozen state not found.')

    # Load the frozen package and repository information from the two
    # freeze files. Use a loop variable distinct from the ``name``
    # parameter so the frozen state name is not shadowed.
    frozen_pkgs = {}
    frozen_repos = {}
    for path, content in zip(_paths(name), (frozen_pkgs, frozen_repos)):
        with fopen(path) as fp:
            content.update(json.load(fp))

    # The ordering of removing or adding packages and repos can be
    # relevant, as maybe some missing package comes from a repo that
    # is also missing, so it cannot be installed. But it can also
    # happen that a missing package comes from a repo that is
    # present, but will be removed.
    #
    # So the proposed order is:
    #   - Add missing repos
    #   - Add missing packages
    #   - Remove extra packages
    #   - Remove extra repos

    safe_kwargs = clean_kwargs(**kwargs)

    # Note that we expect that the information stored in list_XXX
    # match with the mod_XXX counterpart. If this is not the case the
    # recovery will be partial.

    res = {
        'pkgs': {
            'add': [],
            'remove': []
        },
        'repos': {
            'add': [],
            'remove': []
        },
        'comment': [],
    }

    # Add missing repositories
    repos = __salt__['pkg.list_repos'](**safe_kwargs)
    missing_repos = set(frozen_repos) - set(repos)
    for repo in missing_repos:
        try:
            # In Python 2 we cannot do advanced destructuring, so we
            # need to create a temporary dictionary that will merge
            # all the parameters
            _tmp_kwargs = frozen_repos[repo].copy()
            _tmp_kwargs.update(safe_kwargs)
            __salt__['pkg.mod_repo'](repo, **_tmp_kwargs)
            res['repos']['add'].append(repo)
            log.info('Added missing repository %s', repo)
        except Exception as e:  # pylint: disable=broad-except
            # Best effort: record the failure and keep restoring.
            msg = 'Error adding %s repository: %s'
            log.error(msg, repo, e)
            res['comment'].append(msg % (repo, e))

    # Add missing packages
    # NOTE: we can remove the `for` using `pkgs`. This will improve
    # performance, but I want to have a more detailed report of what
    # packages are installed or failed.
    pkgs = __salt__['pkg.list_pkgs'](**safe_kwargs)
    missing_pkgs = set(frozen_pkgs) - set(pkgs)
    for pkg in missing_pkgs:
        try:
            __salt__['pkg.install'](name=pkg, **safe_kwargs)
            res['pkgs']['add'].append(pkg)
            log.info('Added missing package %s', pkg)
        except Exception as e:  # pylint: disable=broad-except
            msg = 'Error adding %s package: %s'
            log.error(msg, pkg, e)
            res['comment'].append(msg % (pkg, e))

    # Remove extra packages
    pkgs = __salt__['pkg.list_pkgs'](**safe_kwargs)
    extra_pkgs = set(pkgs) - set(frozen_pkgs)
    for pkg in extra_pkgs:
        try:
            __salt__['pkg.remove'](name=pkg, **safe_kwargs)
            res['pkgs']['remove'].append(pkg)
            log.info('Removed extra package %s', pkg)
        except Exception as e:  # pylint: disable=broad-except
            msg = 'Error removing %s package: %s'
            log.error(msg, pkg, e)
            res['comment'].append(msg % (pkg, e))

    # Remove extra repositories
    repos = __salt__['pkg.list_repos'](**safe_kwargs)
    extra_repos = set(repos) - set(frozen_repos)
    for repo in extra_repos:
        try:
            __salt__['pkg.del_repo'](repo, **safe_kwargs)
            res['repos']['remove'].append(repo)
            log.info('Removed extra repository %s', repo)
        except Exception as e:  # pylint: disable=broad-except
            msg = 'Error removing %s repository: %s'
            log.error(msg, repo, e)
            res['comment'].append(msg % (repo, e))

    return res
コード例 #7
0
ファイル: test_pillar.py プロジェクト: wololowarrior/salt
    def test_relative_include(self, tempdir):
        """Compiled pillar should resolve ``include`` entries given as
        relative (leading-dot) names, absolute names, and dotted names
        nested several directories deep, merging all keys together."""
        # Arrange: mapping of path components (relative to tempdir) to
        # the SLS body to write there. Bodies are dedented before being
        # written, exactly as the hand-rolled version did.
        sls_sources = {
            ("top.sls",): """
                base:
                  '*':
                    - includer
                    - simple_includer
                    - includes.with.more.depth
            """,
            ("includer", "init.sls"): """
                include:
                  - .this
                  - includer.that
            """,
            ("includer", "this.sls"): """
                this:
                    is all good
            """,
            ("includer", "that.sls"): """
                that:
                    is also all good
            """,
            ("simple_includer.sls",): """
                include:
                  - .simple
                  - super_simple
            """,
            ("simple.sls",): """
                simple:
                  simon
            """,
            ("super_simple.sls",): """
                super simple:
                  a caveman
            """,
            ("includes", "with", "more", "depth.sls"): """
                include:
                  - .ramble
                  - includes.with.more.doors

                mordor:
                    has dark depths
            """,
            ("includes", "with", "more", "ramble.sls"): """
                found:
                    my precious
            """,
            ("includes", "with", "more", "doors.sls"): """
                mojo:
                    bad risin'
            """,
        }
        for parts, body in sls_sources.items():
            sls_path = os.path.join(tempdir, *parts)
            sls_dir = os.path.dirname(sls_path)
            if not os.path.isdir(sls_dir):
                os.makedirs(sls_dir)
            with fopen(sls_path, "w") as sls_file:
                print(textwrap.dedent(body), file=sls_file)

        opts = {
            "optimization_order": [0, 1, 2],
            "renderer": "yaml",
            "renderer_blacklist": [],
            "renderer_whitelist": [],
            "state_top": "top.sls",
            "pillar_roots": {
                "base": [tempdir]
            },
            "extension_modules": "",
            "saltenv": "base",
            "file_roots": [],
            "file_ignore_regex": None,
            "file_ignore_glob": None,
        }
        grains = {
            "os": "Ubuntu",
            "os_family": "Debian",
            "oscodename": "raring",
            "osfullname": "Ubuntu",
            "osrelease": "13.04",
            "kernel": "Linux",
        }
        pillar = salt.pillar.Pillar(opts, grains, "minion", "base")
        # Make sure that confirm_top.confirm_top returns True
        pillar.matchers["confirm_top.confirm_top"] = lambda *x, **y: True

        # Act
        compiled_pillar = pillar.compile_pillar()

        # Assert: every included file contributed its key.
        expected = {
            "this": "is all good",
            "that": "is also all good",
            "simple": "simon",
            "super simple": "a caveman",
            "mordor": "has dark depths",
            "found": "my precious",
            "mojo": "bad risin'",
        }
        for pillar_key, pillar_value in expected.items():
            self.assertEqual(compiled_pillar[pillar_key], pillar_value)
コード例 #8
0
ファイル: test_pillar.py プロジェクト: fake-name/salt
    def test_relative_include(self, tempdir):
        '''
        Compiled pillar should resolve ``include`` entries given as
        relative (leading-dot) names, absolute names, and dotted names
        nested several directories deep, merging all keys together.
        '''
        join = os.path.join
        with fopen(join(tempdir, 'top.sls'), 'w') as f:
            print(
                textwrap.dedent('''
                    base:
                      '*':
                        - includer
                        - simple_includer
                        - includes.with.more.depth
                '''),
                file=f,
            )
        includer_dir = join(tempdir, 'includer')
        os.makedirs(includer_dir)
        with fopen(join(includer_dir, 'init.sls'), 'w') as f:
            print(
                textwrap.dedent('''
                    include:
                      - .this
                      - includer.that
                '''),
                file=f,
            )
        with fopen(join(includer_dir, 'this.sls'), 'w') as f:
            print(
                textwrap.dedent('''
                    this:
                        is all good
                '''),
                file=f,
            )
        with fopen(join(includer_dir, 'that.sls'), 'w') as f:
            print(
                textwrap.dedent('''
                    that:
                        is also all good
                '''),
                file=f,
            )

        with fopen(join(tempdir, 'simple_includer.sls'), 'w') as simpleincluder:
            print(
                textwrap.dedent('''
                    include:
                      - .simple
                      - super_simple
                '''),
                file=simpleincluder,
            )
        with fopen(join(tempdir, 'simple.sls'), 'w') as f:
            print(
                textwrap.dedent('''
                    simple:
                      simon
                '''),
                file=f,
            )
        with fopen(join(tempdir, 'super_simple.sls'), 'w') as f:
            print(
                textwrap.dedent('''
                    super simple:
                      a caveman
                '''),
                file=f,
            )

        depth_dir = join(tempdir, 'includes', 'with', 'more')
        os.makedirs(depth_dir)
        with fopen(join(depth_dir, 'depth.sls'), 'w') as f:
            print(
                textwrap.dedent('''
                    include:
                      - .ramble
                      - includes.with.more.doors

                    mordor:
                        has dark depths
                '''),
                file=f,
            )

        with fopen(join(depth_dir, 'ramble.sls'), 'w') as f:
            print(
                textwrap.dedent('''
                    found:
                        my precious
                '''),
                file=f,
            )

        with fopen(join(depth_dir, 'doors.sls'), 'w') as f:
            print(
                textwrap.dedent('''
                    mojo:
                        bad risin'
                '''),
                file=f,
            )
        opts = {
            'optimization_order': [0, 1, 2],
            'renderer': 'yaml',
            'renderer_blacklist': [],
            'renderer_whitelist': [],
            'state_top': 'top.sls',
            'pillar_roots': {'base': [tempdir]},
            'extension_modules': '',
            'saltenv': 'base',
            'file_roots': [],
            'file_ignore_regex': None,
            'file_ignore_glob': None,
        }
        grains = {
            'os': 'Ubuntu',
            'os_family': 'Debian',
            'oscodename': 'raring',
            'osfullname': 'Ubuntu',
            'osrelease': '13.04',
            'kernel': 'Linux',
        }
        pillar = salt.pillar.Pillar(opts, grains, 'minion', 'base')
        # Make sure that confirm_top.confirm_top returns True
        pillar.matchers['confirm_top.confirm_top'] = lambda *x, **y: True

        # Act
        compiled_pillar = pillar.compile_pillar()

        # Assert
        # NOTE: the original version repeated this exact assertion block a
        # second time (a copy-paste leftover); the duplicate was removed.
        self.assertEqual(compiled_pillar['this'], 'is all good')
        self.assertEqual(compiled_pillar['that'], 'is also all good')
        self.assertEqual(compiled_pillar['simple'], 'simon')
        self.assertEqual(compiled_pillar['super simple'], 'a caveman')
        self.assertEqual(compiled_pillar['mordor'], 'has dark depths')
        self.assertEqual(compiled_pillar['found'], 'my precious')
        self.assertEqual(compiled_pillar['mojo'], "bad risin'")
コード例 #9
0
def extract(template_path, raw_text=None, raw_text_file=None, saltenv='base'):
    r'''
    Extracts the data entities from the unstructured
    raw text sent as input and returns the data
    mapping, processing using the TextFSM template.

    template_path
        The path to the TextFSM template.
        This can be specified using the absolute path
        to the file, or using one of the following URL schemes:

        - ``salt://``, to fetch the template from the Salt fileserver.
        - ``http://`` or ``https://``
        - ``ftp://``
        - ``s3://``
        - ``swift://``

    raw_text: ``None``
        The unstructured text to be parsed.

    raw_text_file: ``None``
        Text file to read, having the raw text to be parsed using the TextFSM template.
        Supports the same URL schemes as the ``template_path`` argument.

    saltenv: ``base``
        Salt fileserver environment from which to retrieve the file.
        Ignored if ``template_path`` is not a ``salt://`` URL.

    CLI Example:

    .. code-block:: bash

        salt '*' textfsm.extract salt://textfsm/juniper_version_template raw_text_file=s3://junos_ver.txt
        salt '*' textfsm.extract http://some-server/textfsm/juniper_version_template raw_text='Hostname: router.abc ... snip ...'

    Jinja template example:

    .. code-block:: jinja

        {%- set raw_text = 'Hostname: router.abc ... snip ...' -%}
        {%- set textfsm_extract = salt.textfsm.extract('https://some-server/textfsm/juniper_version_template', raw_text) -%}

    Raw text example:

    .. code-block:: text

        Hostname: router.abc
        Model: mx960
        JUNOS Base OS boot [9.1S3.5]
        JUNOS Base OS Software Suite [9.1S3.5]
        JUNOS Kernel Software Suite [9.1S3.5]
        JUNOS Crypto Software Suite [9.1S3.5]
        JUNOS Packet Forwarding Engine Support (M/T Common) [9.1S3.5]
        JUNOS Packet Forwarding Engine Support (MX Common) [9.1S3.5]
        JUNOS Online Documentation [9.1S3.5]
        JUNOS Routing Software Suite [9.1S3.5]

    TextFSM Example:

    .. code-block:: text

        Value Chassis (\S+)
        Value Required Model (\S+)
        Value Boot (.*)
        Value Base (.*)
        Value Kernel (.*)
        Value Crypto (.*)
        Value Documentation (.*)
        Value Routing (.*)

        Start
        # Support multiple chassis systems.
          ^\S+:$$ -> Continue.Record
          ^${Chassis}:$$
          ^Model: ${Model}
          ^JUNOS Base OS boot \[${Boot}\]
          ^JUNOS Software Release \[${Base}\]
          ^JUNOS Base OS Software Suite \[${Base}\]
          ^JUNOS Kernel Software Suite \[${Kernel}\]
          ^JUNOS Crypto Software Suite \[${Crypto}\]
          ^JUNOS Online Documentation \[${Documentation}\]
          ^JUNOS Routing Software Suite \[${Routing}\]

    Output example:

    .. code-block:: json

        {
            "comment": "",
            "result": true,
            "out": [
                {
                    "kernel": "9.1S3.5",
                    "documentation": "9.1S3.5",
                    "boot": "9.1S3.5",
                    "crypto": "9.1S3.5",
                    "chassis": "",
                    "routing": "9.1S3.5",
                    "base": "9.1S3.5",
                    "model": "mx960"
                }
            ]
        }
    '''
    ret = {
        'result': False,
        'comment': '',
        'out': None
    }
    # Lazy %-style logging args: the message is only built when the
    # debug level is actually enabled.
    log.debug('Using the saltenv: %s', saltenv)
    log.debug('Caching %s using the Salt fileserver', template_path)
    tpl_cached_path = __salt__['cp.cache_file'](template_path, saltenv=saltenv)
    if tpl_cached_path is False:
        ret['comment'] = 'Unable to read the TextFSM template from {}'.format(template_path)
        log.error(ret['comment'])
        return ret
    try:
        log.debug('Reading TextFSM template from cache path: %s', tpl_cached_path)
        # Disabling pylint W8470 to not complain about fopen.
        # Unfortunately textFSM needs the file handle rather than the content...
        # The ``with`` block guarantees the handle is closed even when the
        # template fails to parse (the previous code leaked it).
        # pylint: disable=W8470
        with fopen(tpl_cached_path, 'r') as tpl_file_handle:
            # pylint: enable=W8470
            log.debug(tpl_file_handle.read())
            tpl_file_handle.seek(0)  # move the object position back at the top of the file
            fsm_handler = textfsm.TextFSM(tpl_file_handle)
    except textfsm.TextFSMTemplateError as tfte:
        log.error('Unable to parse the TextFSM template', exc_info=True)
        ret['comment'] = 'Unable to parse the TextFSM template from {}: {}. Please check the logs.'.format(
            template_path, tfte)
        return ret
    if not raw_text and raw_text_file:
        log.debug('Trying to read the raw input from %s', raw_text_file)
        raw_text = __salt__['cp.get_file_str'](raw_text_file, saltenv=saltenv)
        if raw_text is False:
            ret['comment'] = 'Unable to read from {}. Please specify a valid input file or text.'.format(raw_text_file)
            log.error(ret['comment'])
            return ret
    if not raw_text:
        ret['comment'] = 'Please specify a valid input file or text.'
        log.error(ret['comment'])
        return ret
    log.debug('Processing the raw text:')
    log.debug(raw_text)
    objects = fsm_handler.ParseText(raw_text)
    ret['out'] = _clitable_to_dict(objects, fsm_handler)
    ret['result'] = True
    return ret
コード例 #10
0
ファイル: portage_config.py プロジェクト: zlobme/salt-common
def append_to_package_conf(conf,
                           atom='',
                           flags=None,
                           string='',
                           overwrite=False):
    '''
    Append a string or a list of flags for a given package or DEPEND atom to a
    given configuration file.

    conf
        The portage configuration to modify (e.g. ``use`` or
        ``accept_keywords``). Must be listed in ``SUPPORTED_CONFS``;
        any other value is silently ignored.

    atom
        Package or DEPEND atom (e.g. ``app-admin/salt``). Resolved to a
        full category/package form via ``_p_to_cp`` when no category is
        given. Ignored when ``string`` is supplied.

    flags
        List of flags to set for the atom (e.g. ``['ldap', '-libvirt']``).

    string
        A complete configuration line (atom followed by its flags).
        Takes precedence over ``atom``/``flags``.

    overwrite
        When ``True``, replace an existing line for the atom instead of
        merging the new flags into the existing ones.

    CLI Example:

    .. code-block:: bash

        salt '*' portage_config.append_to_package_conf use string="app-admin/salt ldap -libvirt"
        salt '*' portage_config.append_to_package_conf use atom="> = app-admin/salt-0.14.1" flags="['ldap', '-libvirt']"
    '''
    # Avoid a mutable default argument for ``flags``.
    if flags is None:
        flags = []
    if conf in SUPPORTED_CONFS:
        if not string:
            # Build the configuration line from atom + flags.
            if '/' not in atom:
                # No category supplied; resolve the short package name.
                atom = _p_to_cp(atom)
                if not atom:
                    # Could not resolve the package name; nothing to do.
                    return
            string = '{0} {1}'.format(atom, ' '.join(flags))
            new_flags = list(flags)
        else:
            # Parse the atom and flags back out of the pre-built string.
            atom = string.strip().split()[0]
            new_flags = [flag for flag in string.strip().split(' ')
                         if flag][1:]
            if '/' not in atom:
                atom = _p_to_cp(atom)
                string = '{0} {1}'.format(atom, ' '.join(new_flags))
                if not atom:
                    return

        to_delete_if_empty = []
        if conf == 'accept_keywords':
            # '-~ARCH': mark the atom for removal if it ends up with no
            # other flags on its line.
            if '-~ARCH' in new_flags:
                new_flags.remove('-~ARCH')
                to_delete_if_empty.append(atom)

            # '~ARCH': recorded as a bare atom line (no flags), written
            # through a recursive call.
            if '~ARCH' in new_flags:
                new_flags.remove('~ARCH')
                append_to_package_conf(conf, string=atom, overwrite=overwrite)
                if not new_flags:
                    return

        # Next sort is just aesthetic, can be commented for a small performance
        # boost
        new_flags.sort(key=lambda x: x.lstrip('-'))

        complete_file_path = _get_config_file(conf, atom)
        pdir = os.path.dirname(complete_file_path)
        if not os.path.exists(pdir):
            os.makedirs(pdir, 0o755)

        # Keep a backup so a failed rewrite does not lose the original file.
        try:
            shutil.copy(complete_file_path, complete_file_path + '.bak')
        except IOError:
            pass

        # Open for in-place rewrite; fall back to creating the file when
        # it does not exist yet.
        try:
            file_handler = fopen(complete_file_path, 'r+')  # pylint: disable=resource-leakage
        except IOError:
            file_handler = fopen(complete_file_path, 'w+')  # pylint: disable=resource-leakage

        new_contents = ''
        added = False

        try:
            # Rebuild the file line by line: merge/overwrite the line that
            # starts with our atom, pass everything else through untouched.
            for l in file_handler:
                l_strip = l.strip()
                if l_strip == '':
                    new_contents += '\n'
                elif l_strip[0] == '#':
                    # Preserve comment lines verbatim.
                    new_contents += l
                elif l_strip.split()[0] == atom:
                    if l_strip in to_delete_if_empty:
                        # Flagless entry marked for deletion: drop it.
                        continue
                    if overwrite:
                        new_contents += string.strip() + '\n'
                        added = True
                    else:
                        # Merge the new flags with the flags already on
                        # the existing line.
                        old_flags = [
                            flag for flag in l_strip.split(' ') if flag
                        ][1:]
                        if conf == 'accept_keywords':
                            if not old_flags:
                                # Bare atom line already present; keep it.
                                new_contents += l
                                if not new_flags:
                                    added = True
                                continue
                            elif not new_flags:
                                # Bare-atom update of a flagged entry is
                                # handled by the recursive call above.
                                continue
                        merged_flags = _merge_flags(new_flags, old_flags, conf)
                        if merged_flags:
                            new_contents += '{0} {1}\n'.format(
                                atom, ' '.join(merged_flags))
                        else:
                            new_contents += '{0}\n'.format(atom)
                        added = True
                else:
                    new_contents += l
            if not added:
                # Atom not found anywhere in the file: append it.
                new_contents += string.strip() + '\n'
        except Exception as exc:
            log.error('Failed to write to %s: %s', complete_file_path, exc)
        else:
            # Rewrite the file in place with the rebuilt contents.
            file_handler.seek(0)
            file_handler.truncate(len(new_contents))
            file_handler.write(new_contents)
        finally:
            file_handler.close()

        # Rewrite done (or never attempted); discard the backup.
        try:
            os.remove(complete_file_path + '.bak')
        except OSError:
            pass
コード例 #11
0
    def _read(self, source):
        """Resolve *source* to a concrete path and return its raw bytes."""
        resolved = self._resolve(source)
        with fopen(resolved, "rb") as handle:
            return handle.read()
コード例 #12
0
ファイル: yamlet.py プロジェクト: MatthiasWiesner/salt-tower
    def _read_b64(self, source):
        '''Resolve *source*, read its bytes and return them Base64-encoded.'''
        resolved = self._resolve(source)
        with fopen(resolved, 'rb') as handle:
            raw = handle.read()
        return base64.b64encode(raw)
コード例 #13
0
ファイル: yamlet.py プロジェクト: MatthiasWiesner/salt-tower
    def _read(self, source):
        '''Resolve *source* to a concrete path and return its raw bytes.'''
        resolved = self._resolve(source)
        with fopen(resolved, 'rb') as handle:
            return handle.read()