Example #1
class MapMeta(six.with_metaclass(Prepareable, type)):
    """
    This is the metaclass for our Map class, used for building data maps
    based on grain data.
    """
    @classmethod
    def __prepare__(metacls, name, bases):
        return OrderedDict()

    def __new__(cls, name, bases, attrs):
        c = type.__new__(cls, name, bases, attrs)
        c.__ordered_attrs__ = attrs.keys()
        return c

    def __init__(cls, name, bases, nmspc):
        cls.__set_attributes__()  # pylint: disable=no-value-for-parameter
        super(MapMeta, cls).__init__(name, bases, nmspc)

    def __set_attributes__(cls):
        match_info = []
        grain_targets = set()

        # find all of our filters
        for item in cls.__ordered_attrs__:
            if item[0] == "_":
                continue

            filt = cls.__dict__[item]

            # only process classes
            if not inspect.isclass(filt):
                continue

            # which grain are we filtering on
            grain = getattr(filt, "__grain__", "os_family")
            grain_targets.add(grain)

            # does the object pointed to have a __match__ attribute?
            # if so use it, otherwise use the name of the object
            # this is so that you can match complex values, which the python
            # class name syntax does not allow
            match = getattr(filt, "__match__", item)

            match_attrs = {}
            for name in filt.__dict__:
                if name[0] != "_":
                    match_attrs[name] = filt.__dict__[name]

            match_info.append((grain, match, match_attrs))

        # Reorder based on priority
        try:
            if not hasattr(cls.priority, "__iter__"):
                log.error("pyobjects: priority must be an iterable")
            else:
                new_match_info = []
                for grain in cls.priority:
                    # Using list() here because we will be modifying
                    # match_info during iteration
                    for index, item in list(enumerate(match_info)):
                        try:
                            if item[0] == grain:
                                # Add item to new list
                                new_match_info.append(item)
                                # Clear item from old list
                                match_info[index] = None
                        except TypeError:
                            # Already moved this item to new list
                            pass
                # Add in any remaining items not defined in priority
                new_match_info.extend([x for x in match_info if x is not None])
                # Save reordered list as the match_info list
                match_info = new_match_info
        except AttributeError:
            pass

        # Check for matches and update the attrs dict accordingly
        attrs = {}
        if match_info:
            grain_vals = Map.__salt__["grains.item"](*grain_targets)
            for grain, match, match_attrs in match_info:
                if grain not in grain_vals:
                    continue
                if grain_vals[grain] == match:
                    attrs.update(match_attrs)

        if hasattr(cls, "merge"):
            pillar = Map.__salt__["pillar.get"](cls.merge)
            if pillar:
                attrs.update(pillar)

        for name in attrs:
            setattr(cls, name, attrs[name])
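
The ordering machinery above is the point of pairing Prepareable with
__prepare__: the class body is collected into an OrderedDict, so
__ordered_attrs__ records the order in which the nested filter classes were
declared. A minimal, self-contained sketch of the same mechanism (Python 3
syntax; on Python 2 the six/Prepareable combination emulates __prepare__):

from collections import OrderedDict


class OrderedMeta(type):
    @classmethod
    def __prepare__(metacls, name, bases):
        # Python 3 collects the class body into this mapping
        return OrderedDict()

    def __new__(cls, name, bases, attrs):
        klass = type.__new__(cls, name, bases, dict(attrs))
        # Remember declaration order, skipping compiler-added dunders
        klass.__ordered_attrs__ = [k for k in attrs if not k.startswith('_')]
        return klass


class Demo(metaclass=OrderedMeta):
    second = 2
    first = 1


print(Demo.__ordered_attrs__)  # ['second', 'first'] -- declaration order kept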
Example #2
class Map(six.with_metaclass(MapMeta, object)):  # pylint: disable=W0232
    __salt__ = {"grains.filter_by": need_salt, "pillar.get": need_salt}
Example #3
class TestProgram(six.with_metaclass(TestProgramMeta, object)):
    '''
    Set up an arbitrary executable to run.

    :attribute dirtree: An iterable of directories to be created
    '''

    empty_config = ''
    config_file = ''

    config_attrs = set([
        'name',
        'test_dir',
        'config_dirs',
    ])
    config_vals = {}
    config_base = ''
    config_dir = os.path.join('etc')
    configs = {}
    config_types = (
        str,
        six.string_types,
    )

    dirtree = [
        '&config_dirs',
    ]

    @staticmethod
    def config_caster(cfg):
        return str(cfg)

    def __init__(self,
                 program=None,
                 name=None,
                 env=None,
                 shell=False,
                 parent_dir=None,
                 clean_on_exit=True,
                 **kwargs):
        self.program = program or getattr(self, 'program', None)
        self.name = name or getattr(self, 'name', '')
        self.env = env or {}
        self.shell = shell
        self._parent_dir = parent_dir or None
        self.clean_on_exit = clean_on_exit
        self._root_dir = kwargs.pop('root_dir', self.name)
        self.config_dir = kwargs.pop('config_dir', copy.copy(self.config_dir))

        config_attrs = copy.copy(self.config_attrs)
        config_attrs.update(kwargs.pop('config_attrs', set()))
        self.config_attrs = config_attrs

        config_vals = copy.copy(self.config_vals)
        config_vals.update(kwargs.pop('config_vals', {}))
        self.config_vals = config_vals

        config_base = copy.deepcopy(self.config_base)
        config_base = self.config_merge(
            config_base, kwargs.pop('config_base', self.config_types[0]()))
        self.config_base = config_base

        configs = copy.deepcopy(self.configs)
        for cname, cinfo in kwargs.pop('configs', {}).items():
            target = configs.setdefault(cname, {})
            if 'path' in cinfo:
                target['path'] = cinfo['path']
            if 'map' in cinfo:
                target_map = target.setdefault('map', self.config_types[0]())
                target_map = self.config_merge(target_map, cinfo['map'])
                target['map'] = target_map
        self.configs = configs

        if not self.name:
            if not self.program:
                raise ValueError(
                    '"{0}" object must specify "program" parameter'.format(
                        self.__class__.__name__))
            self.name = os.path.basename(self.program)

        self.process = None
        self.created_parent_dir = False
        self._setup_done = False

        dirtree = set(self.dirtree)
        dirtree.update(kwargs.pop('dirtree', []))
        self.dirtree = dirtree

        # Register the exit clean-up before making anything needing clean-up
        atexit.register(self.cleanup)

    def __enter__(self):
        # Return the instance so 'with TestProgram(...) as prog:' binds it
        return self

    def __exit__(self, typ, value, traceback):
        pass

    @property
    def test_dir(self):
        '''Directory that will contain all of the static and dynamic files for the daemon'''
        return os.path.join(self.parent_dir, self._root_dir)

    def config_file_get(self, config):
        '''Get the filename (i.e. the path) of the configuration file'''
        cfgf = self.configs[config].get('path')
        if cfgf:
            cfgf = cfgf.format(**self.config_subs())
        else:
            cfgf = os.path.join(self.config_dir, config)
        return cfgf

    def config_dir_get(self, config):
        '''Get the parent directory for the configuration file'''
        return os.path.dirname(self.config_file_get(config))

    @property
    def config_dirs(self):
        '''Return a list of configuration directories'''
        cdirs = [self.config_dir_get(config) for config in self.configs.keys()]
        return cdirs

    def abs_path(self, path):
        '''Absolute path of file including the test_dir'''
        return os.path.join(self.test_dir, path)

    @property
    def start_pid(self):
        '''PID of the called script prior to daemonizing.'''
        return self.process.pid if self.process else None

    @property
    def parent_dir(self):
        '''
        Directory that contains everything generated for running scripts - possibly
        for multiple scripts.
        '''
        if self._parent_dir is None:
            self.created_parent_dir = True
            self._parent_dir = tempfile.mkdtemp(prefix='salt-testdaemon-')
        else:
            self._parent_dir = os.path.abspath(
                os.path.normpath(self._parent_dir))
            if not os.path.exists(self._parent_dir):
                self.created_parent_dir = True
                os.makedirs(self._parent_dir)
            elif not os.path.isdir(self._parent_dir):
                raise ValueError(
                    'Parent path "{0}" exists but is not a directory'.format(
                        self._parent_dir))
        return self._parent_dir

    def config_write(self, config):
        '''Write out the config to a file'''
        if not config:
            return
        cpath = self.abs_path(self.config_file_get(config))
        with open(cpath, 'w') as cfo:
            cfg = self.config_stringify(config)
            log.debug('Writing configuration for {0} to {1}:\n{2}'.format(
                self.name, cpath, cfg))
            cfo.write(cfg)
            cfo.flush()

    def configs_write(self):
        '''Write all configuration files'''
        for config in self.configs:
            self.config_write(config)

    def config_type(self, config):
        '''Check if a configuration is an acceptable type.'''
        return isinstance(config, self.config_types)

    def config_cast(self, config):
        '''Cast a configuration to the internal expected type.'''
        if not self.config_type(config):
            config = self.config_caster(config)
        return config

    def config_subs(self):
        '''Get the substitution values for use to generate the config'''
        subs = dict([(attr, getattr(self, attr, None))
                     for attr in self.config_attrs])
        for key, val in self.config_vals.items():
            subs[key] = val.format(**subs)
        return subs

    def config_stringify(self, config):
        '''Get the configuration as a string'''
        cfg = self.config_get(config)
        cfg = cfg.format(**self.config_subs())
        return cfg

    def config_merge(self, base, overrides):
        '''Merge two configuration hunks'''
        base = self.config_cast(base)
        overrides = self.config_cast(overrides)
        return ''.join([base, overrides])

    def config_get(self, config):
        '''Get the configuration data'''
        return self.configs[config]

    def config_set(self, config, val):
        '''Set the configuration data'''
        self.configs[config] = val

    def make_dirtree(self):
        '''Create directory structure.'''
        subdirs = []
        for branch in self.dirtree:
            log.debug('checking dirtree: {0}'.format(branch))
            if not branch:
                continue
            if isinstance(branch, six.string_types) and branch[0] == '&':
                log.debug('Looking up dirtree branch "{0}"'.format(branch))
                try:
                    # No default value: a missing attribute must raise
                    # AttributeError so the handler below can fire
                    dirattr = getattr(self, branch[1:])
                    log.debug('dirtree "{0}" => "{1}"'.format(branch, dirattr))
                except AttributeError:
                    raise ValueError(
                        'Unable to find dirtree attribute "{0}" on object "{1}.name = {2}: {3}"'
                        .format(
                            branch,
                            self.__class__.__name__,
                            self.name,
                            dir(self),
                        ))

                if not dirattr:
                    continue

                if isinstance(dirattr, six.string_types):
                    subdirs.append(dirattr)
                elif hasattr(dirattr, '__iter__'):
                    subdirs.extend(dirattr)
                else:
                    raise TypeError(
                        "Branch type of {0} in dirtree is unhandled".format(
                            branch))
            elif isinstance(branch, six.string_types):
                subdirs.append(branch)
            else:
                raise TypeError(
                    "Branch type of {0} in dirtree is unhandled".format(
                        branch))

        for subdir in subdirs:
            path = self.abs_path(subdir)
            if not os.path.exists(path):
                log.debug('make_dirtree: {0}'.format(path))
                os.makedirs(path)

    def setup(self, *args, **kwargs):
        '''Create any scaffolding for run-time'''

        # unused
        _ = args, kwargs

        if not self._setup_done:
            self.make_dirtree()
            self.configs_write()
            self._setup_done = True

    def cleanup(self, *args, **kwargs):
        ''' Clean out scaffolding of setup() and any run-time generated files.'''
        # Unused for now
        _ = (args, kwargs)

        if self.process:
            try:
                self.process.kill()
                self.process.wait()
            except OSError:
                pass
        if os.path.exists(self.test_dir):
            shutil.rmtree(self.test_dir)
        if self.created_parent_dir and os.path.exists(self.parent_dir):
            shutil.rmtree(self.parent_dir)

    def run(
        self,
        args=None,
        catch_stderr=False,
        with_retcode=False,
        timeout=None,
        raw=False,
        env=None,
        verbatim_args=False,
        verbatim_env=False,
    ):
        '''
        Execute a command possibly using a supplied environment.

        :param args:
            A command string or a command sequence of arguments for the program.

        :param catch_stderr: A boolean whether to capture and return stderr.

        :param with_retcode: A boolean whether to return the exit code.

        :param timeout: A float of how long to wait for the process to
            complete before it is killed.

        :param raw: A boolean whether to return buffer strings for stdout and
            stderr or sequences of output lines.

        :param env: A dictionary of environment key/value settings for the
            command.

        :param verbatim_args: A boolean whether to skip adding inferred
            arguments (pass the arguments verbatim).

        :param verbatim_env: A boolean whether to skip adding inferred
            environment values (pass the environment verbatim).

        :return list: (stdout [,stderr] [,retcode])
        '''

        # unused for now
        _ = verbatim_args

        self.setup()

        if args is None:
            args = []

        if env is None:
            env = {}

        env_delta = {}
        env_delta.update(self.env)
        env_delta.update(env)

        if not verbatim_env:
            env_pypath = env_delta.get('PYTHONPATH',
                                       os.environ.get('PYTHONPATH'))
            if not env_pypath:
                env_pypath = sys.path
            else:
                env_pypath = env_pypath.split(':')
                for path in sys.path:
                    if path not in env_pypath:
                        env_pypath.append(path)
            # Always ensure that the test tree is searched first for python modules
            if integration.CODE_DIR != env_pypath[0]:
                env_pypath.insert(0, integration.CODE_DIR)
            env_delta['PYTHONPATH'] = ':'.join(env_pypath)

        cmd_env = dict(os.environ)
        cmd_env.update(env_delta)

        popen_kwargs = {
            'shell': self.shell,
            'stdout': subprocess.PIPE,
            'env': cmd_env,
        }

        if catch_stderr is True:
            popen_kwargs['stderr'] = subprocess.PIPE

        if not sys.platform.lower().startswith('win'):
            popen_kwargs['close_fds'] = True

            def detach_from_parent_group():
                '''
                A utility function that prevents the child process from receiving the parent's signals.
                '''
                os.setpgrp()

            popen_kwargs['preexec_fn'] = detach_from_parent_group

        elif sys.platform.lower().startswith('win') and timeout is not None:
            raise RuntimeError('Timeout is not supported under windows')

        argv = [self.program]
        argv.extend(args)
        log.debug('TestProgram.run: {0} Environment {1}'.format(
            argv, env_delta))
        process = subprocess.Popen(argv, **popen_kwargs)
        self.process = process

        if timeout is not None:
            stop_at = datetime.now() + timedelta(seconds=timeout)
            term_sent = False
            while True:
                process.poll()

                if datetime.now() > stop_at:
                    if term_sent is False:
                        # Interrupt the whole process group, since signalling
                        # only the shell would not stop the command executed
                        # in the shell
                        os.killpg(os.getpgid(process.pid), signal.SIGINT)
                        term_sent = True
                        continue

                    try:
                        # As a last resort, kill the process group
                        os.killpg(os.getpgid(process.pid), signal.SIGKILL)
                        process.wait()
                    except OSError as exc:
                        if exc.errno != errno.ESRCH:
                            raise

                    out = process.stdout.read().splitlines()
                    out.extend([
                        'Process took more than {0} seconds to complete. '
                        'Process Killed!'.format(timeout)
                    ])
                    if catch_stderr:
                        err = process.stderr.read().splitlines()
                        if with_retcode:
                            return out, err, process.returncode
                        else:
                            return out, err
                    if with_retcode:
                        return out, process.returncode
                    else:
                        return out

                if process.returncode is not None:
                    break

        if catch_stderr:
            if sys.version_info < (2, 7):
                # On Python 2.6, the subprocess communicate() method uses
                # select(), which is limited by the OS to 1024 file
                # descriptors. We need more available descriptors to run the
                # tests which need the stderr output.
                # So instead of .communicate() we wait for the process to
                # finish, although, as the Python docs state, "This will
                # deadlock when using stdout=PIPE and/or stderr=PIPE and the
                # child process generates enough output to a pipe such that
                # it blocks waiting for the OS pipe buffer to accept more
                # data. Use communicate() to avoid that." <- a catch-22
                #
                # Use this workaround only where it's needed: Python 2.6
                process.wait()
                out = process.stdout.read()
                err = process.stderr.read()
            else:
                out, err = process.communicate()
            # Force closing stderr/stdout to release file descriptors
            if process.stdout is not None:
                process.stdout.close()
            if process.stderr is not None:
                process.stderr.close()
            # pylint: disable=maybe-no-member
            try:
                if with_retcode:
                    if out is not None and err is not None:
                        if not raw:
                            return out.splitlines(), err.splitlines(
                            ), process.returncode
                        else:
                            return out, err, process.returncode
                    return out.splitlines(), [], process.returncode
                else:
                    if out is not None and err is not None:
                        if not raw:
                            return out.splitlines(), err.splitlines()
                        else:
                            return out, err
                    if not raw:
                        return out.splitlines(), []
                    else:
                        return out, []
            finally:
                try:
                    process.terminate()
                except OSError:
                    # process already terminated; also avoids rebinding the
                    # 'err' output captured above
                    pass
            # pylint: enable=maybe-no-member

        data = process.communicate()
        process.stdout.close()

        try:
            if with_retcode:
                if not raw:
                    return data[0].splitlines(), process.returncode
                else:
                    return data[0], process.returncode
            else:
                if not raw:
                    return data[0].splitlines()
                else:
                    return data[0]
        finally:
            try:
                process.terminate()
            except OSError:
                # process already terminated
                pass
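
A minimal usage sketch (program, name, and arguments are hypothetical; a
Unix-like platform is assumed): build the scratch directory tree, run a
command, and collect its output.

prog = TestProgram(program='/bin/echo', name='echo-test')
try:
    # With these flags run() returns (stdout_lines, stderr_lines, retcode)
    out, err, retcode = prog.run(args=['hello'],
                                 catch_stderr=True,
                                 with_retcode=True)
finally:
    prog.cleanup()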
Example #4
class TestSaltDaemon(
        six.with_metaclass(TestSaltProgramMeta, TestDaemon, TestSaltProgram)):
    '''
    A class to run arbitrary salt daemons (master, minion, syndic, etc.)
    '''
    pass
Example #5
class SaltLoggingClass(
        six.with_metaclass(LoggingMixInMeta, LOGGING_LOGGER_CLASS,
                           NewStyleClassMixIn)):
    def __new__(cls, *args):  # pylint: disable=W0613, E1002
        '''
        We override `__new__` in our logging logger class in order to provide
        some additional features like expand the module name padding if length
        is being used, and also some Unicode fixes.

        This code overhead will only be executed when the class is
        instantiated, i.e.:

            logging.getLogger(__name__)

        '''
        instance = super(SaltLoggingClass, cls).__new__(cls)

        try:
            max_logger_length = len(
                max(list(logging.Logger.manager.loggerDict), key=len))
            for handler in logging.root.handlers:
                if handler in (LOGGING_NULL_HANDLER, LOGGING_STORE_HANDLER,
                               LOGGING_TEMP_HANDLER):
                    continue

                formatter = handler.formatter
                if not formatter:
                    continue

                if not handler.lock:
                    handler.createLock()
                handler.acquire()

                fmt = formatter._fmt.replace('%', '%%')

                match = MODNAME_PATTERN.search(fmt)
                if not match:
                    # Not matched. Release handler and return.
                    handler.release()
                    return instance

                if 'digits' not in match.groupdict():
                    # No digits group. Release handler and return.
                    handler.release()
                    return instance

                digits = match.group('digits')
                if not digits or not digits.isdigit():
                    # No valid digits. Release handler and return.
                    handler.release()
                    return instance

                if int(digits) < max_logger_length:
                    # Formatter digits value is lower than current max, update.
                    fmt = fmt.replace(match.group('name'), '%%(name)-%ds')
                    formatter = logging.Formatter(fmt % max_logger_length,
                                                  datefmt=formatter.datefmt)
                    handler.setFormatter(formatter)
                handler.release()
        except ValueError:
            # There are no registered loggers yet
            pass
        return instance

    def _log(
            self,
            level,
            msg,
            args,
            exc_info=None,
            extra=None,  # pylint: disable=arguments-differ
            exc_info_on_loglevel=None):
        # If both exc_info and exc_info_on_loglevel are both passed, let's fail
        if exc_info and exc_info_on_loglevel:
            raise RuntimeError(
                'Only one of \'exc_info\' and \'exc_info_on_loglevel\' is '
                'permitted')
        if exc_info_on_loglevel is not None:
            if isinstance(exc_info_on_loglevel, six.string_types):
                exc_info_on_loglevel = LOG_LEVELS.get(exc_info_on_loglevel,
                                                      logging.ERROR)
            elif not isinstance(exc_info_on_loglevel, int):
                raise RuntimeError(
                    'The value of \'exc_info_on_loglevel\' needs to be a '
                    'logging level or a logging level name, not \'{0}\''.
                    format(exc_info_on_loglevel))
        if extra is None:
            extra = {'exc_info_on_loglevel': exc_info_on_loglevel}
        else:
            extra['exc_info_on_loglevel'] = exc_info_on_loglevel

        LOGGING_LOGGER_CLASS._log(self,
                                  level,
                                  msg,
                                  args,
                                  exc_info=exc_info,
                                  extra=extra)

    # pylint: disable=C0103
    # pylint: disable=W0221
    def makeRecord(self,
                   name,
                   level,
                   fn,
                   lno,
                   msg,
                   args,
                   exc_info,
                   func=None,
                   extra=None,
                   sinfo=None):
        # Let's remove exc_info_on_loglevel from extra
        exc_info_on_loglevel = extra.pop('exc_info_on_loglevel')
        if not extra:
            # If nothing else is in extra, make it None
            extra = None

        # Let's try to make every logging message unicode
        if isinstance(msg, six.string_types) \
                and not isinstance(msg, six.text_type):
            salt_system_encoding = __salt_system_encoding__
            if salt_system_encoding == 'ascii':
                # Encoding detection most likely failed, let's use the utf-8
                # value which we defaulted before __salt_system_encoding__ was
                # implemented
                salt_system_encoding = 'utf-8'
            try:
                _msg = msg.decode(salt_system_encoding, 'replace')
            except UnicodeDecodeError:
                _msg = msg.decode(salt_system_encoding, 'ignore')
        else:
            _msg = msg

        if six.PY3:
            logrecord = _LOG_RECORD_FACTORY(name, level, fn, lno, _msg, args,
                                            exc_info, func, sinfo)
        else:
            logrecord = _LOG_RECORD_FACTORY(name, level, fn, lno, _msg, args,
                                            exc_info, func)

        if extra is not None:
            for key in extra:
                if (key in ['message', 'asctime']) or (key
                                                       in logrecord.__dict__):
                    raise KeyError(
                        'Attempt to overwrite \'{0}\' in LogRecord'.format(
                            key))
                logrecord.__dict__[key] = extra[key]

        if exc_info_on_loglevel is not None:
            # Let's add some custom attributes to the LogRecord class in order
            # to include the exc_info on a per handler basis. This will allow
            # showing tracebacks on logfiles but not on console if the logfile
            # handler is enabled for the log level "exc_info_on_loglevel" and
            # console handler is not.
            logrecord.exc_info_on_loglevel_instance = sys.exc_info()
            logrecord.exc_info_on_loglevel_formatted = None

        logrecord.exc_info_on_loglevel = exc_info_on_loglevel
        return logrecord
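
The exc_info_on_loglevel keyword handled by _log() and makeRecord() above
lets a traceback be attached per handler instead of globally. A usage sketch,
assuming Salt's logging setup has installed SaltLoggingClass as the logger
class:

import logging

log = logging.getLogger(__name__)
try:
    1 / 0
except ZeroDivisionError:
    # The traceback is rendered only by handlers enabled for DEBUG, so a
    # DEBUG logfile handler shows it while a WARNING console handler does not
    log.error('division failed', exc_info_on_loglevel=logging.DEBUG)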
Example #6
class TestSaltProgram(six.with_metaclass(TestSaltProgramMeta, TestProgram)):
    '''
    This is like TestProgram but with some functions to run a salt-specific
    auxiliary program.
    '''
    config_types = (dict, )
    config_attrs = set([
        'log_dir',
        'script_dir',
    ])
    config_base = {
        'root_dir': '{test_dir}',
    }
    configs = {}
    config_dir = os.path.join('etc', 'salt')

    log_dir = os.path.join('var', 'log', 'salt')

    dirtree = [
        '&log_dir',
        '&script_dir',
    ]

    script = ''
    script_dir = 'bin'

    @staticmethod
    def config_caster(cfg):
        return yaml.safe_load(cfg)

    def __init__(self, *args, **kwargs):
        if len(args) < 2 and 'program' not in kwargs:
            # This is effectively a place-holder - it gets set correctly after super()
            kwargs['program'] = self.script
        super(TestSaltProgram, self).__init__(*args, **kwargs)
        self.program = self.abs_path(os.path.join(self.script_dir,
                                                  self.script))
        path = self.env.get('PATH', os.getenv('PATH'))
        self.env['PATH'] = ':'.join([self.abs_path(self.script_dir), path])

    def config_merge(self, base, overrides):
        _base = self.config_cast(copy.deepcopy(base))
        _overrides = self.config_cast(overrides)
        # NOTE: this simple update will not work for deep dictionaries
        _base.update(copy.deepcopy(_overrides))
        return _base

    def config_get(self, config):
        cfg_base = {}
        for key, val in self.config_base.items():
            _val = val
            if val and isinstance(val, six.string_types) and val[0] == '&':
                _val = getattr(self, val[1:], None)
                if _val is None:
                    continue
            cfg_base[key] = _val
        if config in self.configs:
            cfg = {}
            for key, val in self.configs.get(config, {}).get('map',
                                                             {}).items():
                _val = val
                if val and isinstance(val, six.string_types) and val[0] == '&':
                    _val = getattr(self, val[1:], None)
                    if _val is None:
                        continue
                cfg[key] = _val
            cfg = self.config_merge(cfg_base, cfg)
        else:
            # No per-config 'map' defined; fall back to the base config only
            cfg = cfg_base
        log.debug('Generated config => {0}'.format(cfg))
        return cfg

    def config_stringify(self, config):
        '''Transform the configuration data into a string (suitable to write to a file)'''
        subs = self.config_subs()
        cfg = {}
        for key, val in self.config_get(config).items():
            if isinstance(val, six.string_types):
                cfg[key] = val.format(**subs)
            else:
                cfg[key] = val
        scfg = yaml.safe_dump(cfg, default_flow_style=False)
        return scfg

    def setup(self, *args, **kwargs):
        super(TestSaltProgram, self).setup(*args, **kwargs)
        self.install_script()

    def install_script(self):
        '''Generate the script file that calls python objects and libraries.'''
        lines = []
        script_source = os.path.join(integration.CODE_DIR, 'scripts',
                                     self.script)
        with open(script_source, 'r') as sso:
            lines.extend(sso.readlines())
        if lines[0].startswith('#!'):
            lines.pop(0)
        lines.insert(0, '#!{0}\n'.format(sys.executable))

        script_path = self.abs_path(os.path.join(self.script_dir, self.script))
        log.debug('Installing "{0}" to "{1}"'.format(script_source,
                                                     script_path))
        with open(script_path, 'w') as sdo:
            sdo.write(''.join(lines))
            sdo.flush()

        os.chmod(script_path, 0o755)

    def run(self, **kwargs):
        if not kwargs.get('verbatim_args'):
            args = kwargs.setdefault('args', [])
            if '-c' not in args and '--config-dir' not in args:
                args.extend(['--config-dir', self.abs_path(self.config_dir)])
        return super(TestSaltProgram, self).run(**kwargs)
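
Concrete salt programs are thin subclasses that point `script` at an entry
point under scripts/; configuration rendering, script installation, and
--config-dir injection all come from the machinery above. A sketch following
that pattern (class name and config values hypothetical; running it requires
a Salt source checkout that provides the script):

class TestProgramSaltCall(TestSaltProgram):
    '''Run the salt-call script through the scaffolding above.'''
    script = 'salt-call'

call = TestProgramSaltCall(config_base={'id': 'test-minion'})
out, retcode = call.run(args=['--local', 'test.ping'], with_retcode=True)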
Example #7
class SqlBaseExtPillar(six.with_metaclass(abc.ABCMeta, object)):
    '''
    This class receives and processes the database rows in a database
    agnostic way.
    '''

    result = None
    focus = None
    field_names = None
    num_fields = 0
    depth = 0
    as_list = False
    with_lists = None
    ignore_null = False

    def __init__(self):
        self.result = self.focus = {}

    @classmethod
    @abc.abstractmethod
    def _db_name(cls):
        '''
        Return a friendly name for the database, e.g. 'MySQL' or 'SQLite'.
        Used in logging output.
        '''
        pass

    @abc.abstractmethod
    def _get_cursor(self):
        '''
        Yield a PEP 249 compliant Cursor as a context manager.
        '''
        pass

    def extract_queries(self, args, kwargs):
        '''
        This function normalizes the config block into a set of queries we
        can use.  The return is a list of consistently laid out dicts.
        '''
        # Please note the function signature is NOT an error.  Neither args, nor
        # kwargs should have asterisks.  We are passing in a list and dict,
        # rather than receiving variable args.  Adding asterisks WILL BREAK the
        # function completely.

        # First, this is the query buffer.  Contains lists of [base,sql]
        qbuffer = []

        # Add on the non-keywords...
        qbuffer.extend([[None, s] for s in args])

        # And then the keywords...
        # They aren't in definition order, but they can't conflict with each other.
        klist = list(kwargs.keys())
        klist.sort()
        qbuffer.extend([[k, kwargs[k]] for k in klist])

        # Filter out values that don't have queries.
        qbuffer = [
            x for x in qbuffer
            if ((isinstance(x[1], six.string_types) and len(x[1])) or (
                isinstance(x[1], (list, tuple)) and (len(x[1]) > 0) and x[1][0]
            ) or (isinstance(x[1], dict) and 'query' in x[1]
                  and len(x[1]['query'])))
        ]

        # Next, turn the whole buffer into full dicts.
        for qb in qbuffer:
            defaults = {
                'query': '',
                'depth': 0,
                'as_list': False,
                'with_lists': None,
                'ignore_null': False
            }
            if isinstance(qb[1], six.string_types):
                defaults['query'] = qb[1]
            elif isinstance(qb[1], (list, tuple)):
                defaults['query'] = qb[1][0]
                if len(qb[1]) > 1:
                    defaults['depth'] = qb[1][1]
                # May set 'as_list' from qb[1][2].
            else:
                defaults.update(qb[1])
                if defaults['with_lists'] and isinstance(
                        defaults['with_lists'], six.string_types):
                    defaults['with_lists'] = [
                        int(i) for i in defaults['with_lists'].split(',')
                    ]
            qb[1] = defaults

        return qbuffer

    def enter_root(self, root):
        '''
        Set self.focus for kwarg queries
        '''
        # There is no collision protection on root name isolation
        if root:
            self.result[root] = self.focus = {}
        else:
            self.focus = self.result

    def process_fields(self, field_names, depth):
        '''
        The primary purpose of this function is to store the sql field list
        and the depth to which we process.
        '''
        # List of field names in correct order.
        self.field_names = field_names
        # number of fields.
        self.num_fields = len(field_names)
        # Constrain depth.
        if (depth == 0) or (depth >= self.num_fields):
            self.depth = self.num_fields - 1
        else:
            self.depth = depth

    def process_results(self, rows):
        '''
        This function takes a list of database results and iterates over
        them, merging them into a dict form.
        '''
        listify = OrderedDict()
        listify_dicts = OrderedDict()
        for ret in rows:
            # crd is the Current Return Data level, to make this non-recursive.
            crd = self.focus
            # Walk and create dicts above the final layer
            for i in range(0, self.depth - 1):
                # At the end we'll use listify to find values to make a list of
                if i + 1 in self.with_lists:
                    if id(crd) not in listify:
                        listify[id(crd)] = []
                        listify_dicts[id(crd)] = crd
                    if ret[i] not in listify[id(crd)]:
                        listify[id(crd)].append(ret[i])
                if ret[i] not in crd:
                    # Key missing
                    crd[ret[i]] = {}
                    crd = crd[ret[i]]
                else:
                    # Check type of collision
                    ty = type(crd[ret[i]])
                    if ty is list:
                        # Already made list
                        temp = {}
                        crd[ret[i]].append(temp)
                        crd = temp
                    elif ty is not dict:
                        # Not a list, not a dict
                        if self.as_list:
                            # Make list
                            temp = {}
                            crd[ret[i]] = [crd[ret[i]], temp]
                            crd = temp
                        else:
                            # Overwrite
                            crd[ret[i]] = {}
                            crd = crd[ret[i]]
                    else:
                        # dict, descend.
                        crd = crd[ret[i]]

            # If this test is true, the penultimate field is the key
            if self.depth == self.num_fields - 1:
                nk = self.num_fields - 2  # Aka, self.depth-1
                # Should we and will we have a list at the end?
                if ((self.as_list and (ret[nk] in crd))
                        or (nk + 1 in self.with_lists)):
                    if ret[nk] in crd:
                        if not isinstance(crd[ret[nk]], list):
                            crd[ret[nk]] = [crd[ret[nk]]]
                        # if it's already a list, do nothing
                    else:
                        crd[ret[nk]] = []
                    crd[ret[nk]].append(ret[self.num_fields - 1])
                else:
                    if not self.ignore_null or ret[self.num_fields -
                                                   1] is not None:
                        crd[ret[nk]] = ret[self.num_fields - 1]
            else:
                # Otherwise, the field name is the key but we have a spare.
                # The spare results because of {c: d} vs {c: {"d": d, "e": e }}
                # So, make that last dict
                if ret[self.depth - 1] not in crd:
                    crd[ret[self.depth - 1]] = {}
                # This bit doesn't escape listify
                if self.depth in self.with_lists:
                    if id(crd) not in listify:
                        listify[id(crd)] = []
                        listify_dicts[id(crd)] = crd
                    if ret[self.depth - 1] not in listify[id(crd)]:
                        listify[id(crd)].append(ret[self.depth - 1])
                crd = crd[ret[self.depth - 1]]
                # Now for the remaining keys, we put them into the dict
                for i in range(self.depth, self.num_fields):
                    nk = self.field_names[i]
                    # Listify
                    if i + 1 in self.with_lists:
                        if id(crd) not in listify:
                            listify[id(crd)] = []
                            listify_dicts[id(crd)] = crd
                        if nk not in listify[id(crd)]:
                            listify[id(crd)].append(nk)
                    # Collision detection
                    if self.as_list and (nk in crd):
                        # Same as before...
                        if isinstance(crd[nk], list):
                            crd[nk].append(ret[i])
                        else:
                            crd[nk] = [crd[nk], ret[i]]
                    else:
                        if not self.ignore_null or ret[i] is not None:
                            crd[nk] = ret[i]
        # Get key list and work backwards.  This is inner-out processing
        ks = list(listify_dicts.keys())
        ks.reverse()
        for i in ks:
            d = listify_dicts[i]
            for k in listify[i]:
                if isinstance(d[k], dict):
                    d[k] = list(d[k].values())
                elif isinstance(d[k], list):
                    d[k] = [d[k]]

    def fetch(
            self,
            minion_id,
            pillar,  # pylint: disable=W0613
            *args,
            **kwargs):
        '''
        Execute queries, merge and return as a dict.
        '''
        db_name = self._db_name()
        log.info('Querying {0} for information for {1}'.format(
            db_name, minion_id))
        #
        #    log.debug('ext_pillar {0} args: {1}'.format(db_name, args))
        #    log.debug('ext_pillar {0} kwargs: {1}'.format(db_name, kwargs))
        #
        # Most of the heavy lifting is in this class for ease of testing.
        qbuffer = self.extract_queries(args, kwargs)
        with self._get_cursor() as cursor:
            for root, details in qbuffer:
                # Run the query
                cursor.execute(details['query'], (minion_id, ))

                # Extract the field names the db has returned and process them
                self.process_fields([row[0] for row in cursor.description],
                                    details['depth'])
                self.enter_root(root)
                self.as_list = details['as_list']
                if details['with_lists']:
                    self.with_lists = details['with_lists']
                else:
                    self.with_lists = []
                self.ignore_null = details['ignore_null']
                self.process_results(cursor.fetchall())

                log.debug('ext_pillar {0}: Return data: {1}'.format(
                    db_name, self))
        return self.result
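
A concrete backend only needs to name itself and yield a PEP 249 cursor. A
sketch using SQLite (the database path and query are hypothetical; note that
SQLite uses '?' placeholders, whereas a MySQL backend would use '%s'):

import sqlite3
from contextlib import contextmanager


class SQLiteExtPillar(SqlBaseExtPillar):
    @classmethod
    def _db_name(cls):
        return 'SQLite'

    @contextmanager
    def _get_cursor(self):
        conn = sqlite3.connect('/var/cache/salt/pillar.sqlite')
        try:
            yield conn.cursor()
        finally:
            conn.close()


pillar_data = SQLiteExtPillar().fetch(
    'minion1', {}, 'SELECT role, value FROM pillar WHERE minion = ?')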
Example #8
class SaltStandaloneProxyOptionParser(
    six.with_metaclass(
        salt.utils.parsers.OptionParserMeta,
        salt.utils.parsers.OptionParser,
        salt.utils.parsers.ConfigDirMixIn,
        salt.utils.parsers.MergeConfigMixIn,
        salt.utils.parsers.TimeoutMixIn,
        salt.utils.parsers.LogLevelMixIn,
        salt.utils.parsers.HardCrashMixin,
        salt.utils.parsers.SaltfileMixIn,
        salt.utils.parsers.TargetOptionsMixIn,
        salt.utils.parsers.OutputOptionsMixIn,
        salt.utils.parsers.ArgsStdinMixIn,
        salt.utils.parsers.ProfilingPMixIn,
        salt.utils.parsers.EAuthMixIn,
        salt.utils.parsers.NoParseMixin,
    )
):

    default_timeout = 1

    description = (
        '''
  ___          _   _       ___   ___
 / __|  __ _  | | | |_    / __| | _ \  _ _   ___  __ __  _  _
 \__ \ / _` | | | |  _|   \__ \ |  _/ | '_| / _ \ \ \ / | || |
 |___/ \__,_| |_|  \__|   |___/ |_|   |_|   \___/ /_\_\  \_, |
                                                         |__/
\n
'''
        'salt-sproxy is a tool to invoke arbitrary Salt functions on a group\n'
        'of (network) devices connecting through a Salt Proxy Minion, without\n'
        'having the Proxy Minion services up and running (or the Master).\n'
    )
    VERSION = salt_sproxy.version.__version__

    usage = '%prog [options] <target> <function> [arguments]'

    epilog = (
        'You can find additional help about %prog at '
        'https://salt-sproxy.readthedocs.io/en/latest/'
    )

    # ConfigDirMixIn config filename attribute
    _config_filename_ = 'master'

    # LogLevelMixIn attributes
    _default_logging_level_ = config.DEFAULT_MASTER_OPTS['log_level']
    _default_logging_logfile_ = config.DEFAULT_MASTER_OPTS['log_file']

    def format_description(self, formatter):
        return self.description

    def _mixin_setup(self):
        self.add_option(
            '-r', '--roster', default=False, help='The name of the Salt Roster to use.'
        )
        self.add_option(
            '--roster-file',
            dest='roster_file',
            help='Absolute path to the Roster file to use.',
        )
        self.add_option(
            '--sync',
            default=False,
            action='store_true',
            help=(
                'Return the replies from the devices immediately as they '
                'are received, instead of all at once at the end.'
            ),
        )
        self.add_option(
            '--cache-grains',
            default=False,
            action='store_true',
            help=(
                'Cache the collected Grains. This is going to override the '
                'existing cached Grains.'
            ),
        )
        self.add_option(
            '--cache-pillar',
            default=False,
            action='store_true',
            help=(
                'Cache the compiled Pillar. This is going to override the '
                'existing cached Pillar.'
            ),
        )
        self.add_option(
            '--no-cached-grains',
            default=False,
            action='store_true',
            help='Do not use the available cached Grains (if any).',
        )
        self.add_option(
            '--no-cached-pillar',
            default=False,
            action='store_true',
            help='Do not use the available cached Pillar (if any)',
        )
        self.add_option(
            '--no-grains',
            default=False,
            action='store_true',
            help=(
                'Do not attempt to collect Grains at all. Use with care, it '
                'may lead to unexpected results.'
            ),
        )
        self.add_option(
            '--no-pillar',
            default=False,
            action='store_true',
            help=(
                'Do not compile Pillar at all. Use with care, it may lead to '
                'unexpected results.'
            ),
        )
        self.add_option(
            '-b',
            '--batch',
            '--batch-size',
            default=CPU_COUNT,
            dest='batch_size',
            help=(
                'The number of devices to connect to in parallel. '
                'Default: {}'.format(CPU_COUNT)
            ),
        )
        self.add_option(
            '--preview-target',
            dest='preview_target',
            action='store_true',
            help='Show the devices expected to match the target.',
        )
        self.add_option(
            '--sync-grains',
            dest='sync_grains',
            action='store_true',
            default=False,
            help=(
                'Re-sync the Grains modules. Useful if you have custom Grains '
                'modules in your own environment.'
            ),
        )
        self.add_option(
            '--sync-modules',
            dest='sync_modules',
            action='store_true',
            help=('Load the salt-sproxy Execution modules.'),
        )
        self.add_option(
            '--sync-roster',
            dest='sync_roster',
            action='store_true',
            help=(
                'Synchronise the Roster modules (both salt-sproxy native '
                'and provided by the user in their own environment).'
            ),
        )
        self.add_option(
            '--events',
            dest='events',
            action='store_true',
            help=(
                'Whether to put the events on the Salt bus (mostly useful '
                'when a Master is running).'
            ),
        )
        self.add_option(
            '--use-proxy',
            '--use-existing-proxy',
            '--use-existing-minion',
            dest='use_existing_proxy',
            action='store_true',
            help=(
                'Use the existing Proxy Minions to execute the commands, '
                'whenever available.'
            ),
        )
        self.add_option(
            '--file-roots',
            '--display-file-roots',
            dest='display_file_roots',
            action='store_true',
            help=(
                'Display the file_roots option you would need to configure '
                'in order to use the salt-sproxy extension modules directly, '
                'and, implicitly, leverage the event-driven methodology and '
                'the Salt REST API.'
            ),
        )
        self.add_option(
            '--save-file-roots',
            dest='save_file_roots',
            action='store_true',
            help=(
                'Saves the file_roots configuration so you can start '
                'leveraging the event-driven automation and the Salt REST API.'
            ),
        )
        self.add_option(
            '--no-connect',
            dest='no_connect',
            action='store_true',
            default=False,
            help=(
                'Do not initiate the connection with the device, only use '
                'cached data to compile data and execute Salt functions that '
                'do not require the actual connection with the device.'
            ),
        )
        self.add_option(
            '--test-ping',
            dest='test_ping',
            action='store_true',
            default=False,
            help=(
                'When used together with --use-existing-proxy, this option '
                'can help ensure the existing Proxy Minion is responsive '
                '(not only up and running) by executing a ping test.'
            ),
        )
        self.add_option(
            '--no-target-cache',
            dest='no_target_cache',
            action='store_true',
            default=False,
            help=('Do not load the list of targets from the cache.'),
        )
        group = self.output_options_group = optparse.OptionGroup(
            self, 'Output Options', 'Configure your preferred output format.'
        )
        self.add_option_group(group)

        group.add_option(
            '-q',
            '--quiet',
            default=False,
            action='store_true',
            help='Do not display the results of the run.',
        )

    # Everything else that follows here is verbatim copy from
    # https://github.com/saltstack/salt/blob/develop/salt/utils/parsers.py
    def _mixin_after_parsed(self):
        if self.options.display_file_roots or self.options.save_file_roots:
            # Insert dummy arg when displaying the file_roots
            self.args.append('not_a_valid_target')
            self.args.append('not_a_valid_command')
        if self.options.list:
            try:
                if ',' in self.args[0]:
                    self.config['tgt'] = self.args[0].replace(' ', '').split(',')
                else:
                    self.config['tgt'] = self.args[0].split()
            except IndexError:
                self.exit(42, '\nCannot execute command without defining a target.\n\n')
        else:
            try:
                self.config['tgt'] = self.args[0]
            except IndexError:
                self.exit(42, '\nCannot execute command without defining a target.\n\n')

        if self.options.preview_target:
            # Insert dummy arg which won't be used
            self.args.append('not_a_valid_command')

        # Detect compound command and set up the data for it
        if self.args:
            try:
                if ',' in self.args[1]:
                    self.config['fun'] = self.args[1].split(',')
                    self.config['arg'] = [[]]
                    cmd_index = 0
                    if (
                        self.args[2:].count(self.options.args_separator)
                        == len(self.config['fun']) - 1
                    ):
                        # new style parsing: standalone argument separator
                        for arg in self.args[2:]:
                            if arg == self.options.args_separator:
                                cmd_index += 1
                                self.config['arg'].append([])
                            else:
                                self.config['arg'][cmd_index].append(arg)
                    else:
                        # old style parsing: argument separator can be inside args
                        for arg in self.args[2:]:
                            if self.options.args_separator in arg:
                                sub_args = arg.split(self.options.args_separator)
                                for sub_arg_index, sub_arg in enumerate(sub_args):
                                    if sub_arg:
                                        self.config['arg'][cmd_index].append(sub_arg)
                                    if sub_arg_index != len(sub_args) - 1:
                                        cmd_index += 1
                                        self.config['arg'].append([])
                            else:
                                self.config['arg'][cmd_index].append(arg)
                        if len(self.config['fun']) > len(self.config['arg']):
                            self.exit(
                                42,
                                'Cannot execute compound command without '
                                'defining all arguments.\n',
                            )
                        elif len(self.config['fun']) < len(self.config['arg']):
                            self.exit(
                                42,
                                'Cannot execute compound command with more '
                                'arguments than commands.\n',
                            )
                    # parse the args and kwargs before sending to the publish
                    # interface
                    for i in range(len(self.config['arg'])):
                        self.config['arg'][i] = salt.utils.args.parse_input(
                            self.config['arg'][i], no_parse=self.options.no_parse
                        )
                else:
                    self.config['fun'] = self.args[1]
                    self.config['arg'] = self.args[2:]
                    # parse the args and kwargs before sending to the publish
                    # interface
                    self.config['arg'] = salt.utils.args.parse_input(
                        self.config['arg'], no_parse=self.options.no_parse
                    )
            except IndexError:
                self.exit(42, '\nIncomplete options passed.\n\n')

    def setup_config(self):
        return config.client_config(self.get_config_file_path())
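
A minimal invocation sketch (target and function are hypothetical; parsing
calls setup_config(), so a readable master configuration file is assumed):

parser = SaltStandaloneProxyOptionParser()
parser.parse_args(args=['edge-router1', 'test.ping'])
print(parser.config['tgt'], parser.config['fun'])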
Example #9
class LoaderModuleMockMixin(
        six.with_metaclass(_FixLoaderModuleMockMixinMroOrder, object)):
    '''
    This class will setup salt loader dunders.

    Please check `set_up_loader_mocks` above
    '''

    # Define our setUp function decorator
    @staticmethod
    def __setup_loader_modules_mocks__(setup_func):
        @functools.wraps(setup_func)
        def wrapper(self):
            if NO_MOCK:
                self.skipTest(NO_MOCK_REASON)

            loader_modules_configs = self.setup_loader_modules()
            if not isinstance(loader_modules_configs, dict):
                raise RuntimeError(
                    '{}.setup_loader_modules() must return a dictionary where the keys are the '
                    'modules that require loader mocking setup and the values, the global module '
                    'variables for each of the module being mocked. For example \'__salt__\', '
                    '\'__opts__\', etc.'.format(self.__class__.__name__))

            salt_dunders = (
                '__opts__',
                '__salt__',
                '__runner__',
                '__context__',
                '__utils__',
                '__ext_pillar__',
                '__thorium__',
                '__states__',
                '__serializers__',
                '__ret__',
                '__grains__',
                '__pillar__',
                '__sdb__',
                # Proxy is commented out on purpose since some code in salt expects a NameError
                # and is most of the time not a required dunder
                # '__proxy__'
            )

            for module, module_globals in six.iteritems(
                    loader_modules_configs):
                if not isinstance(module, types.ModuleType):
                    raise RuntimeError(
                        'The dictionary keys returned by {}.setup_loader_modules() '
                        'must be imported modules, not {}'.format(
                            self.__class__.__name__, type(module)))
                if not isinstance(module_globals, dict):
                    raise RuntimeError(
                        'The dictionary values returned by {}.setup_loader_modules() '
                        'must be dictionaries, not {}'.format(
                            self.__class__.__name__, type(module_globals)))

                module_blacklisted_dunders = module_globals.pop(
                    'blacklisted_dunders', ())

                minion_funcs = {}
                if module_globals.get('__salt__') == 'autoload':
                    if '__opts__' not in module_globals:
                        raise RuntimeError(
                            'You must provide \'__opts__\' on the {} module globals dictionary '
                            'to auto load the minion functions'.format(
                                module.__name__))
                    import salt.loader
                    ctx = {}
                    if '__utils__' not in module_globals:
                        utils = salt.loader.utils(
                            module_globals['__opts__'],
                            context=module_globals.get('__context__') or ctx)
                        module_globals['__utils__'] = utils
                    minion_funcs = salt.loader.minion_mods(
                        module_globals['__opts__'],
                        context=module_globals.get('__context__') or ctx,
                        utils=module_globals.get('__utils__'),
                    )
                    module_globals['__salt__'] = minion_funcs

                for dunder_name in salt_dunders:
                    if dunder_name not in module_globals:
                        if dunder_name in module_blacklisted_dunders:
                            continue
                        module_globals[dunder_name] = {}

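                # A special 'sys.modules' key lets a test temporarily inject
                # fake modules into sys.modules via patch.dict below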
                sys_modules = module_globals.pop('sys.modules', None)
                if sys_modules is not None:
                    if not isinstance(sys_modules, dict):
                        raise RuntimeError(
                            '\'sys.modules\' must be a dictionary, not {}'.format(
                                type(sys_modules)))
                    patcher = patch.dict(sys.modules, sys_modules)
                    patcher.start()

                    def cleanup_sys_modules(patcher, sys_modules):
                        patcher.stop()
                        del patcher
                        del sys_modules

                    self.addCleanup(cleanup_sys_modules, patcher, sys_modules)

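                # patch.multiple requires the target attributes to exist, so
                # pre-create any globals that are missing from the module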
                for key in module_globals:
                    if not hasattr(module, key):
                        if key in salt_dunders:
                            setattr(module, key, {})
                        else:
                            setattr(module, key, None)

                if module_globals:
                    patcher = patch.multiple(module, **module_globals)
                    patcher.start()

                    def cleanup_module_globals(patcher, module_globals):
                        patcher.stop()
                        del patcher
                        del module_globals

                    self.addCleanup(cleanup_module_globals, patcher,
                                    module_globals)

                if minion_funcs:
                    # Since we autoloaded the minion_funcs, let's namespace the functions with the globals
                    # used to patch above
                    import salt.utils.functools
                    for func in minion_funcs:
                        minion_funcs[func] = salt.utils.functools.namespaced_function(
                            minion_funcs[func],
                            module_globals,
                            preserve_context=True)
            return setup_func(self)

        return wrapper

    def setup_loader_modules(self):
        raise NotImplementedError(
            '\'{}.setup_loader_modules()\' must be implemented'.format(
                self.__class__.__name__))
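A hypothetical usage sketch of the mixin above; the module under test and the mocked globals here are illustrative assumptions, not part of the example:

# Hypothetical test case using LoaderModuleMockMixin; salt.modules.test
# stands in for whichever loader module is actually under test.
from unittest import TestCase

import salt.modules.test as module_under_test

class ModuleUnderTestCase(TestCase, LoaderModuleMockMixin):
    def setup_loader_modules(self):
        # Keys are imported modules; values are the dunder globals to patch.
        return {
            module_under_test: {
                '__opts__': {'test': False},
                '__salt__': {'cmd.run': lambda *a, **kw: 'mocked'},
            }
        }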
Example #10
class SaltNodesCommandParser(
        six.with_metaclass(
            salt.utils.parsers.OptionParserMeta,
            salt.utils.parsers.OptionParser,
            salt.utils.parsers.ConfigDirMixIn,
            salt.utils.parsers.ExtendedTargetOptionsMixIn,
            salt.utils.parsers.LogLevelMixIn)):
    '''
    Argument parser used by SaltGenResource to generate
    Rundeck node definitions.
    '''

    usage = '%prog [options] <target> [<attr>=<value> ...]'
    description = 'Salt Mine node source for Rundeck.'
    epilog = None

    _config_filename_ = 'minion'
    _logfile_config_setting_name_ = 'resource_generator_logfile'
    _logfile_loglevel_config_setting_name_ = 'resource_generator_log_level_logfile'
    _default_logging_logfile_ = os.path.join(syspaths.LOGS_DIR, 'resource-generator')
    _setup_mp_logging_listener_ = False
    _default_logging_level_ = 'warning'

    # Ignore requests to provide reserved attribute names
    ignore_attributes = ['hostname', 'osName', 'osVersion', 'osFamily', 'osArch', 'tags']
    ignore_servernode = ['username', 'description']

    def _mixin_setup(self):
        '''
        Define arguments specific to SaltGenResource
        '''

        self.add_option(
            '-m', '--mine-function',
            default='grains.items',
            type=str,
            help=('Set the function name for Salt Mine to execute '
                  'to retrieve grains. Default value is grains.items '
                  'but this could be different if mine function '
                  'aliases are used.')
        )
        self.add_option(
            '-s', '--include-server-node',
            action="store_true",
            help=('Include the Rundeck server node in the output. '
                  'The server node is required for some workflows '
                  'and must be provided by exactly one resource provider.')
        )
        self.add_option(
            '-u', '--server-node-user',
            type=str,
            default='rundeck',
            help=('Specify the user name to use when running jobs on the '
                  'server node. This would typically be the same user that '
                  'the Rundeck service is running as. Default: \'rundeck\'.')
        )
        self.add_option(
            '-a', '--attributes',
            type=str,
            default=[],
            action='callback',
            callback=self.set_callback,
            help=('Create Rundeck node attributes from the values of grains. '
                  'Multiple grains may be specified '
                  'when separated by a space or comma.')
        )
        self.add_option(
            '-t', '--tags',
            type=str,
            default=[],
            action='callback',
            callback=self.set_callback,
            help=('Create Rundeck node tags from the values of grains. '
                  'Multiple grains may be specified '
                  'when separated by a space or comma.')
        )

    def _mixin_after_parsed(self):
        '''
        Validate and process arguments
        '''
        if not os.path.isfile(self.get_config_file_path()):
            log.critical("Configuration file not found")
            sys.exit(-1)

        # Extract targeting expression
        try:
            if self.options.list:
                if ',' in self.args[0]:
                    self.config['tgt'] = \
                        self.args[0].replace(' ', '').split(',')
                else:
                    self.config['tgt'] = self.args[0].split()
            else:
                self.config['tgt'] = self.args[0]
        except IndexError:
            self.exit(42, ('\nCannot execute command without '
                           'defining a target.\n\n'))

        if self.options.log_level:
            self.config['log_level'] = self.options.log_level
        else:
            self.config['log_level'] = self._default_logging_level_

        # Set default targeting option
        if self.config['selected_target_option'] is None:
            self.config['selected_target_option'] = 'glob'

        # Remove conflicting grains
        self.options.attributes = [x for x in self.options.attributes
                                   if x not in self.ignore_attributes]

    def setup_config(self):
        config_opts = config.minion_config(
            self.get_config_file_path(),
            cache_minion_id=True,
            ignore_config_errors=False)

        # Make file based logging work
        if getattr(self.options, self._logfile_config_setting_name_, None) is None:

            # Copy the default log file path into the config dict
            if self._logfile_config_setting_name_ not in config_opts:
                config_opts[self._logfile_config_setting_name_] = self._default_logging_logfile_

            # Prepend the root_dir setting to the log file path
            config.prepend_root_dir(config_opts, [self._logfile_config_setting_name_])

            # Copy the altered path back to the options or it will revert to the default
            setattr(self.options, self._logfile_config_setting_name_, config_opts[self._logfile_config_setting_name_])

        else:
            # Copy the provided log file path into the config dict
            if self._logfile_config_setting_name_ not in config_opts:
                config_opts[self._logfile_config_setting_name_] = \
                    getattr(self.options, self._logfile_config_setting_name_, self._default_logging_logfile_)

        return config_opts

    # noinspection PyUnusedLocal
    @staticmethod
    def set_callback(option, opt, value, parser):  # pylint: disable=unused-argument
        '''
        Argument parser callback for handling multi-value sets.
        This callback converts comma-delimited or space-delimited strings
        to list types.
        '''
        if ',' in value:
            setattr(parser.values, option.dest,
                    set(value.replace(' ', '').split(',')))
        else:
            setattr(parser.values, option.dest, set(value.split()))
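To make the behaviour of set_callback concrete, here is the same splitting rule restated as a standalone function (an illustration only, not part of the parser):

# Comma-delimited input wins over whitespace-delimited input, and the
# result is always a set, mirroring set_callback above.
def to_set(value):
    if ',' in value:
        return set(value.replace(' ', '').split(','))
    return set(value.split())

assert to_set('os, osrelease') == {'os', 'osrelease'}
assert to_set('os osrelease') == {'os', 'osrelease'}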
Example #11
class SaltStandaloneProxyOptionParser(
        six.with_metaclass(
            salt.utils.parsers.OptionParserMeta,
            salt.utils.parsers.OptionParser,
            salt.utils.parsers.ConfigDirMixIn,
            salt.utils.parsers.MergeConfigMixIn,
            salt.utils.parsers.TimeoutMixIn,
            salt.utils.parsers.LogLevelMixIn,
            salt.utils.parsers.HardCrashMixin,
            salt.utils.parsers.SaltfileMixIn,
            salt.utils.parsers.ExtendedTargetOptionsMixIn,
            salt.utils.parsers.OutputOptionsMixIn,
            salt.utils.parsers.ArgsStdinMixIn,
            salt.utils.parsers.ProfilingPMixIn,
            salt.utils.parsers.EAuthMixIn,
            salt.utils.parsers.NoParseMixin,
        )):

    default_timeout = 60

    description = (
        r'''
  ___          _   _       ___   ___
 / __|  __ _  | | | |_    / __| | _ \  _ _   ___  __ __  _  _
 \__ \ / _` | | | |  _|   \__ \ |  _/ | '_| / _ \ \ \ / | || |
 |___/ \__,_| |_|  \__|   |___/ |_|   |_|   \___/ /_\_\  \_, |
                                                         |__/

'''
        'salt-sproxy is a tool to invoke arbitrary Salt functions on a group\n'
        'of (network) devices connecting through a Salt Proxy Minion, without\n'
        'having the Proxy Minion services up and running (or the Master).\n')
    VERSION = salt_sproxy.version.__version__

    usage = '%prog [options] <target> <function> [arguments]'

    epilog = ('You can find additional help about %prog at '
              'https://salt-sproxy.readthedocs.io/en/latest/')

    # ConfigDirMixIn config filename attribute
    _config_filename_ = 'master'

    # LogLevelMixIn attributes
    _default_logging_level_ = config.DEFAULT_MASTER_OPTS['log_level']
    _default_logging_logfile_ = config.DEFAULT_MASTER_OPTS['log_file']

    def format_description(self, formatter):
        return self.description

    def _mixin_setup(self):
        self.add_option('-r',
                        '--roster',
                        default=False,
                        help='The name of the Salt Roster to use.')
        self.add_option(
            '--roster-file',
            dest='roster_file',
            help='Absolute path to the Roster file to use.',
        )
        self.add_option(
            '-s',
            '--static',
            default=False,
            action='store_true',
            help=(
                'Return the data from devices as a group after they all return.'
            ),
        )
        self.add_option(
            "--async",
            default=False,
            dest="async",
            action="store_true",
            help=('Run the salt-sproxy command but don\'t wait for a reply.'),
        )
        self.add_option(
            '--dont-cache-grains',
            default=False,
            action='store_true',
            help=('Do not cache the collected Grains for the sproxy devices.'),
        )
        self.add_option(
            '--dont-cache-pillar',
            default=False,
            action='store_true',
            help=('Do not cache the compiled Pillar for the sproxy devices.'),
        )
        self.add_option(
            '--no-cached-grains',
            default=False,
            action='store_true',
            help='Do not use the available cached Grains (if any).',
        )
        self.add_option(
            '--no-cached-pillar',
            default=False,
            action='store_true',
            help='Do not use the available cached Pillar (if any).',
        )
        self.add_option(
            '--no-grains',
            default=False,
            action='store_true',
            help=('Do not attempt to collect Grains at all. Use with care, it '
                  'may lead to unexpected results.'),
        )
        self.add_option(
            '-i',
            '--ignore-host-keys',
            default=False,
            action='store_true',
            dest='ignore_host_keys',
            help=('By default ssh host keys are honored and connections will '
                  'ask for approval. Use this option to disable '
                  'StrictHostKeyChecking.'),
        )
        self.add_option(
            '--no-host-keys',
            default=False,
            action='store_true',
            dest='no_host_keys',
            help=(
                'Fully ignores ssh host keys which by default are honored and '
                'connections would ask for approval. Useful if the host key of '
                'a remote server has changed and would still error with '
                '--ignore-host-keys.'),
        )
        self.add_option(
            '--identities-only',
            default=False,
            action='store_true',
            dest='identities_only',
            help=('Use only the authentication identity files configured in '
                  'the ssh_config files. See the ``IdentitiesOnly`` flag in '
                  'man ssh_config.'),
        )
        self.add_option(
            '--priv',
            dest='priv',
            help=('Specify the SSH private key file to be used for '
                  'authentication.'),
        )
        self.add_option(
            '--priv-passwd',
            dest='priv_passwd',
            help=(
                'Specify the SSH private key file\'s passphrase when required.'
            ),
        )
        self.add_option(
            '--preload-targeting',
            default=False,
            action='store_true',
            help=(
                'Preload Grains for all the devices before targeting. '
                'This is useful to match the devices on dynamic Grains that '
                'do not require the connection with the remote device - e.g., '
                'Grains collected from an external API, etc.'),
        )
        self.add_option(
            '--invasive-targeting',
            default=False,
            action='store_true',
            help=('Collect all the possible data from every device salt-sproxy '
                  'is aware of, before distributing the commands. '
                  'In other words, this option tells salt-sproxy to connect to '
                  'every possible device defined in the Roster of choice, '
                  'collect Grains, compile Pillars, etc., and only then execute '
                  'the command against the devices matching the target '
                  'expression. Use with care, as this may significantly reduce '
                  'performance, but this is the price paid to be able to '
                  'target using device properties. Consider using this option '
                  'in conjunction with --cache-grains and / or --cache-pillar '
                  'to cache the Grains and the Pillars to re-use them straight '
                  'away next time.'),
        )
        self.add_option(
            '--no-pillar',
            default=False,
            action='store_true',
            help=(
                'Do not compile Pillar at all. Use with care, it may lead to '
                'unexpected results.'),
        )
        self.add_option(
            '-b',
            '--batch',
            '--batch-size',
            dest='batch_size',
            help=('The number of devices to connect to in parallel. '
                  'Default: {} (number of CPUs on your machine)'.format(
                      CPU_COUNT)),
        )
        self.add_option(
            '--preview-target',
            dest='preview_target',
            action='store_true',
            help='Show the devices expected to match the target.',
        )
        self.add_option(
            '--sync-all',
            dest='sync_all',
            action='store_true',
            help=(
                'Load all the extension modules provided with salt-sproxy, as '
                'well as the extension modules from your own environment.'),
        )
        self.add_option(
            '--sync-grains',
            dest='sync_grains',
            action='store_true',
            help=(
                'Re-sync the Grains modules. Useful if you have custom Grains '
                'modules in your own environment.'),
        )
        self.add_option(
            '--sync-modules',
            dest='sync_modules',
            action='store_true',
            help=('Load the salt-sproxy Execution modules.'),
        )
        self.add_option(
            '--sync-roster',
            dest='sync_roster',
            action='store_true',
            help=('Synchronise the Roster modules (both salt-sproxy native '
                  'and provided by the user in their own environment).'),
        )
        self.add_option(
            '--sync-proxy',
            dest='sync_proxy',
            action='store_true',
            help=('Load the salt-sproxy Proxy modules.'),
        )
        self.add_option(
            '--sync-executors',
            dest='sync_executors',
            action='store_true',
            help=('Load the salt-sproxy Executor modules.'),
        )
        self.add_option(
            '--saltenv',
            dest='saltenv_cli',
            help='The Salt environment name to load modules and files from.',
        )
        self.add_option(
            '--events',
            dest='events',
            action='store_true',
            help=('Whether to put the events on the Salt bus (mostly '
                  'useful when a Master is running).'),
        )
        self.add_option(
            '--use-proxy',
            '--use-existing-proxy',
            '--use-existing-minion',
            dest='use_existing_proxy',
            action='store_true',
            help=('Use the existing Proxy Minions to execute the commands, '
                  'whenever available.'),
        )
        self.add_option(
            '--pillar-root',
            default=None,
            help='Set this directory as the base pillar root.',
        )
        self.add_option(
            '--file-root',
            default=None,
            help='Set this directory as the base file root.',
        )
        self.add_option(
            '--states-dir',
            default=None,
            help='Set this directory to search for additional states.',
        )
        self.add_option(
            '-m',
            '--module-dirs',
            dest='module_dirs_cli',
            default=[],
            action='append',
            help=('Specify an additional directory to pull modules from. '
                  'Multiple directories can be provided by passing '
                  '`-m/--module-dirs` multiple times.'),
        )
        self.add_option(
            '--installation-path',
            dest='installation_path',
            action='store_true',
            help=(
                'Display the absolute path to where salt-sproxy is installed.'
            ),
        )
        self.add_option(
            '--display-file-roots',
            dest='display_file_roots',
            action='store_true',
            help=(
                'Display the file_roots option you would need to configure '
                'in order to use the salt-sproxy extension modules directly, '
                'and, implicitly, leverage the event-driven methodology and '
                'the Salt REST API.'),
        )
        self.add_option(
            '--save-file-roots',
            dest='save_file_roots',
            action='store_true',
            help=(
                'Saves the file_roots configuration so you can start '
                'leveraging the event-driven automation and the Salt REST API.'
            ),
        )
        self.add_option(
            '--config-dump',
            dest='config_dump',
            default=False,
            action='store_true',
            help='Dump the salt-sproxy configuration values.',
        )
        self.add_option(
            '--no-connect',
            dest='no_connect',
            action='store_true',
            default=False,
            help=(
                'Do not initiate the connection with the device, only use '
                'cached data to compile data and execute Salt functions that '
                'do not require the actual connection with the device.'),
        )
        self.add_option(
            '--test-ping',
            dest='test_ping',
            action='store_true',
            default=False,
            help=(
                'When used together with --use-existing-proxy, this option can '
                'help to ensure the existing Proxy Minion is responsive (not '
                'only up and running), by executing a ping test.'),
        )
        self.add_option(
            '--failhard',
            dest='failhard',
            action='store_true',
            default=False,
            help='Stop execution at the first execution error.',
        )
        self.add_option(
            '--target-cache',
            dest='target_cache',
            action='store_true',
            default=False,
            help=(
                'Cache the list of devices matched by your target expression.'
            ),
        )
        group = self.output_options_group = optparse.OptionGroup(
            self, 'Output Options', 'Configure your preferred output format.')
        self.add_option_group(group)

        group.add_option(
            '-q',
            '--quiet',
            default=False,
            action='store_true',
            help='Do not display the results of the run.',
        )
        self.add_option(
            '--summary',
            default=False,
            action='store_true',
            help='Display salt execution summary information.',
        )
        self.add_option(
            '-v',
            '--verbose',
            default=False,
            action='store_true',
            help='Turn on command verbosity, display jid and detailed summary.',
        )
        self.add_option(
            '--show-jid',
            default=False,
            action='store_true',
            help='Display jid without the additional output of --verbose.',
        )
        self.add_option(
            '--hide-timeout',
            default=False,
            action='store_true',
            help='Hide devices that timeout.',
        )
        self.add_option(
            '--batch-wait',
            default=0,
            type=float,
            help=('Wait the specified time in seconds after each batch is done '
                  'before executing the next one.'),
        )
        self.add_option(
            '-p',
            '--progress',
            default=False,
            action='store_true',
            help='Display a progress graph.',
        )
        self.add_option(
            '--return',
            dest='returner',
            default='',
            metavar='RETURNER',
            help=('The name of the Returner module to use for sending data to '
                  'various external systems.'),
        )
        self.add_option(
            '--return-config',
            dest='returner_config',
            default='',
            metavar='RETURNER_CONF',
            help='Specify an alternative Returner config.',
        )
        self.add_option(
            '--return-kwargs',
            dest='returner_kwargs',
            default={},
            metavar='RETURNER_KWARGS',
            help='Set Returner options at the command line.',
        )
        self.add_option(
            "-d",
            "--doc",
            "--documentation",
            dest="doc",
            default=False,
            action="store_true",
            help=('Return the documentation for the specified module or for '
                  'all modules if none are specified.'),
        )

    # Everything else that follows here is a verbatim copy from
    # https://github.com/saltstack/salt/blob/develop/salt/utils/parsers.py
    def _mixin_after_parsed(self):
        if (self.options.display_file_roots or self.options.installation_path
                or self.options.save_file_roots or self.options.config_dump):
            # Insert dummy arg when displaying the file_roots
            self.args.append('not_a_valid_target')
            self.args.append('not_a_valid_command')
        elif self.options.doc:
            if len(self.args) == 1:
                self.args.insert(0, 'not_a_valid_target')
            elif len(self.args) == 0:
                self.args.append('not_a_valid_target')
                self.args.append('*')

        if self.options.list:
            try:
                if ',' in self.args[0]:
                    self.config['tgt'] = self.args[0].replace(' ',
                                                              '').split(',')
                else:
                    self.config['tgt'] = self.args[0].split()
            except IndexError:
                self.exit(
                    42,
                    '\nCannot execute command without defining a target.\n\n')
        else:
            try:
                self.config['tgt'] = self.args[0]
            except IndexError:
                self.exit(
                    42,
                    '\nCannot execute command without defining a target.\n\n')

        if self.options.preview_target:
            # Insert dummy arg which won't be used
            self.args.append('not_a_valid_command')

        # Detect compound command and set up the data for it
        if self.args:
            try:
                if ',' in self.args[1]:
                    self.config['fun'] = self.args[1].split(',')
                    self.config['arg'] = [[]]
                    cmd_index = 0
                    if (self.args[2:].count(
                            self.options.args_separator) == len(
                                self.config['fun']) - 1):
                        # new style parsing: standalone argument separator
                        for arg in self.args[2:]:
                            if arg == self.options.args_separator:
                                cmd_index += 1
                                self.config['arg'].append([])
                            else:
                                self.config['arg'][cmd_index].append(arg)
                    else:
                        # old style parsing: argument separator can be inside args
                        for arg in self.args[2:]:
                            if self.options.args_separator in arg:
                                sub_args = arg.split(
                                    self.options.args_separator)
                                for sub_arg_index, sub_arg in enumerate(
                                        sub_args):
                                    if sub_arg:
                                        self.config['arg'][cmd_index].append(
                                            sub_arg)
                                    if sub_arg_index != len(sub_args) - 1:
                                        cmd_index += 1
                                        self.config['arg'].append([])
                            else:
                                self.config['arg'][cmd_index].append(arg)
                        if len(self.config['fun']) > len(self.config['arg']):
                            self.exit(
                                42,
                                'Cannot execute compound command without '
                                'defining all arguments.\n',
                            )
                        elif len(self.config['fun']) < len(self.config['arg']):
                            self.exit(
                                42,
                                'Cannot execute compound command with more '
                                'arguments than commands.\n',
                            )
                    # parse the args and kwargs before sending to the publish
                    # interface
                    for i in range(len(self.config['arg'])):
                        self.config['arg'][i] = salt.utils.args.parse_input(
                            self.config['arg'][i],
                            no_parse=self.options.no_parse)
                else:
                    self.config['fun'] = self.args[1]
                    self.config['arg'] = self.args[2:]
                    # parse the args and kwargs before sending to the publish
                    # interface
                    self.config['arg'] = salt.utils.args.parse_input(
                        self.config['arg'], no_parse=self.options.no_parse)
            except IndexError:
                self.exit(42, '\nIncomplete options passed.\n\n')

    def setup_config(self):
        defaults = config.DEFAULT_MASTER_OPTS.copy()
        defaults['timeout'] = 60
        return config.client_config(self.get_config_file_path(),
                                    defaults=defaults)
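For reference, the "new style" compound-command parsing in _mixin_after_parsed pairs each function with its own argument list, using a standalone separator token to move on to the next list. A self-contained sketch of that split (the helper name and the default ',' separator are assumptions for illustration):

# Sketch of the standalone-separator split used for compound commands:
# each separator token starts the argument list of the next function.
def split_compound(fun_csv, extra_args, separator=','):
    funs = fun_csv.split(',')
    arg_lists = [[]]
    for arg in extra_args:
        if arg == separator:
            arg_lists.append([])
        else:
            arg_lists[-1].append(arg)
    return funs, arg_lists

# e.g. split_compound('test.ping,cmd.run', [',', 'uptime'])
# -> (['test.ping', 'cmd.run'], [[], ['uptime']])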