Example #1
    def _parse_config_file(self, cfile=None):
        ''' return flat configuration settings from file(s) '''
        # TODO: take list of files with merge/nomerge

        if cfile is None:
            cfile = self._config_file

        ftype = get_config_type(cfile)
        if cfile is not None:
            if ftype == 'ini':
                self._parsers[cfile] = configparser.ConfigParser()
                with open(to_bytes(cfile), 'rb') as f:
                    try:
                        cfg_text = to_text(f.read(), errors='surrogate_or_strict')
                    except UnicodeError as e:
                        raise AssibleOptionsError("Error reading config file(%s) because the config file was not utf8 encoded: %s" % (cfile, to_native(e)))
                try:
                    if PY3:
                        self._parsers[cfile].read_string(cfg_text)
                    else:
                        cfg_file = io.StringIO(cfg_text)
                        self._parsers[cfile].readfp(cfg_file)
                except configparser.Error as e:
                    raise AssibleOptionsError("Error reading config file (%s): %s" % (cfile, to_native(e)))
            # FIXME: this should eventually handle yaml config files
            # elif ftype == 'yaml':
            #     with open(cfile, 'rb') as config_stream:
            #         self._parsers[cfile] = yaml.safe_load(config_stream)
            else:
                raise AssibleOptionsError("Unsupported configuration file type: %s" % to_native(ftype))
Example #2
    def post_process_args(self, options):
        options = super(InventoryCLI, self).post_process_args(options)

        display.verbosity = options.verbosity
        self.validate_conflicts(options)

        # there can be only one! and, at least, one!
        used = 0
        for opt in (options.list, options.host, options.graph):
            if opt:
                used += 1
        if used == 0:
            raise AssibleOptionsError(
                "No action selected, at least one of --host, --graph or --list needs to be specified."
            )
        elif used > 1:
            raise AssibleOptionsError(
                "Conflicting options used, only one of --host, --graph or --list can be used at the same time."
            )

        # set host pattern to default if not supplied
        if options.args:
            options.pattern = options.args
        else:
            options.pattern = 'all'

        return options
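
A minimal standalone sketch of the same "exactly one action" check used above; the helper and its option names are illustrative, not taken from the CLI class:

def require_exactly_one(**flags):
    # count the truthy options, mirroring the used-counter loop above
    chosen = [name for name, value in flags.items() if value]
    if not chosen:
        raise ValueError("No action selected: pass exactly one of %s" % ", ".join(flags))
    if len(chosen) > 1:
        raise ValueError("Conflicting options used: %s" % ", ".join(chosen))
    return chosen[0]

# e.g. mirrors --list/--host/--graph being mutually exclusive and required
assert require_exactly_one(list=True, host=False, graph=False) == 'list'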
Example #3
    def post_process_args(self, options):
        options = super(VaultCLI, self).post_process_args(options)

        display.verbosity = options.verbosity

        if options.vault_ids:
            for vault_id in options.vault_ids:
                if u';' in vault_id:
                    raise AssibleOptionsError(
                        "'%s' is not a valid vault id. The character ';' is not allowed in vault ids"
                        % vault_id)

        if getattr(options, 'output_file', None) and len(options.args) > 1:
            raise AssibleOptionsError(
                "At most one input file may be used with the --output option")

        if options.action == 'encrypt_string':
            if '-' in options.args or not options.args or options.encrypt_string_stdin_name:
                self.encrypt_string_read_stdin = True

            # TODO: prompting from stdin and reading from stdin seem mutually exclusive, but verify that.
            if options.encrypt_string_prompt and self.encrypt_string_read_stdin:
                raise AssibleOptionsError(
                    'The --prompt option is not supported if also reading input from stdin'
                )

        return options
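
A self-contained sketch of the stdin decision above for encrypt_string; the helper name is made up for illustration:

def wants_stdin(args, stdin_name=None):
    # read from stdin when '-' is passed, when there are no args at all,
    # or when a --stdin-name was supplied (same condition as above)
    return '-' in args or not args or bool(stdin_name)

assert wants_stdin(['-'])
assert wants_stdin([])
assert wants_stdin(['secret'], stdin_name='my_var')
assert not wants_stdin(['secret'])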
Example #4
def load_extra_vars(loader):
    extra_vars = {}
    for extra_vars_opt in context.CLIARGS.get('extra_vars', tuple()):
        data = None
        extra_vars_opt = to_text(extra_vars_opt, errors='surrogate_or_strict')
        if extra_vars_opt is None or not extra_vars_opt:
            continue

        if extra_vars_opt.startswith(u"@"):
            # Argument is a YAML file (JSON is a subset of YAML)
            data = loader.load_from_file(extra_vars_opt[1:])
        elif extra_vars_opt[0] in [u'/', u'.']:
            raise AssibleOptionsError("Please prepend extra_vars filename '%s' with '@'" % extra_vars_opt)
        elif extra_vars_opt[0] in [u'[', u'{']:
            # Arguments as YAML
            data = loader.load(extra_vars_opt)
        else:
            # Arguments as Key-value
            data = parse_kv(extra_vars_opt)

        if isinstance(data, MutableMapping):
            extra_vars = combine_vars(extra_vars, data)
        else:
            raise AssibleOptionsError("Invalid extra vars data supplied. '%s' could not be made into a dictionary" % extra_vars_opt)

    return extra_vars
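
A simplified, self-contained sketch of how load_extra_vars above dispatches a single -e/--extra-vars value; the helper is illustrative and does not call the real loader:

def classify_extra_vars(opt):
    if opt.startswith(u'@'):
        return 'file'          # YAML/JSON file, e.g. -e @vars.yml
    if opt[:1] in (u'/', u'.'):
        raise ValueError("Please prepend extra_vars filename '%s' with '@'" % opt)
    if opt[:1] in (u'[', u'{'):
        return 'inline'        # inline YAML/JSON, e.g. -e '{"a": 1}'
    return 'key=value'         # e.g. -e 'a=1 b=2'

assert classify_extra_vars(u'@vars.yml') == 'file'
assert classify_extra_vars(u'{"a": 1}') == 'inline'
assert classify_extra_vars(u'a=1 b=2') == 'key=value'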
Example #5
    def post_process_args(self, options):
        options = super(PullCLI, self).post_process_args(options)

        if not options.dest:
            hostname = socket.getfqdn()
            # use a hostname dependent directory, in case of $HOME on nfs
            options.dest = os.path.join('~/.assible/pull', hostname)
        options.dest = os.path.expandvars(os.path.expanduser(options.dest))

        if os.path.exists(options.dest) and not os.path.isdir(options.dest):
            raise AssibleOptionsError(
                "%s is not a valid or accessible directory." % options.dest)

        if options.sleep:
            try:
                secs = random.randint(0, int(options.sleep))
                options.sleep = secs
            except ValueError:
                raise AssibleOptionsError("%s is not a number." %
                                          options.sleep)

        if not options.url:
            raise AssibleOptionsError(
                "URL for repository not specified, use -h for help")

        if options.module_name not in self.SUPPORTED_REPO_MODULES:
            raise AssibleOptionsError(
                "Unsupported repo module %s, choices are %s" %
                (options.module_name, ','.join(self.SUPPORTED_REPO_MODULES)))

        display.verbosity = options.verbosity
        self.validate_conflicts(options)

        return options
Example #6
    def inventory_graph(self):

        start_at = self._get_group(context.CLIARGS['pattern'])
        if start_at:
            return '\n'.join(self._graph_group(start_at))
        else:
            raise AssibleOptionsError(
                "Pattern must be valid group name when using --graph")
Example #7
    def execute_create(self):
        ''' create and open a file in an editor; the file will be encrypted with the provided vault secret when closed '''

        if len(context.CLIARGS['args']) != 1:
            raise AssibleOptionsError(
                "assible-vault create can take only one filename argument")

        self.editor.create_file(context.CLIARGS['args'][0],
                                self.encrypt_secret,
                                vault_id=self.encrypt_vault_id)
Example #8
    def run(self):

        super(ConfigCLI, self).run()

        if context.CLIARGS['config_file']:
            self.config_file = unfrackpath(context.CLIARGS['config_file'],
                                           follow=False)
            b_config = to_bytes(self.config_file)
            if os.path.exists(b_config) and os.access(b_config, os.R_OK):
                self.config = ConfigManager(self.config_file)
            else:
                raise AssibleOptionsError(
                    'The provided configuration file is missing or not accessible: %s'
                    % to_native(self.config_file))
        else:
            self.config = ConfigManager()
            self.config_file = find_ini_config_file()

        if self.config_file:
            try:
                if not os.path.exists(self.config_file):
                    raise AssibleOptionsError(
                        "%s does not exist or is not accessible" %
                        (self.config_file))
                elif not os.path.isfile(self.config_file):
                    raise AssibleOptionsError("%s is not a valid file" %
                                              (self.config_file))

                os.environ['ASSIBLE_CONFIG'] = to_native(self.config_file)
            except Exception:
                if context.CLIARGS['action'] in ['view']:
                    raise
                elif context.CLIARGS['action'] in ['edit', 'update']:
                    display.warning(
                        "File does not exist, used empty file: %s" %
                        self.config_file)

        elif context.CLIARGS['action'] == 'view':
            raise AssibleError('Invalid or no config file was supplied')

        context.CLIARGS['func']()
Example #9
    def update_config_data(self, defs=None, configfile=None):
        ''' really: update constants '''

        if defs is None:
            defs = self._base_defs

        if configfile is None:
            configfile = self._config_file

        if not isinstance(defs, dict):
            raise AssibleOptionsError("Invalid configuration definition type: %s for %s" % (type(defs), defs))

        # update the constant for config file
        self.data.update_setting(Setting('CONFIG_FILE', configfile, '', 'string'))

        origin = None
        # env and config defs can have several entries, ordered in list from lowest to highest precedence
        for config in defs:
            if not isinstance(defs[config], dict):
                raise AssibleOptionsError("Invalid configuration definition '%s': type is %s" % (to_native(config), type(defs[config])))

            # get value and origin
            try:
                value, origin = self.get_config_value_and_origin(config, configfile)
            except Exception as e:
                # Printing the problem here because, in the current code:
                # (1) we can't reach the error handler for AssibleError before we
                #     hit a different error due to lack of working config.
                # (2) We don't have access to display yet because display depends on config
                #     being properly loaded.
                #
                # If we start getting double errors printed from this section of code, then the
                # above problem #1 has been fixed.  Revamp this to be more like the try: except
                # in get_config_value() at that time.
                sys.stderr.write("Unhandled error:\n %s\n\n" % traceback.format_exc())
                raise AssibleError("Invalid settings supplied for %s: %s\n" % (config, to_native(e)), orig_exc=e)

            # set the constant
            self.data.update_setting(Setting(config, value, origin, defs[config].get('type', 'string')))
Example #10
def get_config_type(cfile):

    ftype = None
    if cfile is not None:
        ext = os.path.splitext(cfile)[-1]
        if ext in ('.ini', '.cfg'):
            ftype = 'ini'
        elif ext in ('.yaml', '.yml'):
            ftype = 'yaml'
        else:
            raise AssibleOptionsError("Unsupported configuration file extension for %s: %s" % (cfile, to_native(ext)))

    return ftype
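
A quick usage sketch for get_config_type above, assuming get_config_type and AssibleOptionsError are importable; the file names are illustrative:

for path in ('/etc/assible/assible.cfg', 'local.ini', 'config.yml', None):
    print(path, '->', get_config_type(path))    # -> 'ini', 'ini', 'yaml', None

try:
    get_config_type('settings.json')            # unsupported extension
except AssibleOptionsError as e:
    print('rejected:', e)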
Example #11
    def run(self):

        super(InventoryCLI, self).run()

        # Initialize needed objects
        self.loader, self.inventory, self.vm = self._play_prereqs()

        results = None
        if context.CLIARGS['host']:
            hosts = self.inventory.get_hosts(context.CLIARGS['host'])
            if len(hosts) != 1:
                raise AssibleOptionsError(
                    "You must pass a single valid host to --host parameter")

            myvars = self._get_host_variables(host=hosts[0])

            # FIXME: should we template first?
            results = self.dump(myvars)

        elif context.CLIARGS['graph']:
            results = self.inventory_graph()
        elif context.CLIARGS['list']:
            top = self._get_group('all')
            if context.CLIARGS['yaml']:
                results = self.yaml_inventory(top)
            elif context.CLIARGS['toml']:
                results = self.toml_inventory(top)
            else:
                results = self.json_inventory(top)
            results = self.dump(results)

        if results:
            outfile = context.CLIARGS['output_file']
            if outfile is None:
                # FIXME: pager?
                display.display(results)
            else:
                try:
                    with open(to_bytes(outfile), 'wt') as f:
                        f.write(results)
                except (OSError, IOError) as e:
                    raise AssibleError(
                        'Unable to write to destination file (%s): %s' %
                        (to_native(outfile), to_native(e)))
            sys.exit(0)

        sys.exit(1)
Example #12
    def execute_update(self):
        '''
        Updates a single setting in the specified assible.cfg
        '''
        raise AssibleError("Option not implemented yet")

        # pylint: disable=unreachable
        if context.CLIARGS['setting'] is None:
            raise AssibleOptionsError(
                "update option requires a setting to update")

        (entry, value) = context.CLIARGS['setting'].split('=')
        if '.' in entry:
            (section, option) = entry.split('.')
        else:
            section = 'defaults'
            option = entry
        subprocess.call([
            'assible', '-m', 'ini_file', 'localhost', '-c', 'local', '-a',
            '"dest=%s section=%s option=%s value=%s backup=yes"' %
            (self.config_file, section, option, value)
        ])
Example #13
    def run(self):

        super(DocCLI, self).run()

        plugin_type = context.CLIARGS['type']
        do_json = context.CLIARGS['json_format']

        if plugin_type in C.DOCUMENTABLE_PLUGINS:
            loader = getattr(plugin_loader, '%s_loader' % plugin_type)
        else:
            raise AssibleOptionsError(
                "Unknown or undocumentable plugin type: %s" % plugin_type)

        # add to plugin paths from command line
        basedir = context.CLIARGS['basedir']
        if basedir:
            AssibleCollectionConfig.playbook_paths = basedir
            loader.add_directory(basedir, with_subdir=True)

        if context.CLIARGS['module_path']:
            for path in context.CLIARGS['module_path']:
                if path:
                    loader.add_directory(path)

        # save only top level paths for errors
        search_paths = DocCLI.print_paths(loader)
        loader._paths = None  # reset so we can use subdirs below

        # list plugin names or file paths for the type; both options share most of the code
        if context.CLIARGS['list_files'] or context.CLIARGS['list_dir']:

            coll_filter = None
            if len(context.CLIARGS['args']) == 1:
                coll_filter = context.CLIARGS['args'][0]

            if coll_filter in ('', None):
                paths = loader._get_paths_with_context()
                for path_context in paths:
                    self.plugin_list.update(
                        DocCLI.find_plugins(path_context.path,
                                            path_context.internal,
                                            plugin_type))

            add_collection_plugins(self.plugin_list,
                                   plugin_type,
                                   coll_filter=coll_filter)

            # get appropriate content depending on option
            if context.CLIARGS['list_dir']:
                results = self._get_plugin_list_descriptions(loader)
            elif context.CLIARGS['list_files']:
                results = self._get_plugin_list_filenames(loader)

            if do_json:
                jdump(results)
            elif self.plugin_list:
                self.display_plugin_list(results)
            else:
                display.warning("No plugins found.")
        # dump plugin desc/data as JSON
        elif context.CLIARGS['dump']:
            plugin_data = {}
            plugin_names = DocCLI.get_all_plugins_of_type(plugin_type)
            for plugin_name in plugin_names:
                plugin_info = DocCLI.get_plugin_metadata(
                    plugin_type, plugin_name)
                if plugin_info is not None:
                    plugin_data[plugin_name] = plugin_info

            jdump(plugin_data)
        else:
            # display specific plugin docs
            if len(context.CLIARGS['args']) == 0:
                raise AssibleOptionsError("Incorrect options passed")

            # get the docs for plugins in the command line list
            plugin_docs = {}
            for plugin in context.CLIARGS['args']:
                try:
                    doc, plainexamples, returndocs, metadata = DocCLI._get_plugin_doc(
                        plugin, plugin_type, loader, search_paths)
                except PluginNotFound:
                    display.warning("%s %s not found in:\n%s\n" %
                                    (plugin_type, plugin, search_paths))
                    continue
                except Exception as e:
                    display.vvv(traceback.format_exc())
                    raise AssibleError(
                        "%s %s missing documentation (or could not parse"
                        " documentation): %s\n" %
                        (plugin_type, plugin, to_native(e)))

                if not doc:
                    # The doc section existed but was empty
                    continue

                plugin_docs[plugin] = {
                    'doc': doc,
                    'examples': plainexamples,
                    'return': returndocs,
                    'metadata': metadata
                }

            if do_json:
                jdump(plugin_docs)

            else:
                # Some changes to how plain text docs are formatted
                text = []
                for plugin, doc_data in plugin_docs.items():
                    textret = DocCLI.format_plugin_doc(plugin, plugin_type,
                                                       doc_data['doc'],
                                                       doc_data['examples'],
                                                       doc_data['return'],
                                                       doc_data['metadata'])
                    if textret:
                        text.append(textret)
                    else:
                        display.warning(
                            "No valid documentation was retrieved from '%s'" %
                            plugin)

                if text:
                    DocCLI.pager(''.join(text))

        return 0
Example #14
    def run(self):
        super(VaultCLI, self).run()
        loader = DataLoader()

        # set default restrictive umask
        old_umask = os.umask(0o077)

        vault_ids = list(context.CLIARGS['vault_ids'])

        # There are three kinds of actions: those that only 'read' (decrypt, view)
        # and ask for a password once; those that 'write' (create, encrypt) and ask
        # for a new password and confirm it; and 'read/write' (rekey), which asks
        # for the old password, then asks for a new one and confirms it.

        default_vault_ids = C.DEFAULT_VAULT_IDENTITY_LIST
        vault_ids = default_vault_ids + vault_ids

        action = context.CLIARGS['action']

        # TODO: instead of prompting for these before, we could let VaultEditor
        #       call a callback when it needs it.
        if action in ['decrypt', 'view', 'rekey', 'edit']:
            vault_secrets = self.setup_vault_secrets(
                loader,
                vault_ids=vault_ids,
                vault_password_files=list(
                    context.CLIARGS['vault_password_files']),
                ask_vault_pass=context.CLIARGS['ask_vault_pass'])
            if not vault_secrets:
                raise AssibleOptionsError(
                    "A vault password is required to use Assible's Vault")

        if action in ['encrypt', 'encrypt_string', 'create']:

            encrypt_vault_id = None
            # no --encrypt-vault-id context.CLIARGS['encrypt_vault_id'] for 'edit'
            if action not in ['edit']:
                encrypt_vault_id = context.CLIARGS[
                    'encrypt_vault_id'] or C.DEFAULT_VAULT_ENCRYPT_IDENTITY

            vault_secrets = None
            vault_secrets = \
                self.setup_vault_secrets(loader,
                                         vault_ids=vault_ids,
                                         vault_password_files=list(context.CLIARGS['vault_password_files']),
                                         ask_vault_pass=context.CLIARGS['ask_vault_pass'],
                                         create_new_password=True)

            if len(vault_secrets) > 1 and not encrypt_vault_id:
                raise AssibleOptionsError(
                    "The vault-ids %s are available to encrypt. Specify the vault-id to encrypt with --encrypt-vault-id"
                    % ','.join([x[0] for x in vault_secrets]))

            if not vault_secrets:
                raise AssibleOptionsError(
                    "A vault password is required to use Assible's Vault")

            encrypt_secret = match_encrypt_secret(
                vault_secrets, encrypt_vault_id=encrypt_vault_id)

            # only one secret for encrypt for now, use the first vault_id and use its first secret
            # TODO: exception if more than one?
            self.encrypt_vault_id = encrypt_secret[0]
            self.encrypt_secret = encrypt_secret[1]

        if action in ['rekey']:
            encrypt_vault_id = context.CLIARGS[
                'encrypt_vault_id'] or C.DEFAULT_VAULT_ENCRYPT_IDENTITY
            # print('encrypt_vault_id: %s' % encrypt_vault_id)
            # print('default_encrypt_vault_id: %s' % default_encrypt_vault_id)

            # new_vault_ids should only ever be one item:
            # load the default vault ids if we are using encrypt-vault-id
            new_vault_ids = []
            if encrypt_vault_id:
                new_vault_ids = default_vault_ids
            if context.CLIARGS['new_vault_id']:
                new_vault_ids.append(context.CLIARGS['new_vault_id'])

            new_vault_password_files = []
            if context.CLIARGS['new_vault_password_file']:
                new_vault_password_files.append(
                    context.CLIARGS['new_vault_password_file'])

            new_vault_secrets = \
                self.setup_vault_secrets(loader,
                                         vault_ids=new_vault_ids,
                                         vault_password_files=new_vault_password_files,
                                         ask_vault_pass=context.CLIARGS['ask_vault_pass'],
                                         create_new_password=True)

            if not new_vault_secrets:
                raise AssibleOptionsError(
                    "A new vault password is required to use Assible's Vault rekey"
                )

            # There is only one new_vault_id currently and one new_vault_secret, or we
            # use the id specified in --encrypt-vault-id
            new_encrypt_secret = match_encrypt_secret(
                new_vault_secrets, encrypt_vault_id=encrypt_vault_id)

            self.new_encrypt_vault_id = new_encrypt_secret[0]
            self.new_encrypt_secret = new_encrypt_secret[1]

        loader.set_vault_secrets(vault_secrets)

        # FIXME: do we need to create VaultEditor here? its not reused
        vault = VaultLib(vault_secrets)
        self.editor = VaultEditor(vault)

        context.CLIARGS['func']()

        # and restore umask
        os.umask(old_umask)
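
A plain-data summary of which secrets run() above collects per action (not code from the CLI itself):

VAULT_ACTION_SECRETS = {
    'decrypt': 'existing vault password',
    'view': 'existing vault password',
    'edit': 'existing vault password',
    'encrypt': 'new vault password (confirmed)',
    'encrypt_string': 'new vault password (confirmed)',
    'create': 'new vault password (confirmed)',
    'rekey': 'existing vault password, then a new one (confirmed)',
}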
Example #15
    def get_config_value_and_origin(self, config, cfile=None, plugin_type=None, plugin_name=None, keys=None, variables=None, direct=None):
        ''' Given a config key figure out the actual value and report on the origin of the settings '''
        if cfile is None:
            # use default config
            cfile = self._config_file

        # Note: sources that are lists are ordered from low to high precedence (last one wins)
        value = None
        origin = None

        defs = self.get_configuration_definitions(plugin_type, plugin_name)
        if config in defs:

            aliases = defs[config].get('aliases', [])

            # direct setting via plugin arguments, can set to None so we bypass rest of processing/defaults
            direct_aliases = []
            if direct:
                direct_aliases = [direct[alias] for alias in aliases if alias in direct]
            if direct and config in direct:
                value = direct[config]
                origin = 'Direct'
            elif direct and direct_aliases:
                value = direct_aliases[0]
                origin = 'Direct'

            else:
                # Use 'variable overrides' if present, highest precedence, but only present when querying running play
                if variables and defs[config].get('vars'):
                    value, origin = self._loop_entries(variables, defs[config]['vars'])
                    origin = 'var: %s' % origin

                # use playbook keywords if you have em
                if value is None and keys:
                    if config in keys:
                        value = keys[config]
                        keyword = config

                    elif aliases:
                        for alias in aliases:
                            if alias in keys:
                                value = keys[alias]
                                keyword = alias
                                break

                    if value is not None:
                        origin = 'keyword: %s' % keyword

                # env vars are next precedence
                if value is None and defs[config].get('env'):
                    value, origin = self._loop_entries(py3compat.environ, defs[config]['env'])
                    origin = 'env: %s' % origin

                # try config file entries next, if we have one
                if self._parsers.get(cfile, None) is None:
                    self._parse_config_file(cfile)

                if value is None and cfile is not None:
                    ftype = get_config_type(cfile)
                    if ftype and defs[config].get(ftype):
                        if ftype == 'ini':
                            # load from ini config
                            try:  # FIXME: generalize _loop_entries to allow for files also, most of this code is dupe
                                for ini_entry in defs[config]['ini']:
                                    temp_value = get_ini_config_value(self._parsers[cfile], ini_entry)
                                    if temp_value is not None:
                                        value = temp_value
                                        origin = cfile
                                        if 'deprecated' in ini_entry:
                                            self.DEPRECATED.append(('[%s]%s' % (ini_entry['section'], ini_entry['key']), ini_entry['deprecated']))
                            except Exception as e:
                                sys.stderr.write("Error while loading ini config %s: %s" % (cfile, to_native(e)))
                        elif ftype == 'yaml':
                            # FIXME: implement, also , break down key from defs (. notation???)
                            origin = cfile

                # set default if we got here w/o a value
                if value is None:
                    if defs[config].get('required', False):
                        if not plugin_type or config not in INTERNAL_DEFS.get(plugin_type, {}):
                            raise AssibleError("No setting was provided for required configuration %s" %
                                               to_native(_get_entry(plugin_type, plugin_name, config)))
                    else:
                        value = defs[config].get('default')
                        origin = 'default'
                        # skip typing as this is a templated default that will be resolved later in constants, which has needed vars
                        if plugin_type is None and isinstance(value, string_types) and (value.startswith('{{') and value.endswith('}}')):
                            return value, origin

            # ensure correct type, can raise exceptions on mismatched types
            try:
                value = ensure_type(value, defs[config].get('type'), origin=origin)
            except ValueError as e:
                if origin.startswith('env:') and value == '':
                    # this is empty env var for non string so we can set to default
                    origin = 'default'
                    value = ensure_type(defs[config].get('default'), defs[config].get('type'), origin=origin)
                else:
                    raise AssibleOptionsError('Invalid type for configuration option %s: %s' %
                                              (to_native(_get_entry(plugin_type, plugin_name, config)), to_native(e)))

            # deal with deprecation of the setting
            if 'deprecated' in defs[config] and origin != 'default':
                self.DEPRECATED.append((config, defs[config].get('deprecated')))
        else:
            raise AssibleError('Requested entry (%s) was not defined in configuration.' % to_native(_get_entry(plugin_type, plugin_name, config)))

        return value, origin
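
The precedence that get_config_value_and_origin walks through, highest first; this is a simplified summary of the branches above, not a reimplementation:

CONFIG_PRECEDENCE = (
    'direct',    # plugin arguments passed via `direct` (can even set None)
    'var',       # play variables, when `variables` is supplied and the def has 'vars'
    'keyword',   # playbook keywords, when `keys` is supplied
    'env',       # environment variables listed in the def's 'env' entries
    'ini',       # the ini config file parsed by _parse_config_file
    'default',   # the def's 'default' value
)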
Example #16
    def execute_encrypt_string(self):
        ''' encrypt the supplied string using the provided vault secret '''
        b_plaintext = None

        # Holds tuples (the_text, the_source_of_the_string, the variable name if it's provided).
        b_plaintext_list = []

        # remove the non-option '-' arg (used to indicate 'read from stdin') from the candidate args so
        # we don't add it to the plaintext list
        args = [x for x in context.CLIARGS['args'] if x != '-']

        # We can prompt and read input, or read from stdin, but not both.
        if context.CLIARGS['encrypt_string_prompt']:
            msg = "String to encrypt: "

            name = None
            name_prompt_response = display.prompt(
                'Variable name (enter for no name): ')

            # TODO: enforce var naming rules?
            if name_prompt_response != "":
                name = name_prompt_response

            # TODO: could prompt for which vault_id to use for each plaintext string
            #       currently, it will just be the default
            # could use private=True for shadowed input if useful
            prompt_response = display.prompt(msg)

            if prompt_response == '':
                raise AssibleOptionsError(
                    'The plaintext provided from the prompt was empty, not encrypting'
                )

            b_plaintext = to_bytes(prompt_response)
            b_plaintext_list.append((b_plaintext, self.FROM_PROMPT, name))

        # read from stdin
        if self.encrypt_string_read_stdin:
            if sys.stdout.isatty():
                display.display(
                    "Reading plaintext input from stdin. (ctrl-d to end input, twice if your content does not already have a newline)",
                    stderr=True)

            stdin_text = sys.stdin.read()
            if stdin_text == '':
                raise AssibleOptionsError('stdin was empty, not encrypting')

            if sys.stdout.isatty() and not stdin_text.endswith("\n"):
                display.display("\n")

            b_plaintext = to_bytes(stdin_text)

            # defaults to None
            name = context.CLIARGS['encrypt_string_stdin_name']
            b_plaintext_list.append((b_plaintext, self.FROM_STDIN, name))

        # use any leftover args as strings to encrypt
        # Try to match args up to --name options
        if context.CLIARGS.get('encrypt_string_names', False):
            name_and_text_list = list(
                zip(context.CLIARGS['encrypt_string_names'], args))

            # Some but not enough --name's to name each var
            if len(args) > len(name_and_text_list):
                # Keep this warning vague so the plaintext is never shown in the output.
                display.display(
                    'The number of --name options does not match the number of args.',
                    stderr=True)
                display.display(
                    'The last named variable will be "%s". The rest will not have'
                    ' names.' % context.CLIARGS['encrypt_string_names'][-1],
                    stderr=True)

            # Add the rest of the args without specifying a name
            for extra_arg in args[len(name_and_text_list):]:
                name_and_text_list.append((None, extra_arg))

        # if no --names are provided, just use the args without a name.
        else:
            name_and_text_list = [(None, x) for x in args]

        # Convert the plaintext text objects to bytestrings and collect
        for name_and_text in name_and_text_list:
            name, plaintext = name_and_text

            if plaintext == '':
                raise AssibleOptionsError(
                    'The plaintext provided from the command line args was empty, not encrypting'
                )

            b_plaintext = to_bytes(plaintext)
            b_plaintext_list.append((b_plaintext, self.FROM_ARGS, name))

        # TODO: specify vault_id per string?
        # Format the encrypted strings and any corresponding stderr output
        outputs = self._format_output_vault_strings(
            b_plaintext_list, vault_id=self.encrypt_vault_id)

        for output in outputs:
            err = output.get('err', None)
            out = output.get('out', '')
            if err:
                sys.stderr.write(err)
            print(out)

        if sys.stdout.isatty():
            display.display("Encryption successful", stderr=True)
Example #17
    def run(self):
        ''' use Runner lib to do SSH things '''

        super(PullCLI, self).run()

        # log command line
        now = datetime.datetime.now()
        display.display(now.strftime("Starting Assible Pull at %F %T"))
        display.display(' '.join(sys.argv))

        # Build Checkout command
        # Now construct the assible command
        node = platform.node()
        host = socket.getfqdn()
        limit_opts = 'localhost,%s,127.0.0.1' % ','.join(
            set([host, node,
                 host.split('.')[0],
                 node.split('.')[0]]))
        base_opts = '-c local '
        if context.CLIARGS['verbosity'] > 0:
            base_opts += ' -%s' % ''.join(
                ["v" for x in range(0, context.CLIARGS['verbosity'])])

        # Attempt to use the inventory passed in as an argument
        # It might not yet have been downloaded so use localhost as default
        inv_opts = self._get_inv_cli()
        if not inv_opts:
            inv_opts = " -i localhost, "
            # avoid interpreter discovery since we already know which interpreter to use on localhost
            inv_opts += '-e %s ' % shlex_quote(
                'assible_python_interpreter=%s' % sys.executable)

        # SCM specific options
        if context.CLIARGS['module_name'] == 'git':
            repo_opts = "name=%s dest=%s" % (context.CLIARGS['url'],
                                             context.CLIARGS['dest'])
            if context.CLIARGS['checkout']:
                repo_opts += ' version=%s' % context.CLIARGS['checkout']

            if context.CLIARGS['accept_host_key']:
                repo_opts += ' accept_hostkey=yes'

            if context.CLIARGS['private_key_file']:
                repo_opts += ' key_file=%s' % context.CLIARGS[
                    'private_key_file']

            if context.CLIARGS['verify']:
                repo_opts += ' verify_commit=yes'

            if context.CLIARGS['tracksubs']:
                repo_opts += ' track_submodules=yes'

            if not context.CLIARGS['fullclone']:
                repo_opts += ' depth=1'
        elif context.CLIARGS['module_name'] == 'subversion':
            repo_opts = "repo=%s dest=%s" % (context.CLIARGS['url'],
                                             context.CLIARGS['dest'])
            if context.CLIARGS['checkout']:
                repo_opts += ' revision=%s' % context.CLIARGS['checkout']
            if not context.CLIARGS['fullclone']:
                repo_opts += ' export=yes'
        elif context.CLIARGS['module_name'] == 'hg':
            repo_opts = "repo=%s dest=%s" % (context.CLIARGS['url'],
                                             context.CLIARGS['dest'])
            if context.CLIARGS['checkout']:
                repo_opts += ' revision=%s' % context.CLIARGS['checkout']
        elif context.CLIARGS['module_name'] == 'bzr':
            repo_opts = "name=%s dest=%s" % (context.CLIARGS['url'],
                                             context.CLIARGS['dest'])
            if context.CLIARGS['checkout']:
                repo_opts += ' version=%s' % context.CLIARGS['checkout']
        else:
            raise AssibleOptionsError(
                'Unsupported (%s) SCM module for pull, choices are: %s' %
                (context.CLIARGS['module_name'], ','.join(self.REPO_CHOICES)))

        # options common to all supported SCMS
        if context.CLIARGS['clean']:
            repo_opts += ' force=yes'

        path = module_loader.find_plugin(context.CLIARGS['module_name'])
        if path is None:
            raise AssibleOptionsError(
                ("module '%s' not found.\n" % context.CLIARGS['module_name']))

        bin_path = os.path.dirname(os.path.abspath(sys.argv[0]))
        # hardcode local and inventory/host as this is just meant to fetch the repo
        cmd = '%s/assible %s %s -m %s -a "%s" all -l "%s"' % (
            bin_path, inv_opts, base_opts, context.CLIARGS['module_name'],
            repo_opts, limit_opts)
        for ev in context.CLIARGS['extra_vars']:
            cmd += ' -e %s' % shlex_quote(ev)

        # Nap?
        if context.CLIARGS['sleep']:
            display.display("Sleeping for %d seconds..." %
                            context.CLIARGS['sleep'])
            time.sleep(context.CLIARGS['sleep'])

        # RUN the Checkout command
        display.debug("running assible with VCS module to checkout repo")
        display.vvvv('EXEC: %s' % cmd)
        rc, b_out, b_err = run_cmd(cmd, live=True)

        if rc != 0:
            if context.CLIARGS['force']:
                display.warning(
                    "Unable to update repository. Continuing with (forced) run of playbook."
                )
            else:
                return rc
        elif context.CLIARGS['ifchanged'] and b'"changed": true' not in b_out:
            display.display("Repository has not changed, quitting.")
            return 0

        playbook = self.select_playbook(context.CLIARGS['dest'])
        if playbook is None:
            raise AssibleOptionsError("Could not find a playbook to run.")

        # Build playbook command
        cmd = '%s/assible-playbook %s %s' % (bin_path, base_opts, playbook)
        if context.CLIARGS['vault_password_files']:
            for vault_password_file in context.CLIARGS['vault_password_files']:
                cmd += " --vault-password-file=%s" % vault_password_file
        if context.CLIARGS['vault_ids']:
            for vault_id in context.CLIARGS['vault_ids']:
                cmd += " --vault-id=%s" % vault_id

        for ev in context.CLIARGS['extra_vars']:
            cmd += ' -e %s' % shlex_quote(ev)
        if context.CLIARGS['become_ask_pass']:
            cmd += ' --ask-become-pass'
        if context.CLIARGS['skip_tags']:
            cmd += ' --skip-tags "%s"' % to_native(u','.join(
                context.CLIARGS['skip_tags']))
        if context.CLIARGS['tags']:
            cmd += ' -t "%s"' % to_native(u','.join(context.CLIARGS['tags']))
        if context.CLIARGS['subset']:
            cmd += ' -l "%s"' % context.CLIARGS['subset']
        else:
            cmd += ' -l "%s"' % limit_opts
        if context.CLIARGS['check']:
            cmd += ' -C'
        if context.CLIARGS['diff']:
            cmd += ' -D'

        os.chdir(context.CLIARGS['dest'])

        # redo inventory options as new files might exist now
        inv_opts = self._get_inv_cli()
        if inv_opts:
            cmd += inv_opts

        # RUN THE PLAYBOOK COMMAND
        display.debug("running assible-playbook to do actual work")
        display.debug('EXEC: %s' % cmd)
        rc, b_out, b_err = run_cmd(cmd, live=True)

        if context.CLIARGS['purge']:
            os.chdir('/')
            try:
                shutil.rmtree(context.CLIARGS['dest'])
            except Exception as e:
                display.error(u"Failed to remove %s: %s" %
                              (context.CLIARGS['dest'], to_text(e)))

        return rc
Example #18
    def run(self):
        ''' create and execute the single task playbook '''

        super(AdHocCLI, self).run()

        # only thing left should be host pattern
        pattern = to_text(context.CLIARGS['args'],
                          errors='surrogate_or_strict')

        sshpass = None
        becomepass = None

        (sshpass, becomepass) = self.ask_passwords()
        passwords = {'conn_pass': sshpass, 'become_pass': becomepass}

        # get basic objects
        loader, inventory, variable_manager = self._play_prereqs()

        try:
            hosts = self.get_host_list(inventory, context.CLIARGS['subset'],
                                       pattern)
        except AssibleError:
            if context.CLIARGS['subset']:
                raise
            else:
                hosts = []
                display.warning("No hosts matched, nothing to do")

        if context.CLIARGS['listhosts']:
            display.display('  hosts (%d):' % len(hosts))
            for host in hosts:
                display.display('    %s' % host)
            return 0

        if context.CLIARGS[
                'module_name'] in C.MODULE_REQUIRE_ARGS and not context.CLIARGS[
                    'module_args']:
            err = "No argument passed to %s module" % context.CLIARGS[
                'module_name']
            if pattern.endswith(".yml"):
                err = err + ' (did you mean to run assible-playbook?)'
            raise AssibleOptionsError(err)

        # Avoid modules that don't work with ad-hoc
        if context.CLIARGS['module_name'] in ('import_playbook', ):
            raise AssibleOptionsError(
                "'%s' is not a valid action for ad-hoc commands" %
                context.CLIARGS['module_name'])

        play_ds = self._play_ds(pattern, context.CLIARGS['seconds'],
                                context.CLIARGS['poll_interval'])
        play = Play().load(play_ds,
                           variable_manager=variable_manager,
                           loader=loader)

        # used in start callback
        playbook = Playbook(loader)
        playbook._entries.append(play)
        playbook._file_name = '__adhoc_playbook__'

        if self.callback:
            cb = self.callback
        elif context.CLIARGS['one_line']:
            cb = 'oneline'
        # Respect custom 'stdout_callback' only with enabled 'bin_assible_callbacks'
        elif C.DEFAULT_LOAD_CALLBACK_PLUGINS and C.DEFAULT_STDOUT_CALLBACK != 'default':
            cb = C.DEFAULT_STDOUT_CALLBACK
        else:
            cb = 'minimal'

        run_tree = False
        if context.CLIARGS['tree']:
            C.DEFAULT_CALLBACK_WHITELIST.append('tree')
            C.TREE_DIR = context.CLIARGS['tree']
            run_tree = True

        # now create a task queue manager to execute the play
        self._tqm = None
        try:
            self._tqm = TaskQueueManager(
                inventory=inventory,
                variable_manager=variable_manager,
                loader=loader,
                passwords=passwords,
                stdout_callback=cb,
                run_additional_callbacks=C.DEFAULT_LOAD_CALLBACK_PLUGINS,
                run_tree=run_tree,
                forks=context.CLIARGS['forks'],
            )

            self._tqm.load_callbacks()
            self._tqm.send_callback('v2_playbook_on_start', playbook)

            result = self._tqm.run(play)

            self._tqm.send_callback('v2_playbook_on_stats', self._tqm._stats)
        finally:
            if self._tqm:
                self._tqm.cleanup()
            if loader:
                loader.cleanup_all_tmp_files()

        return result
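
A condensed sketch of the stdout-callback selection above; the function and its parameters are stand-ins for self.callback and the C.* constants:

def pick_stdout_callback(explicit_cb, one_line, load_callbacks, default_stdout):
    if explicit_cb:
        return explicit_cb
    if one_line:
        return 'oneline'
    if load_callbacks and default_stdout != 'default':
        return default_stdout
    return 'minimal'

assert pick_stdout_callback(None, True, True, 'default') == 'oneline'
assert pick_stdout_callback(None, False, True, 'yaml') == 'yaml'
assert pick_stdout_callback(None, False, True, 'default') == 'minimal'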
Example #19
    def get_hosts(self,
                  pattern="all",
                  ignore_limits=False,
                  ignore_restrictions=False,
                  order=None):
        """
        Takes a pattern or list of patterns and returns a list of matching
        inventory host names, taking into account any active restrictions
        or applied subsets
        """

        hosts = []

        # Check if pattern already computed
        if isinstance(pattern, list):
            pattern_list = pattern[:]
        else:
            pattern_list = [pattern]

        if pattern_list:
            if not ignore_limits and self._subset:
                pattern_list.extend(self._subset)

            if not ignore_restrictions and self._restriction:
                pattern_list.extend(self._restriction)

            # This is only used as a hash key in the self._hosts_patterns_cache dict
            # a tuple is faster than stringifying
            pattern_hash = tuple(pattern_list)

            if pattern_hash not in self._hosts_patterns_cache:

                patterns = split_host_pattern(pattern)
                hosts = self._evaluate_patterns(patterns)

                # mainly useful for hostvars[host] access
                if not ignore_limits and self._subset:
                    # exclude hosts not in a subset, if defined
                    subset_uuids = set(
                        s._uuid for s in self._evaluate_patterns(self._subset))
                    hosts = [h for h in hosts if h._uuid in subset_uuids]

                if not ignore_restrictions and self._restriction:
                    # exclude hosts mentioned in any restriction (ex: failed hosts)
                    hosts = [h for h in hosts if h.name in self._restriction]

                self._hosts_patterns_cache[pattern_hash] = deduplicate_list(
                    hosts)

            # sort hosts list if needed (should only happen when called from strategy)
            if order in ['sorted', 'reverse_sorted']:
                hosts = sorted(self._hosts_patterns_cache[pattern_hash][:],
                               key=attrgetter('name'),
                               reverse=(order == 'reverse_sorted'))
            elif order == 'reverse_inventory':
                hosts = self._hosts_patterns_cache[pattern_hash][::-1]
            else:
                hosts = self._hosts_patterns_cache[pattern_hash][:]
                if order == 'shuffle':
                    shuffle(hosts)
                elif order not in [None, 'inventory']:
                    raise AssibleOptionsError(
                        "Invalid 'order' specified for inventory hosts: %s" %
                        order)

        return hosts
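
A minimal sketch of the 'order' handling at the end of get_hosts above, using plain strings instead of Host objects:

import random

def order_hosts(hosts, order=None):
    if order in ('sorted', 'reverse_sorted'):
        return sorted(hosts, reverse=(order == 'reverse_sorted'))
    if order == 'reverse_inventory':
        return hosts[::-1]
    result = hosts[:]
    if order == 'shuffle':
        random.shuffle(result)
    elif order not in (None, 'inventory'):
        raise ValueError("Invalid 'order' specified for inventory hosts: %s" % order)
    return result

assert order_hosts(['web2', 'web1'], 'sorted') == ['web1', 'web2']
assert order_hosts(['web2', 'web1'], 'reverse_inventory') == ['web1', 'web2']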