Example #1
    def disable_verbose_on_start(self, host_count, total_work):
        '''
        Disables verbose logging when a policy limits the total number of inventory
        hosts or the total amount of work.
        '''

        if display.verbosity == 0:
            return

        if self.max_hosts is not None and host_count > self.max_hosts:
            msg = """
Disabling verbose output. Too many hosts for verbose logging. Inventory contains
{0} hosts and Concord system policies forbid verbose logging for inventories
greater than {1} hosts.
            """.format(host_count, str(self.max_hosts))
            display.error(msg)
            display.verbosity = 0
            return

        if self.max_total_work is not None and total_work > self.max_total_work:
            msg = """
Disabling verbose output. Too much work for verbose logging. Expecting to perform
{0} total work and Concord system policies forbid verbose logging for playbooks
with greater than {1} total work.
            """.format(str(total_work), self.max_total_work)
            display.error(msg)
            display.verbosity = 0
Example #2
    def run(self):
        """Run the ansible command

        Subclasses must implement this method.  It does the actual work of
        running an Ansible command.
        """

        display.vv(to_text(self.parser.get_version()))

        if C.CONFIG_FILE:
            display.v(u"Using %s as config file" % to_text(C.CONFIG_FILE))
        else:
            display.v(u"No config file found; using defaults")

        # warn about deprecated config options
        for deprecated in C.config.DEPRECATED:
            name = deprecated[0]
            why = deprecated[1]['why']
            if 'alternatives' in deprecated[1]:
                alt = ', use %s instead' % deprecated[1]['alternatives']
            else:
                alt = ''
            ver = deprecated[1]['version']
            display.deprecated("%s option, %s %s" % (name, why, alt),
                               version=ver)

        # Errors with configuration entries
        if C.config.UNABLE:
            for unable in C.config.UNABLE:
                display.error(
                    "Unable to set correct type for configuration entry for %s: %s"
                    % (unable, C.config.UNABLE[unable]))
            raise AnsibleError("Invalid configuration settings")
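
For reference, a minimal self-contained sketch of the shape this deprecation loop expects from C.config.DEPRECATED: a sequence of (name, info) pairs where info carries 'why', 'version' and, optionally, 'alternatives'. The entries and the rendered prefix below are illustrative, not real Ansible settings.

# Stand-in data with the same structure as C.config.DEPRECATED (hypothetical names).
DEPRECATED = [
    ('old_setting', {'why': 'superseded by new_setting', 'version': '2.9',
                     'alternatives': 'new_setting'}),
    ('legacy_flag', {'why': 'it no longer has any effect', 'version': '2.9'}),
]

for name, info in DEPRECATED:
    alt = ', use %s instead' % info['alternatives'] if 'alternatives' in info else ''
    # roughly what display.deprecated() would render for each entry
    print("[DEPRECATION WARNING]: %s option, %s%s (to be removed in version %s)"
          % (name, info['why'], alt, info['version']))
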
Example #3
    def is_deny(self, task):
        if 'ansible' not in self.policy_rules:
            return False

        ansible_rules = self.policy_rules['ansible']

        action = task.action
        args = self._enrich_args(action, task._attributes.get('args'))

        if 'allow' in ansible_rules:
            for r in ansible_rules['allow']:
                if self._match_ansible_rule(r, action, args):
                    return False

        if 'deny' in ansible_rules:
            for r in ansible_rules['deny']:
                if self._match_ansible_rule(r, action, args):
                    display.error(
                        "Task '{0} ({1})' is forbidden by the task policy: {2}"
                        .format(task.get_name(), action,
                                self._format_rule_message(r, args)))
                    return True

        if 'warn' in ansible_rules:
            for r in ansible_rules['warn']:
                if self._match_ansible_rule(r, action, args):
                    display.warning(
                        "Potentially restricted task '{0} ({1})' (task policy: {2})"
                        .format(task.get_name(), action,
                                self._format_rule_message(r, args)))
                    return False

        return False
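
For orientation, a sketch of the rule structure is_deny() walks: an 'ansible' section holding optional 'allow', 'deny' and 'warn' lists. The top-level layout follows the code above, but the per-rule fields ('action', 'args') are assumptions about what _match_ansible_rule and _format_rule_message inspect, not taken from the original.

# Hypothetical policy_rules value; only the allow/deny/warn layout is implied by the code above.
policy_rules = {
    'ansible': {
        'allow': [
            {'action': 'ping'},
        ],
        'deny': [
            {'action': 'shell'},                                  # assumed matcher fields
            {'action': 'uri', 'args': {'validate_certs': False}},
        ],
        'warn': [
            {'action': 'command'},                                # allowed, but reported
        ],
    }
}
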
Example #4
    def __request_reboot__(fqdn, goahead_url, goahead_url_ca_file, api_path):
        """
         :param fqdn: requested service/server's fqdn
         :type fqdn: str
         :param goahead_url: goahead service URL
         :type goahead_url: str
         :param goahead_url_ca_file: goahead service certificate file
         :type goahead_url_ca_file: str
         :return: None
         """

        certificate_path = None
        goahead_inquiry = None

        if goahead_url_ca_file:
            certificate_path = goahead_url_ca_file

        # 'uptime' is expected to be provided by the enclosing scope
        goahead_payload = {'fqdn': fqdn, 'uptime': uptime}

        try:
            goahead_inquiry = requests.get(url=goahead_url + api_path,
                                           verify=certificate_path,
                                           data=json.dumps(goahead_payload))
        except Exception as e:
            msg = "Received error %s while contacting %s" % (e, goahead_url)
            display.error(msg)
            return False
Example #5
    def format_plugin_doc(self, plugin, loader, plugin_type, search_paths):
        text = ''

        try:
            # if the plugin lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
            filename = loader.find_plugin(plugin, mod_type='.py', ignore_deprecated=True, check_aliases=True)
            if filename is None:
                display.warning("%s %s not found in:\n%s\n" % (plugin_type, plugin, search_paths))
                return

            if any(filename.endswith(x) for x in C.BLACKLIST_EXTS):
                return

            try:
                doc, plainexamples, returndocs, metadata = get_docstring(filename, fragment_loader,
                                                                         verbose=(self.options.verbosity > 0))
            except Exception:
                display.vvv(traceback.format_exc())
                display.error(
                    "%s %s has a documentation error formatting or is missing documentation." % (plugin_type, plugin),
                    wrap_text=False)
                return

            if doc is not None:

                # assign from other sections
                doc['plainexamples'] = plainexamples
                doc['returndocs'] = returndocs
                doc['metadata'] = metadata

                # generate extra data
                if plugin_type == 'module':
                    # is there corresponding action plugin?
                    if plugin in action_loader:
                        doc['action'] = True
                    else:
                        doc['action'] = False
                doc['filename'] = filename
                doc['now_date'] = datetime.date.today().strftime('%Y-%m-%d')
                if 'docuri' in doc:
                    doc['docuri'] = doc[plugin_type].replace('_', '-')

                if self.options.show_snippet and plugin_type == 'module':
                    text += self.get_snippet_text(doc)
                else:
                    text += self.get_man_text(doc)

                return text
            else:
                if 'removed' in metadata.get('status', []):
                    display.warning("%s %s has been removed\n" % (plugin_type, plugin))
                    return

                # this typically means we couldn't even parse the docstring, not just that the YAML is busted,
                # probably a quoting issue.
                raise AnsibleError("Parsing produced an empty object.")
        except Exception as e:
            display.vvv(traceback.format_exc())
            raise AnsibleError(
                "%s %s missing documentation (or could not parse documentation): %s\n" % (plugin_type, plugin, str(e)))
Example #6
    def fetch(self, role_data):
        """
        Downloads the archived role from github to a temp location
        """
        if role_data:

            # first grab the file and save it to a temp location
            if "github_user" in role_data and "github_repo" in role_data:
                archive_url = 'https://github.com/%s/%s/archive/%s.tar.gz' % (role_data["github_user"], role_data["github_repo"], self.version)
            else:
                archive_url = self.src

            display.display("- downloading role from %s" % archive_url)

            try:
                url_file = open_url(archive_url, validate_certs=self._validate_certs)
                temp_file = tempfile.NamedTemporaryFile(delete=False)
                data = url_file.read()
                while data:
                    temp_file.write(data)
                    data = url_file.read()
                temp_file.close()
                return temp_file.name
            except Exception as e:
                display.error("failed to download the file: %s" % str(e))

        return False
Example #7
    def fetch(self, role_data):
        """
        Downloads the archived role from github to a temp location
        """
        if role_data:

            # first grab the file and save it to a temp location
            if "github_user" in role_data and "github_repo" in role_data:
                archive_url = 'https://github.com/%s/%s/archive/%s.tar.gz' % (role_data["github_user"], role_data["github_repo"], self.version)
            else:
                archive_url = self.src

            display.display("- downloading role from %s" % archive_url)

            try:
                url_file = open_url(archive_url)
                temp_file = tempfile.NamedTemporaryFile(delete=False)
                data = url_file.read()
                while data:
                    temp_file.write(data)
                    data = url_file.read()
                temp_file.close()
                return temp_file.name
            except Exception as e:
                display.error("failed to download the file: %s" % str(e))

        return False
Example #8
    def _call_module(self, name, args):
        result = self._execute_module(module_name=name, module_args=args,
                                      task_vars=self._task_vars)
        if result.get("failed"):
            exception = result.get("exception", None)
            if exception is not None:
                display.error(exception, wrap_text=False)

            msg = result.get("msg", "Unknown error in module {}".format(name))
            raise StrError("lib error: {}".format(msg))

        return result
Example #9
    def helpdefault(self, module_name):
        if module_name in self.modules:
            in_path = module_loader.find_plugin(module_name)
            if in_path:
                oc, a, _, _ = module_docs.get_docstring(in_path)
                if oc:
                    display.display(oc['short_description'])
                    display.display('Parameters:')
                    for opt in oc['options'].keys():
                        display.display('  ' + stringc(opt, C.COLOR_HIGHLIGHT) + ' ' + oc['options'][opt]['description'][0])
                else:
                    display.error('No documentation found for %s.' % module_name)
            else:
                display.error('%s is not a valid command, use ? to list all valid commands.' % module_name)
Example #10
    def emit(self, record):
        mitogen_name = getattr(record, 'mitogen_name', '')
        if mitogen_name == 'stderr':
            record.levelno = logging.ERROR
        if mitogen_name in self.NOISY_LOGGERS and record.levelno >= logging.WARNING:
            record.levelno = logging.DEBUG

        s = '[pid %d] %s' % (os.getpid(), self.format(record))
        if record.levelno >= logging.ERROR:
            display.error(s, wrap_text=False)
        elif record.levelno >= logging.WARNING:
            display.warning(s, formatted=True)
        else:
            self.normal_method(s)
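
A minimal, self-contained sketch of how a handler like the one above is typically attached to the logging tree. The Display calls are replaced with print() so it runs on its own, and the logger name is illustrative.

import logging
import os


class SketchHandler(logging.Handler):
    """Routes records by level, mirroring the emit() above without a Display object."""

    def emit(self, record):
        s = '[pid %d] %s' % (os.getpid(), self.format(record))
        if record.levelno >= logging.ERROR:
            print('ERROR  ', s)
        elif record.levelno >= logging.WARNING:
            print('WARNING', s)
        else:
            print('DEBUG  ', s)


log = logging.getLogger('ansible_mitogen.sketch')
log.setLevel(logging.DEBUG)
log.addHandler(SketchHandler())
log.warning('handler installed')          # routed through SketchHandler.emit()
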
Example #11
    def helpdefault(self, module_name):
        if module_name in self.modules:
            in_path = module_loader.find_plugin(module_name)
            if in_path:
                oc, a, _ = module_docs.get_docstring(in_path)
                if oc:
                    display.display(oc['short_description'])
                    display.display('Parameters:')
                    for opt in oc['options'].keys():
                        display.display('  ' + stringc(opt, C.COLOR_HIGHLIGHT) + ' ' + oc['options'][opt]['description'][0])
                else:
                    display.error('No documentation found for %s.' % module_name)
            else:
                display.error('%s is not a valid command, use ? to list all valid commands.' % module_name)
Example #12
    def _generate_retry_inventory(self, retry_path, replay_hosts):
        '''
        Called when a playbook run fails. It generates an inventory which allows
        re-running on ONLY the failed hosts.  This may duplicate some variable
        information in group_vars/host_vars but that is ok, and expected.
        '''

        try:
            with open(retry_path, 'w') as fd:
                for x in replay_hosts:
                    fd.write("%s\n" % x)
        except Exception as e:
            display.error("Could not create retry file '%s'. The error was: %s" % (retry_path, e))
            return False

        return True
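
A self-contained sketch of the file this method produces: one failed host per line. In stock Ansible such a retry file is typically fed back with --limit @<playbook>.retry; the host names and playbook name below are placeholders.

import tempfile

failed_hosts = ['web01.example.com', 'db02.example.com']
with tempfile.NamedTemporaryFile('w', suffix='.retry', delete=False) as fd:
    for host in failed_hosts:
        fd.write("%s\n" % host)
print("retry inventory written to %s" % fd.name)

# re-run only the failed hosts with something like:
#   ansible-playbook site.yml --limit @site.retry
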
Example #14
    def disable_verbose_after_too_much_work(self, completed_work):
        '''
        Disables verbose output after the playbook has started executing. Useful when
        the main playbook has few tasks but contains one or more include_tasks calls,
        which bypass the total_work calculation done before the playbook starts.
        '''

        if display.verbosity == 0:
            return False

        if self.max_total_work is not None and completed_work > self.max_total_work:
            msg = """
Disabling verbose output. Too much work for verbose logging. Completed {0} work
so far and Concord system policies forbid verbose logging for playbooks with
greater than {1} total work.
            """.format(str(completed_work), self.max_total_work)
            display.error(msg)
            display.verbosity = 0
Example #15
def read_docstub(filename, verbose=True, ignore_errors=True):
    """
    Quickly find short_description using string methods instead of node parsing.
    This does not return a full set of documentation strings and is intended for
    operations like ansible-doc -l.
    """

    data = {
        'doc': None,
        'plainexamples': None,
        'returndocs': None,
        'metadata': None
    }

    try:
        with open(filename, 'r') as t_module_data:
            capturing = False
            doc_stub = []

            for line in t_module_data:
                # start capturing the stub until indentation returns
                if capturing and line[0] == ' ':
                    doc_stub.append(line)
                elif capturing and line[0] != ' ':
                    break
                if 'short_description:' in line:
                    capturing = True
                    doc_stub.append(line)

        data['doc'] = AnsibleLoader(r"".join(doc_stub),
                                    file_name=filename).get_single_data()

    except Exception:
        if verbose:
            display.error("unable to parse %s" % filename)
        if not ignore_errors:
            raise

    return data
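
To show what the scan above keys on, a small self-contained stub: only the 'short_description:' line and its indented continuation matter to read_docstub(). The module text is invented, and the call itself is left as a comment because it needs the surrounding imports (AnsibleLoader, display).

import tempfile

stub_module = '''DOCUMENTATION = """
module: sample_module
short_description: A one-line summary that continues
    onto an indented second line.
options: {}
"""
'''

with tempfile.NamedTemporaryFile('w', suffix='.py', delete=False) as f:
    f.write(stub_module)

# read_docstub(f.name) would return something like:
#   {'doc': {'short_description': 'A one-line summary that continues onto an indented second line.'},
#    'plainexamples': None, 'returndocs': None, 'metadata': None}
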
Example #16
    def _call_module(self, name, args):
        install_user = self._get_var_install_user()
        orig_become = self._play_context.become
        orig_become_user = self._play_context.become_user

        if orig_become is None or orig_become is False:
            self._play_context.become = True
        if orig_become_user is None or orig_become_user != install_user:
            self._play_context.become_user = install_user

        try:
            result = self._execute_module(module_name=name, module_args=args,
                                          task_vars=self._task_vars)
            if result.get("failed"):
                exception = result.get("exception", None)
                if exception is not None:
                    display.error(exception, wrap_text=False)

                raise StrError(result.get("msg", "Unknown error in module {}".format(name)))

            return result
        finally:
            self._play_context.become = orig_become
            self._play_context.become_user = orig_become_user
Example #17
    def run(self, iterator, play_context):
        '''
        The linear strategy is simple - get the next task and queue
        it for all hosts, then wait for the queue to drain before
        moving on to the next task
        '''

        # iterate over each task, while there is one left to run
        result = self._tqm.RUN_OK
        work_to_do = True
        while work_to_do and not self._tqm._terminated:

            try:
                display.debug("getting the remaining hosts for this loop")
                hosts_left = self.get_hosts_left(iterator)
                display.debug("done getting the remaining hosts for this loop")

                # queue up this task for each host in the inventory
                callback_sent = False
                work_to_do = False

                host_results = []
                host_tasks = self._get_next_task_lockstep(hosts_left, iterator)

                # skip control
                skip_rest = False
                choose_step = True

                # flag set if task is set to any_errors_fatal
                any_errors_fatal = False

                succeeded_hosts = {}
                retry_task = True

                while retry_task:

                    results = []
                    for (host, task) in host_tasks:
                        if not task:
                            continue

                        if self._tqm._terminated:
                            break

                        run_once = False
                        work_to_do = True

                        # test to see if the task across all hosts points to an action plugin which
                        # sets BYPASS_HOST_LOOP to true, or if it has run_once enabled. If so, we
                        # will only send this task to the first host in the list.

                        try:
                            action = action_loader.get(task.action,
                                                       class_only=True)
                        except KeyError:
                            # we don't care here, because the action may simply not have a
                            # corresponding action plugin
                            action = None

                        # check to see if this task should be skipped, due to it being a member of a
                        # role which has already run (and whether that role allows duplicate execution)
                        if task._role and task._role.has_run(host):
                            # If there is no metadata, the default behavior is to not allow duplicates,
                            # if there is metadata, check to see if the allow_duplicates flag was set to true
                            if task._role._metadata is None or task._role._metadata and not task._role._metadata.allow_duplicates:
                                display.debug(
                                    "'%s' skipped because role has already run"
                                    % task)
                                continue

                        if task.action == 'meta':
                            # for the linear strategy, we run meta tasks just once and for
                            # all hosts currently being iterated over rather than one host
                            results.extend(
                                self._execute_meta(task, play_context,
                                                   iterator, host))
                            if task.args.get('_raw_params', None) != 'noop':
                                run_once = True
                        else:
                            # handle step if needed, skip meta actions as they are used internally
                            if self._step and choose_step:
                                if self._take_step(task):
                                    choose_step = False
                                else:
                                    skip_rest = True
                                    break

                            display.debug("getting variables")
                            task_vars = self._variable_manager.get_vars(
                                play=iterator._play, host=host, task=task)
                            self.add_tqm_variables(task_vars,
                                                   play=iterator._play)
                            templar = Templar(loader=self._loader,
                                              variables=task_vars)
                            display.debug("done getting variables")

                            run_once = templar.template(
                                task.run_once) or action and getattr(
                                    action, 'BYPASS_HOST_LOOP', False)

                            if (task.any_errors_fatal
                                    or run_once) and not task.ignore_errors:
                                any_errors_fatal = True

                            if not callback_sent:
                                display.debug(
                                    "sending task start callback, copying the task so we can template it temporarily"
                                )
                                saved_name = task.name
                                display.debug(
                                    "done copying, going to template now")
                                try:
                                    task.name = to_text(templar.template(
                                        task.name, fail_on_undefined=False),
                                                        nonstring='empty')
                                    display.debug("done templating")
                                except:
                                    # just ignore any errors during task name templating,
                                    # we don't care if it just shows the raw name
                                    display.debug(
                                        "templating failed for some reason")
                                    pass
                                display.debug("here goes the callback...")
                                self._tqm.send_callback(
                                    'v2_playbook_on_task_start',
                                    task,
                                    is_conditional=False)
                                task.name = saved_name
                                callback_sent = True
                                display.debug("sending task start callback")

                            if host not in succeeded_hosts:
                                self._blocked_hosts[host.get_name()] = True
                                self._queue_task(host, task, task_vars,
                                                 play_context)
                            del task_vars

                        # if we're bypassing the host loop, break out now
                        if run_once:
                            break

                        results += self._process_pending_results(
                            iterator,
                            max_passes=max(1,
                                           int(len(self._tqm._workers) * 0.1)))

                    # go to next host/task group
                    if skip_rest:
                        continue

                    display.debug(
                        "done queuing things up, now waiting for results queue to drain"
                    )
                    if self._pending_results > 0:
                        results += self._wait_on_pending_results(iterator)

                    host_results.extend(results)

                    try:
                        included_files = IncludedFile.process_include_results(
                            host_results,
                            self._tqm,
                            iterator=iterator,
                            inventory=self._inventory,
                            loader=self._loader,
                            variable_manager=self._variable_manager)
                    except AnsibleError as e:
                        # this is a fatal error, so we abort here regardless of block state
                        return self._tqm.RUN_ERROR

                    include_failure = False
                    if len(included_files) > 0:
                        display.debug("we have included files to process")

                        # A noop task for use in padding dynamic includes
                        noop_task = Task()
                        noop_task.action = 'meta'
                        noop_task.args['_raw_params'] = 'noop'
                        noop_task.set_loader(iterator._play._loader)

                        display.debug("generating all_blocks data")
                        all_blocks = dict((host, []) for host in hosts_left)
                        display.debug("done generating all_blocks data")
                        for included_file in included_files:
                            display.debug("processing included file: %s" %
                                          included_file._filename)
                            # included hosts get the task list while those excluded get an equal-length
                            # list of noop tasks, to make sure that they continue running in lock-step
                            try:
                                if included_file._is_role:
                                    new_ir = included_file._task.copy()
                                    new_ir.vars.update(included_file._args)

                                    new_blocks, handler_blocks = new_ir.get_block_list(
                                        play=iterator._play,
                                        variable_manager=self.
                                        _variable_manager,
                                        loader=self._loader,
                                    )
                                    self._tqm.update_handler_list([
                                        handler
                                        for handler_block in handler_blocks
                                        for handler in handler_block.block
                                    ])
                                else:
                                    new_blocks = self._load_included_file(
                                        included_file, iterator=iterator)

                                display.debug(
                                    "iterating over new_blocks loaded from include file"
                                )
                                for new_block in new_blocks:
                                    task_vars = self._variable_manager.get_vars(
                                        play=iterator._play,
                                        task=included_file._task,
                                    )
                                    display.debug(
                                        "filtering new block on tags")
                                    final_block = new_block.filter_tagged_tasks(
                                        play_context, task_vars)
                                    display.debug(
                                        "done filtering new block on tags")

                                    noop_block = Block(
                                        parent_block=task._parent)
                                    noop_block.block = [
                                        noop_task for t in new_block.block
                                    ]
                                    noop_block.always = [
                                        noop_task for t in new_block.always
                                    ]
                                    noop_block.rescue = [
                                        noop_task for t in new_block.rescue
                                    ]

                                    for host in hosts_left:
                                        if host in included_file._hosts:
                                            all_blocks[host].append(
                                                final_block)
                                        else:
                                            all_blocks[host].append(noop_block)
                                display.debug(
                                    "done iterating over new_blocks loaded from include file"
                                )

                            except AnsibleError as e:
                                for host in included_file._hosts:
                                    self._tqm._failed_hosts[host.name] = True
                                    iterator.mark_host_failed(host)
                                display.error(to_text(e), wrap_text=False)
                                include_failure = True
                                continue

                        # finally go through all of the hosts and append the
                        # accumulated blocks to their list of tasks
                        display.debug(
                            "extending task lists for all hosts with included blocks"
                        )

                        for host in hosts_left:
                            iterator.add_tasks(host, all_blocks[host])

                        display.debug("done extending task lists")
                        display.debug("done processing included files")

                    display.debug("results queue empty")

                    display.debug("checking for any_errors_fatal")
                    failed_hosts = []
                    unreachable_hosts = []
                    for res in results:
                        if res.is_failed() and iterator.is_failed(res._host):
                            failed_hosts.append(res._host.name)
                        elif res.is_unreachable():
                            unreachable_hosts.append(res._host.name)
                        else:
                            succeeded_hosts[res._host] = res

                    if (len(failed_hosts) == 0):
                        # No more failures means we do not need to retry anymore.
                        retry_task = False
                    else:

                        msg = 'Host failed, ignore/retry/abort/debug: %s on [%s] (i/r/a/d): ' % (
                            str(task), ' '.join(failed_hosts)) + '\n'

                        resp = 'bogus'
                        while resp.strip().lower()[:1] not in [
                                'i', 'r', 'a', 'd'
                        ]:
                            resp = display.prompt(msg)

                        # Currently the only difference between ignore and repeat is setting the retry_task to 'False'
                        if resp.strip().lower() in ['i', 'ignore']:
                            retry_task = False

                        if resp.strip().lower() in [
                                'r', 'repeat', 'i', 'ignore'
                        ]:
                            # We need to revert the internal failed host states if we want to ignore the errors
                            # The internal state should be identical to that of a successful host
                            for failed_host in failed_hosts:
                                iterator._host_states[
                                    failed_host].run_state = PlayIterator.ITERATING_TASKS
                                iterator._host_states[
                                    failed_host].fail_state = PlayIterator.FAILED_NONE

                                # Fix child states as well
                                if iterator._host_states[
                                        failed_host].tasks_child_state:
                                    iterator._host_states[
                                        failed_host].tasks_child_state.run_state = PlayIterator.ITERATING_TASKS
                                    iterator._host_states[
                                        failed_host].tasks_child_state.fail_state = PlayIterator.FAILED_NONE
                            self._tqm.clear_failed_hosts()

                        elif resp.strip().lower() in ['a', 'abort']:
                            retry_task = False

                            for host in hosts_left:
                                (s, _) = iterator.get_next_task_for_host(
                                    host, peek=True)
                                if s.run_state != iterator.ITERATING_RESCUE or \
                                   s.run_state == iterator.ITERATING_RESCUE and s.fail_state & iterator.FAILED_RESCUE != 0:
                                    self._tqm._failed_hosts[host.name] = True
                                    result |= self._tqm.RUN_FAILED_BREAK_PLAY

                                # don't double-mark hosts, or the iterator will potentially
                                # fail them out of the rescue/always states
                                if host.name not in failed_hosts:
                                    self._tqm._failed_hosts[host.name] = True
                                    iterator.mark_host_failed(host)
                            self._tqm.send_callback(
                                'v2_playbook_on_no_hosts_remaining')
                            result |= self._tqm.RUN_FAILED_BREAK_PLAY

                        elif resp.strip().lower() in ['d', 'debug']:

                            msg = "Running PDB.  Refer to https://pymotw.com/2/pdb/ for instruction.\n\n"
                            msg += "Commmon commands:\n\n"
                            msg += "Print the task's error message: print(failure_debug_message)\n"
                            msg += 'Add a variable to a host: host_to_edit = hosts_left[0]; task._variable_manager.set_host_variable(host_to_edit,"new_var_key","new_var_value")\n'
                            msg += 'Print task vars: print(task_vars["var_key_name"])\n'

                            display.display(msg,
                                            color='blue',
                                            stderr=False,
                                            screen_only=False,
                                            log_only=False)
                            pdb.set_trace()

            except (IOError, EOFError) as e:
                display.debug("got IOError/EOFError in task loop: %s" % e)
                # most likely an abort, return failed
                return self._tqm.RUN_UNKNOWN_ERROR

        # run the base class run() method, which executes the cleanup function
        # and runs any outstanding handlers which have been triggered

        return super(StrategyModule, self).run(iterator, play_context, result)
Example #18
                  'when': task.when,
                  'notify': task.notify,
                  'register': task.register,
                  'validate': str(task.validate),
                  'ds': task.get_ds()}
        return result


if __name__ == "__main__":
    try:
        cli = PlaybookDocutizer([to_text(a, errors='surrogate_or_strict') for a in sys.argv])
        cli.parse()
        exit_code = cli.run()
    except AnsibleOptionsError as e:
        cli.parser.print_help()
        display.error(to_text(e), wrap_text=False)
        exit_code = 5
    except AnsibleParserError as e:
        display.error(to_text(e), wrap_text=False)
        exit_code = 4
    except AnsibleError as e:
        display.error(to_text(e), wrap_text=False)
        exit_code = 7
    except TemplateError as e:
        display.error(to_text(e), wrap_text=False)
        exit_code = 6
    except KeyboardInterrupt:
        display.error('User interrupted execution')
        exit_code = 99
    sys.exit(exit_code)
Example #19
    def execute_install(self):
        """
        Executes the installation action. The args list contains the
        roles to be installed, unless -f was specified. The list of roles
        can be a name (which will be downloaded via the galaxy API and github),
        or it can be a local .tar.gz file.
        """

        role_file  = self.get_opt("role_file", None)

        if len(self.args) == 0 and role_file is None:
            # the user needs to specify one of either --role-file
            # or specify a single user/role name
            raise AnsibleOptionsError("- you must specify a user/role name or a roles file")
        elif len(self.args) == 1 and role_file is not None:
            # using a role file is mutually exclusive of specifying
            # the role name on the command line
            raise AnsibleOptionsError("- please specify a user/role name, or a roles file, but not both")

        no_deps    = self.get_opt("no_deps", False)
        force      = self.get_opt('force', False)

        roles_left = []
        if role_file:
            try:
                f = open(role_file, 'r')
                if role_file.endswith('.yaml') or role_file.endswith('.yml'):
                    try:
                        required_roles =  yaml.safe_load(f.read())
                    except Exception as e:
                        raise AnsibleError("Unable to load data from the requirements file: %s" % role_file)

                    if required_roles is None:
                        raise AnsibleError("No roles found in file: %s" % role_file)

                    for role in required_roles:
                        role = RoleRequirement.role_yaml_parse(role)
                        display.vvv('found role %s in yaml file' % str(role))
                        if 'name' not in role and 'scm' not in role:
                            raise AnsibleError("Must specify name or src for role")
                        roles_left.append(GalaxyRole(self.galaxy, **role))
                else:
                    display.deprecated("going forward only the yaml format will be supported")
                    # roles listed in a file, one per line
                    for rline in f.readlines():
                        if rline.startswith("#") or rline.strip() == '':
                            continue
                        display.debug('found role %s in text file' % str(rline))
                        role = RoleRequirement.role_yaml_parse(rline.strip())
                        roles_left.append(GalaxyRole(self.galaxy, **role))
                f.close()
            except (IOError, OSError) as e:
                display.error('Unable to open %s: %s' % (role_file, str(e)))
        else:
            # roles were specified directly, so we'll just go out grab them
            # (and their dependencies, unless the user doesn't want us to).
            for rname in self.args:
                role = RoleRequirement.role_yaml_parse(rname.strip())
                roles_left.append(GalaxyRole(self.galaxy, **role))

        for role in roles_left:
            display.vvv('Installing role %s ' % role.name)
            # query the galaxy API for the role data

            if role.install_info is not None and not force:
                display.display('- %s is already installed, skipping.' % role.name)
                continue

            try:
                installed = role.install()
            except AnsibleError as e:
                display.warning("- %s was NOT installed successfully: %s " % (role.name, str(e)))
                self.exit_without_ignore()
                continue

            # install dependencies, if we want them
            if not no_deps and installed:
                role_dependencies = role.metadata.get('dependencies') or []
                for dep in role_dependencies:
                    display.debug('Installing dep %s' % dep)
                    dep_req = RoleRequirement()
                    dep_info = dep_req.role_yaml_parse(dep)
                    dep_role = GalaxyRole(self.galaxy, **dep_info)
                    if '.' not in dep_role.name and '.' not in dep_role.src and dep_role.scm is None:
                        # we know we can skip this, as it's not going to
                        # be found on galaxy.ansible.com
                        continue
                    if dep_role.install_info is None or force:
                        if dep_role not in roles_left:
                            display.display('- adding dependency: %s' % dep_role.name)
                            roles_left.append(dep_role)
                        else:
                            display.display('- dependency %s already pending installation.' % dep_role.name)
                    else:
                        display.display('- dependency %s is already installed, skipping.' % dep_role.name)

            if not installed:
                display.warning("- %s was NOT installed successfully." % role.name)
                self.exit_without_ignore()

        return 0
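
For reference, a sketch of the requirements-file shape the YAML branch above parses: a list of role specs carrying name/src/scm/version keys, which are then handed to RoleRequirement.role_yaml_parse and GalaxyRole. The concrete entries are illustrative, and the snippet needs PyYAML just like the surrounding code.

import yaml

requirements_yml = '''
- name: geerlingguy.nginx
  version: "2.8.0"
- name: foo
  src: https://github.com/example/ansible-role-foo.git
  scm: git
  version: master
'''

for role in yaml.safe_load(requirements_yml):
    print(role)   # each dict would be passed to GalaxyRole(self.galaxy, **role)
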
Example #20
    def install(self, role_filename):
        # the file is a tar, so open it that way and extract it
        # to the specified (or default) roles directory

        if not tarfile.is_tarfile(role_filename):
            display.error("the file downloaded was not a tar.gz")
            return False
        else:
            if role_filename.endswith('.gz'):
                role_tar_file = tarfile.open(role_filename, "r:gz")
            else:
                role_tar_file = tarfile.open(role_filename, "r")
            # verify the role's meta file
            meta_file = None
            members = role_tar_file.getmembers()
            # next find the metadata file
            for member in members:
                if self.META_MAIN in member.name:
                    meta_file = member
                    break
            if not meta_file:
                display.error(
                    "this role does not appear to have a meta/main.yml file.")
                return False
            else:
                try:
                    self._metadata = yaml.safe_load(
                        role_tar_file.extractfile(meta_file))
                except:
                    display.error(
                        "this role does not appear to have a valid meta/main.yml file."
                    )
                    return False

            # we strip off the top-level directory for all of the files contained within
            # the tar file here, since the default is 'github_repo-target', and change it
            # to the specified role's name
            display.display("- extracting %s to %s" % (self.name, self.path))
            try:
                if os.path.exists(self.path):
                    if not os.path.isdir(self.path):
                        display.error(
                            "the specified roles path exists and is not a directory."
                        )
                        return False
                    elif not getattr(self.options, "force", False):
                        display.error(
                            "the specified role %s appears to already exist. Use --force to replace it."
                            % self.name)
                        return False
                    else:
                        # using --force, remove the old path
                        if not self.remove():
                            display.error(
                                "%s doesn't appear to contain a role." %
                                self.path)
                            display.error(
                                "  please remove this directory manually if you really want to put the role here."
                            )
                            return False
                else:
                    os.makedirs(self.path)

                # now we do the actual extraction to the path
                for member in members:
                    # we only extract files, and remove any relative path
                    # bits that might be in the file for security purposes
                    # and drop the leading directory, as mentioned above
                    if member.isreg() or member.issym():
                        parts = member.name.split(os.sep)[1:]
                        final_parts = []
                        for part in parts:
                            if part != '..' and '~' not in part and '$' not in part:
                                final_parts.append(part)
                        member.name = os.path.join(*final_parts)
                        role_tar_file.extract(member, self.path)

                # write out the install info file for later use
                self._write_galaxy_install_info()
            except OSError as e:
                display.error("Could not update files in %s: %s" %
                              (self.path, str(e)))
                return False

            # return the parsed yaml metadata
            display.display("- %s was installed successfully" % self.name)
            return True
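
The security-relevant part of the extraction above is the member-name clean-up. A stand-alone sketch of that idea (not the original GalaxyRole code): drop the leading 'repo-version/' directory and filter out path components that could escape or pollute the destination.

import os


def sanitize_member_name(name):
    # strip the top-level 'github_repo-target' directory, as install() above does
    parts = name.split(os.sep)[1:]
    # keep only components that cannot climb out of the role path
    safe = [p for p in parts if p != '..' and '~' not in p and '$' not in p]
    return os.path.join(*safe) if safe else ''


print(sanitize_member_name('myrole-1.0.0/tasks/main.yml'))    # tasks/main.yml
print(sanitize_member_name('myrole-1.0.0/../../etc/passwd'))  # etc/passwd ('..' parts dropped)
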
Example #21
from ansible.plugins.action import ActionBase
from distutils.version import LooseVersion

try:
    from __main__ import display
except ImportError:
    from ansible.utils.display import Display
    display = Display()

try:
    import notario
except ImportError:
    msg = "The python-notario library is missing. Please install it on the node you are running ceph-ansible to continue."
    display.error(msg)
    raise SystemExit(msg)

if LooseVersion(notario.__version__) < LooseVersion("0.0.13"):
    msg = "The python-notario libary has an incompatible version. Version >= 0.0.13 is needed, current version: %s" % notario.__version__
    display.error(msg)
    raise SystemExit(msg)

from notario.exceptions import Invalid
from notario.validators import types, chainable, iterables
from notario.decorators import optional
from notario.store import store as notario_store


CEPH_RELEASES = ['jewel', 'kraken', 'luminous', 'mimic', 'nautilus']

Example #22
    def run(self):

        super(DocCLI, self).run()

        if self.options.module_path is not None:
            for i in self.options.module_path.split(os.pathsep):
                module_loader.add_directory(i)

        # list modules
        if self.options.list_dir:
            paths = module_loader._get_paths()
            for path in paths:
                self.find_modules(path)

            self.pager(self.get_module_list_text())
            return 0

        if len(self.args) == 0:
            raise AnsibleOptionsError("Incorrect options passed")

        # process command line module list
        text = ''
        for module in self.args:

            try:
                # if the module lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
                filename = module_loader.find_plugin(module, mod_type='.py')
                if filename is None:
                    display.warning("module %s not found in %s\n" % (module, DocCLI.print_paths(module_loader)))
                    continue

                if any(filename.endswith(x) for x in C.BLACKLIST_EXTS):
                    continue

                try:
                    doc, plainexamples, returndocs = module_docs.get_docstring(filename, verbose=(self.options.verbosity > 0))
                except:
                    display.vvv(traceback.format_exc())
                    display.error("module %s has a documentation error formatting or is missing documentation\nTo see exact traceback use -vvv" % module)
                    continue

                if doc is not None:

                    all_keys = []
                    for (k,v) in iteritems(doc['options']):
                        all_keys.append(k)
                    all_keys = sorted(all_keys)
                    doc['option_keys'] = all_keys

                    doc['filename']         = filename
                    doc['docuri']           = doc['module'].replace('_', '-')
                    doc['now_date']         = datetime.date.today().strftime('%Y-%m-%d')
                    doc['plainexamples']    = plainexamples
                    doc['returndocs']       = returndocs

                    if self.options.show_snippet:
                        text += self.get_snippet_text(doc)
                    else:
                        text += self.get_man_text(doc)
                else:
                    # this typically means we couldn't even parse the docstring, not just that the YAML is busted,
                    # probably a quoting issue.
                    raise AnsibleError("Parsing produced an empty object.")
            except Exception as e:
                display.vvv(traceback.format_exc())
                raise AnsibleError("module %s missing documentation (or could not parse documentation): %s\n" % (module, str(e)))

        self.pager(text)
        return 0
Example #23
    def run(self):

        super(DocCLI, self).run()

        plugin_type = self.options.type

        # choose plugin type
        if plugin_type == 'cache':
            loader = cache_loader
        elif plugin_type == 'callback':
            loader = callback_loader
        elif plugin_type == 'connection':
            loader = connection_loader
        elif plugin_type == 'lookup':
            loader = lookup_loader
        elif plugin_type == 'strategy':
            loader = strategy_loader
        elif plugin_type == 'vars':
            loader = vars_loader
        elif plugin_type == 'inventory':
            loader = inventory_loader
        elif plugin_type == 'shell':
            loader = shell_loader
        else:
            loader = module_loader

        # add to plugin path from command line
        if self.options.module_path:
            for path in self.options.module_path:
                if path:
                    loader.add_directory(path)

        # save only top level paths for errors
        search_paths = DocCLI.print_paths(loader)
        loader._paths = None  # reset so we can use subdirs below

        # list plugins names and filepath for type
        if self.options.list_files:
            paths = loader._get_paths()
            for path in paths:
                self.find_plugins(path, plugin_type)

            list_text = self.get_plugin_list_filenames(loader)
            self.pager(list_text)
            return 0

        # list plugins for type
        if self.options.list_dir:
            paths = loader._get_paths()
            for path in paths:
                self.find_plugins(path, plugin_type)

            self.pager(self.get_plugin_list_text(loader))
            return 0

        # process all plugins of type
        if self.options.all_plugins:
            paths = loader._get_paths()
            for path in paths:
                self.find_plugins(path, plugin_type)
            self.args = sorted(set(self.plugin_list))

        if len(self.args) == 0:
            raise AnsibleOptionsError("Incorrect options passed")

        # process command line list
        text = ''
        for plugin in self.args:
            try:
                # if the plugin lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
                filename = loader.find_plugin(plugin,
                                              mod_type='.py',
                                              ignore_deprecated=True,
                                              check_aliases=True)
                if filename is None:
                    display.warning("%s %s not found in:\n%s\n" %
                                    (plugin_type, plugin, search_paths))
                    continue

                if any(filename.endswith(x) for x in C.BLACKLIST_EXTS):
                    continue

                try:
                    doc, plainexamples, returndocs, metadata = get_docstring(
                        filename,
                        fragment_loader,
                        verbose=(self.options.verbosity > 0))
                except:
                    display.vvv(traceback.format_exc())
                    display.error(
                        "%s %s has a documentation error formatting or is missing documentation."
                        % (plugin_type, plugin))
                    continue

                if doc is not None:

                    # assign from other sections
                    doc['plainexamples'] = plainexamples
                    doc['returndocs'] = returndocs
                    doc['metadata'] = metadata

                    # generate extra data
                    if plugin_type == 'module':
                        # is there corresponding action plugin?
                        if plugin in action_loader:
                            doc['action'] = True
                        else:
                            doc['action'] = False
                    doc['filename'] = filename
                    doc['now_date'] = datetime.date.today().strftime(
                        '%Y-%m-%d')
                    if 'docuri' in doc:
                        doc['docuri'] = doc[plugin_type].replace('_', '-')

                    if self.options.show_snippet and plugin_type == 'module':
                        text += self.get_snippet_text(doc)
                    else:
                        text += self.get_man_text(doc)
                else:
                    # this typically means we couldn't even parse the docstring, not just that the YAML is busted,
                    # probably a quoting issue.
                    raise AnsibleError("Parsing produced an empty object.")
            except Exception as e:
                display.vvv(traceback.format_exc())
                raise AnsibleError(
                    "%s %s missing documentation (or could not parse documentation): %s\n"
                    % (plugin_type, plugin, str(e)))

        if text:
            self.pager(text)
        return 0
Example #24
    def run(self):
        '''
        Run the given playbook, based on the settings in the play which
        may limit the runs to serialized groups, etc.
        '''

        result = 0
        entrylist = []
        entry = {}
        try:
            for playbook_path in self._playbooks:
                pb = Playbook.load(playbook_path,
                                   variable_manager=self._variable_manager,
                                   loader=self._loader)
                # FIXME: move out of inventory self._inventory.set_playbook_basedir(os.path.realpath(os.path.dirname(playbook_path)))

                if self._tqm is None:  # we are doing a listing
                    entry = {'playbook': playbook_path}
                    entry['plays'] = []
                else:
                    # make sure the tqm has callbacks loaded
                    self._tqm.load_callbacks()
                    self._tqm.send_callback('v2_playbook_on_start', pb)

                i = 1
                plays = pb.get_plays()
                display.vv(u'%d plays in %s' %
                           (len(plays), to_text(playbook_path)))

                for play in plays:
                    if play._included_path is not None:
                        self._loader.set_basedir(play._included_path)
                    else:
                        self._loader.set_basedir(pb._basedir)

                    # clear any filters which may have been applied to the inventory
                    self._inventory.remove_restriction()

                    # Allow variables to be used in vars_prompt fields.
                    all_vars = self._variable_manager.get_vars(play=play)
                    templar = Templar(loader=self._loader, variables=all_vars)
                    setattr(play, 'vars_prompt',
                            templar.template(play.vars_prompt))

                    if play.vars_prompt:
                        for var in play.vars_prompt:
                            vname = var['name']
                            prompt = var.get("prompt", vname)
                            default = var.get("default", None)
                            private = boolean(var.get("private", True))
                            confirm = boolean(var.get("confirm", False))
                            encrypt = var.get("encrypt", None)
                            salt_size = var.get("salt_size", None)
                            salt = var.get("salt", None)

                            if vname not in self._variable_manager.extra_vars:
                                if self._tqm:
                                    self._tqm.send_callback(
                                        'v2_playbook_on_vars_prompt', vname,
                                        private, prompt, encrypt, confirm,
                                        salt_size, salt, default)
                                    play.vars[vname] = display.do_var_prompt(
                                        vname, private, prompt, encrypt,
                                        confirm, salt_size, salt, default)
                                else:  # we are either in --list-<option> or syntax check
                                    play.vars[vname] = default

                    # Post validate so any play level variables are templated
                    all_vars = self._variable_manager.get_vars(play=play)
                    templar = Templar(loader=self._loader, variables=all_vars)
                    play.post_validate(templar)

                    if self._options.syntax:
                        continue

                    if self._tqm is None:
                        # we are just doing a listing
                        entry['plays'].append(play)

                    else:
                        self._tqm._unreachable_hosts.update(
                            self._unreachable_hosts)

                        previously_failed = len(self._tqm._failed_hosts)
                        previously_unreachable = len(
                            self._tqm._unreachable_hosts)

                        break_play = False
                        # we are actually running plays
                        batches = self._get_serialized_batches(play)
                        if len(batches) == 0:
                            self._tqm.send_callback(
                                'v2_playbook_on_play_start', play)
                            self._tqm.send_callback(
                                'v2_playbook_on_no_hosts_matched')
                        for batch in batches:
                            # restrict the inventory to the hosts in the serialized batch
                            self._inventory.restrict_to_hosts(batch)
                            # and run it...
                            result = self._tqm.run(play=play)

                            # break the play if the result equals the special return code
                            if result & self._tqm.RUN_FAILED_BREAK_PLAY != 0:
                                result = self._tqm.RUN_FAILED_HOSTS
                                break_play = True

                            # check the number of failures here, to see if they're above the maximum
                            # failure percentage allowed, or if any errors are fatal. If either of those
                            # conditions are met, we break out, otherwise we only break out if the entire
                            # batch failed
                            failed_hosts_count = len(self._tqm._failed_hosts) + len(self._tqm._unreachable_hosts) - \
                                (previously_failed + previously_unreachable)

                            if len(batch) == failed_hosts_count:
                                break_play = True
                                break

                            # update the previous counts so they don't accumulate incorrectly
                            # over multiple serial batches
                            previously_failed += len(
                                self._tqm._failed_hosts) - previously_failed
                            previously_unreachable += len(
                                self._tqm._unreachable_hosts
                            ) - previously_unreachable

                            # save the unreachable hosts from this batch
                            self._unreachable_hosts.update(
                                self._tqm._unreachable_hosts)

                        if break_play:
                            break

                    i = i + 1  # per play

                if entry:
                    entrylist.append(entry)  # per playbook

                # send the stats callback for this playbook
                if self._tqm is not None:
                    if C.RETRY_FILES_ENABLED:
                        retries = set(self._tqm._failed_hosts.keys())
                        retries.update(self._tqm._unreachable_hosts.keys())
                        retries = sorted(retries)
                        if len(retries) > 0:
                            if C.RETRY_FILES_SAVE_PATH:
                                basedir = C.RETRY_FILES_SAVE_PATH
                            elif playbook_path:
                                basedir = os.path.dirname(
                                    os.path.abspath(playbook_path))
                            else:
                                basedir = '~/'

                            (retry_name, _) = os.path.splitext(
                                os.path.basename(playbook_path))
                            filename = os.path.join(basedir,
                                                    "%s.retry" % retry_name)
                            if self._generate_retry_inventory(
                                    filename, retries):
                                display.display(
                                    "\tto retry, use: --limit @%s\n" %
                                    filename)

                    self._tqm.send_callback('v2_playbook_on_stats',
                                            self._tqm._stats)

                # if the last result wasn't zero, break out of the playbook file name loop
                if result != 0:
                    break

            if entrylist:
                return entrylist

        finally:
            if self._tqm is not None:
                self._tqm.cleanup()
            if self._loader:
                self._loader.cleanup_all_tmp_files()

        if self._options.syntax:
            display.display("No issues encountered")
            return result

        if self._options.start_at_task and not self._tqm._start_at_done:
            display.error(
                "No matching task \"%s\" found. "
                "Note: --start-at-task can only follow static includes." %
                self._options.start_at_task)

        return result
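
The batching loop above relies on self._get_serialized_batches(play), whose body is not shown here. As a rough illustration only (plain integer serial values, with hypothetical hosts/serial parameters rather than Ansible's real keyword handling), splitting hosts into fixed-size batches can look like this:

def serialized_batches(hosts, serial=0):
    """Chunk a host list into batches of at most `serial` hosts; a falsy
    serial means every host runs in a single batch."""
    if not serial:
        return [list(hosts)]
    return [list(hosts[i:i + serial]) for i in range(0, len(hosts), serial)]

# serialized_batches(['web1', 'web2', 'web3', 'web4', 'web5'], 2)
# -> [['web1', 'web2'], ['web3', 'web4'], ['web5']]
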
Example #25
0
def get_docstring(filename, verbose=False):
    """
    Search for assignment of the DOCUMENTATION and EXAMPLES variables
    in the given file.
    Parse DOCUMENTATION from YAML and return the YAML doc or None
    together with EXAMPLES, as plain text.

    DOCUMENTATION can be extended using documentation fragments
    loaded by the PluginLoader from the module_docs_fragments
    directory.
    """

    data = {
        'doc': None,
        'plainexamples': None,
        'returndocs': None,
        'metadata': None
    }

    string_to_vars = {
        'DOCUMENTATION': 'doc',
        'EXAMPLES': 'plainexamples',
        'RETURN': 'returndocs',
        'ANSIBLE_METADATA': 'metadata'
    }

    try:
        M = ast.parse(''.join(open(filename)))
        try:
            display.debug('Attempt first docstring is yaml docs')
            docstring = yaml.load(M.body[0].value.s)
            for string in string_to_vars.keys():
                if string in docstring:
                    data[string_to_vars[string]] = docstring[string]
                    display.debug('assigned :%s' % string_to_vars[string])
        except Exception as e:
            display.debug('failed docstring parsing: %s' % str(e))

        if not data['doc']:
            display.debug('Fallback to vars parsing')
            for child in M.body:
                if isinstance(child, ast.Assign):
                    for t in child.targets:
                        try:
                            theid = t.id
                        except AttributeError:
                            # skip errors can happen when trying to use the normal code
                            display.warning(
                                "Failed to assign id for %s on %s, skipping" %
                                (t, filename))
                            continue

                        if theid in string_to_vars:
                            varkey = string_to_vars[theid]
                            if isinstance(child.value, ast.Dict):
                                data[varkey] = ast.literal_eval(child.value)
                            else:
                                if theid in [
                                        'DOCUMENTATION', 'ANSIBLE_METADATA'
                                ]:
                                    # string should be yaml
                                    data[varkey] = AnsibleLoader(
                                        child.value.s,
                                        file_name=filename).get_single_data()
                                else:
                                    # not yaml, should be a simple string
                                    data[varkey] = child.value.s
                            display.debug('assigned :%s' % varkey)

        # add fragments to documentation
        if data['doc']:
            add_fragments(data['doc'], filename)

        # remove version
        if data['metadata']:
            for x in ('version', 'metadata_version'):
                if x in data['metadata']:
                    del data['metadata'][x]
    except:
        display.error("unable to parse %s" % filename)
        if verbose is True:
            display.display("unable to parse %s" % filename)
            raise

    return data['doc'], data['plainexamples'], data['returndocs'], data[
        'metadata']
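
A short usage sketch for get_docstring(): the module path below is purely illustrative, and the call simply unpacks the four return values shown above.

# Illustrative call only; the path is a placeholder, not a real install location.
doc, plainexamples, returndocs, metadata = get_docstring('/path/to/ping.py',
                                                         verbose=True)
if doc:
    print('%s - %s' % (doc.get('module'), doc.get('short_description')))
else:
    print('no parseable DOCUMENTATION found')
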
Example #26
0
    def default(self, arg, forceshell=False):
        """ actually runs modules """
        if arg.startswith("#"):
            return False

        if not self.options.cwd:
            display.error("No host found")
            return False

        if arg.split()[0] in self.modules:
            module = arg.split()[0]
            module_args = ' '.join(arg.split()[1:])
        else:
            module = 'shell'
            module_args = arg

        if forceshell is True:
            module = 'shell'
            module_args = arg

        self.options.module_name = module

        result = None
        try:
            check_raw = self.options.module_name in ('command', 'shell', 'script', 'raw')
            play_ds = dict(
                name = "Ansible Shell",
                hosts = self.options.cwd,
                gather_facts = 'no',
                tasks = [ dict(action=dict(module=module, args=parse_kv(module_args, check_raw=check_raw)))]
            )
            play = Play().load(play_ds, variable_manager=self.variable_manager, loader=self.loader)
        except Exception as e:
            display.error(u"Unable to build command: %s" % to_text(e))
            return False

        try:
            cb = 'minimal'  # FIXME: make callbacks configurable
            # now create a task queue manager to execute the play
            self._tqm = None
            try:
                self._tqm = TaskQueueManager(
                        inventory=self.inventory,
                        variable_manager=self.variable_manager,
                        loader=self.loader,
                        options=self.options,
                        passwords=self.passwords,
                        stdout_callback=cb,
                        run_additional_callbacks=C.DEFAULT_LOAD_CALLBACK_PLUGINS,
                        run_tree=False,
                    )

                result = self._tqm.run(play)
            finally:
                if self._tqm:
                    self._tqm.cleanup()
                if self.loader:
                    self.loader.cleanup_all_tmp_files()

            if result is None:
                display.error("No hosts found")
                return False
        except KeyboardInterrupt:
            display.error('User interrupted execution')
            return False
        except Exception as e:
            display.error(to_text(e))
            # FIXME: add traceback in very very verbose mode
            return False
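
The dispatch at the top of default() (known module name vs. 'shell' fallback, with forceshell overriding both) can be summarized by a small standalone helper; known_modules below is a hypothetical stand-in for self.modules:

def split_console_line(line, known_modules, forceshell=False):
    """First word is treated as a module when it is known; otherwise, or when
    forceshell is set, the whole line is handed to the 'shell' module."""
    words = line.split()
    if not forceshell and words and words[0] in known_modules:
        return words[0], ' '.join(words[1:])
    return 'shell', line

# split_console_line('ping data=hello', {'ping', 'setup'}) -> ('ping', 'data=hello')
# split_console_line('uptime', {'ping', 'setup'})          -> ('shell', 'uptime')
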
Example #27
0
    def run(self):

        super(DocCLI, self).run()

        if self.options.module_path is not None:
            for i in self.options.module_path.split(os.pathsep):
                module_loader.add_directory(i)

        # list modules
        if self.options.list_dir:
            paths = module_loader._get_paths()
            for path in paths:
                self.find_modules(path)

            self.pager(self.get_module_list_text())
            return 0

        if len(self.args) == 0:
            raise AnsibleOptionsError("Incorrect options passed")

        # process command line module list
        text = ''
        for module in self.args:

            try:
                # if the module lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
                filename = module_loader.find_plugin(module, mod_type='.py')
                if filename is None:
                    display.warning("module %s not found in %s\n" % (module, DocCLI.print_paths(module_loader)))
                    continue

                if any(filename.endswith(x) for x in self.BLACKLIST_EXTS):
                    continue

                try:
                    doc, plainexamples, returndocs = module_docs.get_docstring(filename, verbose=(self.options.verbosity > 0))
                except:
                    display.vvv(traceback.print_exc())
                    display.error("module %s has a documentation error formatting or is missing documentation\nTo see exact traceback use -vvv" % module)
                    continue

                if doc is not None:

                    all_keys = []
                    for (k,v) in iteritems(doc['options']):
                        all_keys.append(k)
                    all_keys = sorted(all_keys)
                    doc['option_keys'] = all_keys

                    doc['filename']         = filename
                    doc['docuri']           = doc['module'].replace('_', '-')
                    doc['now_date']         = datetime.date.today().strftime('%Y-%m-%d')
                    doc['plainexamples']    = plainexamples
                    doc['returndocs']       = returndocs

                    if self.options.show_snippet:
                        text += self.get_snippet_text(doc)
                    else:
                        text += self.get_man_text(doc)
                else:
                    # this typically means we couldn't even parse the docstring, not just that the YAML is busted,
                    # probably a quoting issue.
                    raise AnsibleError("Parsing produced an empty object.")
            except Exception as e:
                display.vvv(traceback.print_exc())
                raise AnsibleError("module %s missing documentation (or could not parse documentation): %s\n" % (module, str(e)))

        self.pager(text)
        return 0
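
This older DocCLI.run() expects module_docs.get_docstring() to return three values, while the later plugin_docs variant used elsewhere in these examples returns four (adding metadata). If both shapes have to be tolerated, a small shim can normalize them; this sketch assumes the only difference between versions is the tuple length:

def unpack_docstring(result):
    """Normalize get_docstring() output to (doc, plainexamples, returndocs,
    metadata), padding metadata with None for the older 3-tuple form."""
    if len(result) == 3:
        doc, plainexamples, returndocs = result
        return doc, plainexamples, returndocs, None
    return result
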
Example #28
0
    def run(self):
        ''' use Runner lib to do SSH things '''

        super(PullCLI, self).run()

        # log command line
        now = datetime.datetime.now()
        display.display(now.strftime("Starting Ansible Pull at %F %T"))
        display.display(' '.join(sys.argv))

        # Build Checkout command
        # Now construct the ansible command
        node = platform.node()
        host = socket.getfqdn()
        limit_opts = 'localhost,%s,127.0.0.1' % ','.join(set([host, node, host.split('.')[0], node.split('.')[0]]))
        base_opts = '-c local '
        if self.options.verbosity > 0:
            base_opts += ' -%s' % ''.join(["v" for x in range(0, self.options.verbosity)])

        # Attempt to use the inventory passed in as an argument
        # It might not yet have been downloaded so use localhost as default
        inv_opts = self._get_inv_cli()
        if not inv_opts:
            inv_opts = " -i localhost, "

        # SCM specific options
        if self.options.module_name == 'git':
            repo_opts = "name=%s dest=%s" % (self.options.url, self.options.dest)
            if self.options.checkout:
                repo_opts += ' version=%s' % self.options.checkout

            if self.options.accept_host_key:
                repo_opts += ' accept_hostkey=yes'

            if self.options.private_key_file:
                repo_opts += ' key_file=%s' % self.options.private_key_file

            if self.options.verify:
                repo_opts += ' verify_commit=yes'

            if self.options.tracksubs:
                repo_opts += ' track_submodules=yes'

            if not self.options.fullclone:
                repo_opts += ' depth=1'
        elif self.options.module_name == 'subversion':
            repo_opts = "repo=%s dest=%s" % (self.options.url, self.options.dest)
            if self.options.checkout:
                repo_opts += ' revision=%s' % self.options.checkout
            if not self.options.fullclone:
                repo_opts += ' export=yes'
        elif self.options.module_name == 'hg':
            repo_opts = "repo=%s dest=%s" % (self.options.url, self.options.dest)
            if self.options.checkout:
                repo_opts += ' revision=%s' % self.options.checkout
        elif self.options.module_name == 'bzr':
            repo_opts = "name=%s dest=%s" % (self.options.url, self.options.dest)
            if self.options.checkout:
                repo_opts += ' version=%s' % self.options.checkout
        else:
            raise AnsibleOptionsError('Unsupported (%s) SCM module for pull, choices are: %s' % (self.options.module_name, ','.join(self.REPO_CHOICES)))

        # options common to all supported SCMS
        if self.options.clean:
            repo_opts += ' force=yes'

        path = module_loader.find_plugin(self.options.module_name)
        if path is None:
            raise AnsibleOptionsError(("module '%s' not found.\n" % self.options.module_name))

        bin_path = os.path.dirname(os.path.abspath(sys.argv[0]))
        # hardcode local and inventory/host as this is just meant to fetch the repo
        cmd = '%s/ansible %s %s -m %s -a "%s" all -l "%s"' % (bin_path, inv_opts, base_opts, self.options.module_name, repo_opts, limit_opts)

        for ev in self.options.extra_vars:
            cmd += ' -e "%s"' % ev

        # Nap?
        if self.options.sleep:
            display.display("Sleeping for %d seconds..." % self.options.sleep)
            time.sleep(self.options.sleep)

        # RUN the Checkout command
        display.debug("running ansible with VCS module to checkout repo")
        display.vvvv('EXEC: %s' % cmd)
        rc, b_out, b_err = run_cmd(cmd, live=True)

        if rc != 0:
            if self.options.force:
                display.warning("Unable to update repository. Continuing with (forced) run of playbook.")
            else:
                return rc
        elif self.options.ifchanged and b'"changed": true' not in b_out:
            display.display("Repository has not changed, quitting.")
            return 0

        playbook = self.select_playbook(self.options.dest)
        if playbook is None:
            raise AnsibleOptionsError("Could not find a playbook to run.")

        # Build playbook command
        cmd = '%s/ansible-playbook %s %s' % (bin_path, base_opts, playbook)
        if self.options.vault_password_files:
            for vault_password_file in self.options.vault_password_files:
                cmd += " --vault-password-file=%s" % vault_password_file
        if self.options.vault_ids:
            for vault_id in self.options.vault_ids:
                cmd += " --vault-id=%s" % vault_id

        for ev in self.options.extra_vars:
            cmd += ' -e "%s"' % ev
        if self.options.ask_sudo_pass or self.options.ask_su_pass or self.options.become_ask_pass:
            cmd += ' --ask-become-pass'
        if self.options.skip_tags:
            cmd += ' --skip-tags "%s"' % to_native(u','.join(self.options.skip_tags))
        if self.options.tags:
            cmd += ' -t "%s"' % to_native(u','.join(self.options.tags))
        if self.options.subset:
            cmd += ' -l "%s"' % self.options.subset
        else:
            cmd += ' -l "%s"' % limit_opts
        if self.options.check:
            cmd += ' -C'

        os.chdir(self.options.dest)

        # redo inventory options as new files might exist now
        inv_opts = self._get_inv_cli()
        if inv_opts:
            cmd += inv_opts

        # RUN THE PLAYBOOK COMMAND
        display.debug("running ansible-playbook to do actual work")
        display.debug('EXEC: %s' % cmd)
        rc, b_out, b_err = run_cmd(cmd, live=True)

        if self.options.purge:
            os.chdir('/')
            try:
                shutil.rmtree(self.options.dest)
            except Exception as e:
                display.error(u"Failed to remove %s: %s" % (self.options.dest, to_text(e)))

        return rc
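
The git branch above builds repo_opts by string concatenation. As a sketch, the same key=value assembly can be isolated into a helper; the parameter names mirror the options used above and the defaults are assumptions, not Ansible's option defaults.

def build_git_repo_opts(url, dest, checkout=None, accept_host_key=False,
                        private_key_file=None, verify=False, tracksubs=False,
                        fullclone=False, clean=False):
    """Recreate the git repo_opts string assembled above."""
    opts = 'name=%s dest=%s' % (url, dest)
    if checkout:
        opts += ' version=%s' % checkout
    if accept_host_key:
        opts += ' accept_hostkey=yes'
    if private_key_file:
        opts += ' key_file=%s' % private_key_file
    if verify:
        opts += ' verify_commit=yes'
    if tracksubs:
        opts += ' track_submodules=yes'
    if not fullclone:
        opts += ' depth=1'
    if clean:
        opts += ' force=yes'
    return opts

# build_git_repo_opts('https://example.com/config.git', '/var/lib/ansible/pull')
# -> 'name=https://example.com/config.git dest=/var/lib/ansible/pull depth=1'
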
Example #29
0
def _print_exception(text):
    display.error(text, wrap_text=False)
Example #30
0
def read_docstring(filename, verbose=True, ignore_errors=True):
    """
    Search for assignment of the DOCUMENTATION and EXAMPLES variables in the given file.
    Parse DOCUMENTATION from YAML and return the YAML doc or None together with EXAMPLES, as plain text.
    """

    # FIXME: Should refactor this so that we have a docstring parsing
    # function and a separate variable parsing function
    # Can have a function one higher that invokes whichever is needed
    #
    # Should look roughly like this:
    # get_plugin_doc(filename, verbose=False)
    #   documentation = extract_docstring(plugin_ast, identifier, verbose=False)
    #   if not documentation and not (filter or test):
    #       documentation = extract_variables(plugin_ast)
    #   documentation['metadata'] = extract_metadata(plugin_ast)
    #   return documentation

    data = {
        'doc': None,
        'plainexamples': None,
        'returndocs': None,
        'metadata': None
    }

    string_to_vars = {
        'DOCUMENTATION': 'doc',
        'EXAMPLES': 'plainexamples',
        'RETURN': 'returndocs',
    }

    try:
        b_module_data = open(filename, 'rb').read()
        M = ast.parse(b_module_data)
        try:
            display.debug('Attempt first docstring is yaml docs')
            docstring = yaml.load(M.body[0].value.s)
            for string in string_to_vars.keys():
                if string in docstring:
                    data[string_to_vars[string]] = docstring[string]
                    display.debug('assigned :%s' % string_to_vars[string])
        except Exception as e:
            display.debug('failed docstring parsing: %s' % str(e))

        if not data['doc']:
            display.debug('Fallback to vars parsing')
            for child in M.body:
                if isinstance(child, ast.Assign):
                    for t in child.targets:
                        try:
                            theid = t.id
                        except AttributeError:
                            # skip errors can happen when trying to use the normal code
                            display.warning(
                                "Failed to assign id for %s on %s, skipping" %
                                (t, filename))
                            continue

                        if theid in string_to_vars:
                            varkey = string_to_vars[theid]
                            if isinstance(child.value, ast.Dict):
                                data[varkey] = ast.literal_eval(child.value)
                            else:
                                if theid == 'DOCUMENTATION':
                                    # string should be yaml
                                    data[varkey] = AnsibleLoader(
                                        child.value.s,
                                        file_name=filename).get_single_data()
                                else:
                                    # not yaml, should be a simple string
                                    data[varkey] = child.value.s
                            display.debug('assigned :%s' % varkey)

        # Metadata is per-file and a dict rather than per-plugin/function and yaml
        data['metadata'] = extract_metadata(module_ast=M)[0]

        # remove version
        if data['metadata']:
            for x in ('version', 'metadata_version'):
                if x in data['metadata']:
                    del data['metadata'][x]
    except:
        if verbose:
            display.error("unable to parse %s" % filename)
        if not ignore_errors:
            raise

    return data
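
The FIXME at the top of this function proposes splitting the parsing into separate steps. A toy version of that split is sketched below; extract_variables() here is a deliberately minimal stand-in (using the same pre-3.8 ast.Str API as the code above), and only extract_metadata() corresponds to something the function already calls.

import ast

def extract_variables(plugin_ast, wanted=('DOCUMENTATION', 'EXAMPLES', 'RETURN')):
    """Collect top-level string assignments to the wanted variable names."""
    found = {}
    for node in plugin_ast.body:
        if isinstance(node, ast.Assign) and isinstance(node.value, ast.Str):
            for target in node.targets:
                if getattr(target, 'id', None) in wanted:
                    found[target.id] = node.value.s
    return found

def get_plugin_doc(filename):
    """Sketch of the refactor named in the FIXME: parse once, then extract."""
    with open(filename, 'rb') as f:
        plugin_ast = ast.parse(f.read())
    documentation = extract_variables(plugin_ast)
    documentation['metadata'] = extract_metadata(module_ast=plugin_ast)[0]
    return documentation
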
Example #31
0
def read_docstring(filename, verbose=True, ignore_errors=True):
    """
    Search for assignment of the DOCUMENTATION and EXAMPLES variables in the given file.
    Parse DOCUMENTATION from YAML and return the YAML doc or None together with EXAMPLES, as plain text.
    """

    data = {
        'doc': None,
        'plainexamples': None,
        'returndocs': None,
        'metadata': None
    }

    string_to_vars = {
        'DOCUMENTATION': 'doc',
        'EXAMPLES': 'plainexamples',
        'RETURN': 'returndocs',
    }

    try:
        b_module_data = open(filename, 'rb').read()
        M = ast.parse(b_module_data)

        for child in M.body:
            if isinstance(child, ast.Assign):
                for t in child.targets:
                    try:
                        theid = t.id
                    except AttributeError:
                        # skip errors can happen when trying to use the normal code
                        display.warning("Failed to assign id for %s on %s, skipping" % (t, filename))
                        continue

                    if theid in string_to_vars:
                        varkey = string_to_vars[theid]
                        if isinstance(child.value, ast.Dict):
                            data[varkey] = ast.literal_eval(child.value)
                        else:
                            if theid == 'DOCUMENTATION':
                                # string should be yaml
                                data[varkey] = AnsibleLoader(child.value.s, file_name=filename).get_single_data()
                            else:
                                # not yaml, should be a simple string
                                data[varkey] = child.value.s
                        display.debug('assigned :%s' % varkey)

        # Metadata is per-file and a dict rather than per-plugin/function and yaml
        data['metadata'] = extract_metadata(module_ast=M)[0]

        # remove version
        if data['metadata']:
            for x in ('version', 'metadata_version'):
                if x in data['metadata']:
                    del data['metadata'][x]
    except:
        if verbose:
            display.error("unable to parse %s" % filename)
        if not ignore_errors:
            raise

    return data
Example #32
0
    def run(self):

        super(DocCLI, self).run()

        plugin_type = self.options.type

        # choose plugin type
        if plugin_type == 'cache':
            loader = cache_loader
        elif plugin_type == 'callback':
            loader = callback_loader
        elif plugin_type == 'connection':
            loader = connection_loader
        elif plugin_type == 'lookup':
            loader = lookup_loader
        elif plugin_type == 'strategy':
            loader = strategy_loader
        elif plugin_type == 'vars':
            loader = vars_loader
        elif plugin_type == 'inventory':
            loader = PluginLoader('InventoryModule', 'ansible.plugins.inventory', 'inventory_plugins', 'inventory_plugins')
        else:
            loader = module_loader

        # add to plugin path from command line
        if self.options.module_path is not None:
            for i in self.options.module_path.split(os.pathsep):
                loader.add_directory(i)

        # save only top level paths for errors
        search_paths = DocCLI.print_paths(loader)
        loader._paths = None  # reset so we can use subdirs below

        # list plugins for type
        if self.options.list_dir:
            paths = loader._get_paths()
            for path in paths:
                self.find_plugins(path, plugin_type)

            self.pager(self.get_plugin_list_text(loader))
            return 0

        # process all plugins of type
        if self.options.all_plugins:
            paths = loader._get_paths()
            for path in paths:
                self.find_plugins(path, plugin_type)
            self.args = sorted(set(self.plugin_list))

        if len(self.args) == 0:
            raise AnsibleOptionsError("Incorrect options passed")

        # process command line list
        text = ''
        for plugin in self.args:

            try:
                # if the plugin lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
                filename = loader.find_plugin(plugin, mod_type='.py', ignore_deprecated=True, check_aliases=True)
                if filename is None:
                    display.warning("%s %s not found in:\n%s\n" % (plugin_type, plugin, search_paths))
                    continue

                if any(filename.endswith(x) for x in C.BLACKLIST_EXTS):
                    continue

                try:
                    doc, plainexamples, returndocs, metadata = plugin_docs.get_docstring(filename, verbose=(self.options.verbosity > 0))
                except:
                    display.vvv(traceback.format_exc())
                    display.error("%s %s has a documentation error formatting or is missing documentation." % (plugin_type, plugin))
                    continue

                if doc is not None:

                    # assign from other sections
                    doc['plainexamples'] = plainexamples
                    doc['returndocs'] = returndocs
                    doc['metadata'] = metadata

                    # generate extra data
                    if plugin_type == 'module':
                        # is there corresponding action plugin?
                        if plugin in action_loader:
                            doc['action'] = True
                        else:
                            doc['action'] = False
                    doc['filename'] = filename
                    doc['now_date'] = datetime.date.today().strftime('%Y-%m-%d')
                    if 'docuri' in doc:
                        doc['docuri'] = doc[plugin_type].replace('_', '-')

                    if self.options.show_snippet and plugin_type == 'module':
                        text += self.get_snippet_text(doc)
                    else:
                        text += self.get_man_text(doc)
                else:
                    # this typically means we couldn't even parse the docstring, not just that the YAML is busted,
                    # probably a quoting issue.
                    raise AnsibleError("Parsing produced an empty object.")
            except Exception as e:
                display.vvv(traceback.format_exc())
                raise AnsibleError("%s %s missing documentation (or could not parse documentation): %s\n" % (plugin_type, plugin, str(e)))

        if text:
            self.pager(text)
        return 0
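
The plugin-type selection above is a plain if/elif chain; the same mapping can also be written as a dict lookup. This sketch assumes the same loader objects and PluginLoader import that this CLI already uses.

def choose_loader(plugin_type):
    """Dict-based equivalent of the if/elif chain above; unknown types fall
    back to module_loader just as the final else branch does."""
    loaders = {
        'cache': cache_loader,
        'callback': callback_loader,
        'connection': connection_loader,
        'lookup': lookup_loader,
        'strategy': strategy_loader,
        'vars': vars_loader,
    }
    if plugin_type == 'inventory':
        # constructed on demand, mirroring the original branch
        return PluginLoader('InventoryModule', 'ansible.plugins.inventory',
                            'inventory_plugins', 'inventory_plugins')
    return loaders.get(plugin_type, module_loader)
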
Example #33
0
def read_docstring(filename, verbose=True, ignore_errors=True):
    """
    Search for assignment of the DOCUMENTATION and EXAMPLES variables in the given file.
    Parse DOCUMENTATION from YAML and return the YAML doc or None together with EXAMPLES, as plain text.
    """

    data = {
        'doc': None,
        'plainexamples': None,
        'returndocs': None,
        'metadata': None,
        'seealso': None,
    }

    string_to_vars = {
        'DOCUMENTATION': 'doc',
        'EXAMPLES': 'plainexamples',
        'RETURN': 'returndocs',
    }

    try:
        b_module_data = open(filename, 'rb').read()
        M = ast.parse(b_module_data)

        for child in M.body:
            if isinstance(child, ast.Assign):
                for t in child.targets:
                    try:
                        theid = t.id
                    except AttributeError:
                        # skip errors can happen when trying to use the normal code
                        display.warning(
                            "Failed to assign id for %s on %s, skipping" %
                            (t, filename))
                        continue

                    if theid in string_to_vars:
                        varkey = string_to_vars[theid]
                        if isinstance(child.value, ast.Dict):
                            data[varkey] = ast.literal_eval(child.value)
                        else:
                            if theid == 'DOCUMENTATION':
                                # string should be yaml
                                data[varkey] = AnsibleLoader(
                                    child.value.s,
                                    file_name=filename).get_single_data()
                            else:
                                # not yaml, should be a simple string
                                data[varkey] = to_text(child.value.s)
                        display.debug('assigned :%s' % varkey)

        # Metadata is per-file and a dict rather than per-plugin/function and yaml
        data['metadata'] = extract_metadata(module_ast=M)[0]

        # remove version
        if data['metadata']:
            for x in ('version', 'metadata_version'):
                if x in data['metadata']:
                    del data['metadata'][x]
    except:
        if verbose:
            display.error("unable to parse %s" % filename)
        if not ignore_errors:
            raise

    return data
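
Unlike get_docstring() earlier, read_docstring() returns a single dict, so callers index it by key. A usage sketch with a placeholder path:

# Illustrative only; the path is a placeholder.
data = read_docstring('/path/to/ping.py', verbose=True, ignore_errors=False)

if data['doc']:
    print(data['doc'].get('short_description'))
if data['metadata']:
    print('supported_by: %s' % data['metadata'].get('supported_by'))
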
Example #34
0
    def run(self, iterator, play_context):
        '''
        The linear strategy is simple - get the next task and queue
        it for all hosts, then wait for the queue to drain before
        moving on to the next task
        '''

        # iterate over each task, while there is one left to run
        result = self._tqm.RUN_OK
        work_to_do = True
        while work_to_do and not self._tqm._terminated:

            try:
                display.debug("getting the remaining hosts for this loop")
                hosts_left = [host for host in self._inventory.get_hosts(iterator._play.hosts) if host.name not in self._tqm._unreachable_hosts]
                display.debug("done getting the remaining hosts for this loop")

                # queue up this task for each host in the inventory
                callback_sent = False
                work_to_do = False

                host_results = []
                host_tasks = self._get_next_task_lockstep(hosts_left, iterator)

                # skip control
                skip_rest   = False
                choose_step = True

                # flag set if task is set to any_errors_fatal
                any_errors_fatal = False

                results = []
                for (host, task) in host_tasks:
                    if not task:
                        continue

                    if self._tqm._terminated:
                        break

                    run_once = False
                    work_to_do = True

                    # test to see if the task across all hosts points to an action plugin which
                    # sets BYPASS_HOST_LOOP to true, or if it has run_once enabled. If so, we
                    # will only send this task to the first host in the list.

                    try:
                        action = action_loader.get(task.action, class_only=True)
                    except KeyError:
                        # we don't care here, because the action may simply not have a
                        # corresponding action plugin
                        action = None

                    # check to see if this task should be skipped, due to it being a member of a
                    # role which has already run (and whether that role allows duplicate execution)
                    if task._role and task._role.has_run(host):
                        # If there is no metadata, the default behavior is to not allow duplicates,
                        # if there is metadata, check to see if the allow_duplicates flag was set to true
                        if task._role._metadata is None or task._role._metadata and not task._role._metadata.allow_duplicates:
                            display.debug("'%s' skipped because role has already run" % task)
                            continue

                    if task.action == 'meta':
                        # for the linear strategy, we run meta tasks just once and for
                        # all hosts currently being iterated over rather than one host
                        results.extend(self._execute_meta(task, play_context, iterator))
                        if task.args.get('_raw_params', None) != 'noop':
                            run_once = True
                    else:
                        # handle step if needed, skip meta actions as they are used internally
                        if self._step and choose_step:
                            if self._take_step(task):
                                choose_step = False
                            else:
                                skip_rest = True
                                break

                        display.debug("getting variables")
                        task_vars = self._variable_manager.get_vars(loader=self._loader, play=iterator._play, host=host, task=task)
                        self.add_tqm_variables(task_vars, play=iterator._play)
                        templar = Templar(loader=self._loader, variables=task_vars)
                        display.debug("done getting variables")

                        run_once = templar.template(task.run_once) or action and getattr(action, 'BYPASS_HOST_LOOP', False)

                        if (task.any_errors_fatal or run_once) and not task.ignore_errors:
                            any_errors_fatal = True

                        if not callback_sent:
                            display.debug("sending task start callback, copying the task so we can template it temporarily")
                            saved_name = task.name
                            display.debug("done copying, going to template now")
                            try:
                                task.name = to_text(templar.template(task.name, fail_on_undefined=False), nonstring='empty')
                                display.debug("done templating")
                            except:
                                # just ignore any errors during task name templating,
                                # we don't care if it just shows the raw name
                                display.debug("templating failed for some reason")
                                pass
                            display.debug("here goes the callback...")
                            self._tqm.send_callback('v2_playbook_on_task_start', task, is_conditional=False)
                            task.name = saved_name
                            callback_sent = True
                            display.debug("sending task start callback")

                        self._blocked_hosts[host.get_name()] = True
                        self._queue_task(host, task, task_vars, play_context)
                        del task_vars

                    # if we're bypassing the host loop, break out now
                    if run_once:
                        break

                    results += self._process_pending_results(iterator, max_passes=max(1, int(len(self._tqm._workers) * 0.1)))

                # go to next host/task group
                if skip_rest:
                    continue

                display.debug("done queuing things up, now waiting for results queue to drain")
                if self._pending_results > 0:
                    results += self._wait_on_pending_results(iterator)
                host_results.extend(results)

                all_role_blocks = []
                for hr in results:
                    # handle include_role
                    if hr._task.action == 'include_role':
                        loop_var = None
                        if hr._task.loop:
                            loop_var = 'item'
                            if hr._task.loop_control:
                                loop_var = hr._task.loop_control.loop_var or 'item'
                            include_results = hr._result.get('results', [])
                        else:
                            include_results = [ hr._result ]

                        for include_result in include_results:
                            if 'skipped' in include_result and include_result['skipped'] or 'failed' in include_result and include_result['failed']:
                                continue

                            display.debug("generating all_blocks data for role")
                            new_ir = hr._task.copy()
                            new_ir.vars.update(include_result.get('include_variables', dict()))
                            if loop_var and loop_var in include_result:
                                new_ir.vars[loop_var] = include_result[loop_var]

                            all_role_blocks.extend(new_ir.get_block_list(play=iterator._play, variable_manager=self._variable_manager, loader=self._loader))

                if len(all_role_blocks) > 0:
                    for host in hosts_left:
                        iterator.add_tasks(host, all_role_blocks)

                try:
                    included_files = IncludedFile.process_include_results(
                        host_results,
                        self._tqm,
                        iterator=iterator,
                        inventory=self._inventory,
                        loader=self._loader,
                        variable_manager=self._variable_manager
                    )
                except AnsibleError as e:
                    # this is a fatal error, so we abort here regardless of block state
                    return self._tqm.RUN_ERROR

                include_failure = False
                if len(included_files) > 0:
                    display.debug("we have included files to process")
                    noop_task = Task()
                    noop_task.action = 'meta'
                    noop_task.args['_raw_params'] = 'noop'
                    noop_task.set_loader(iterator._play._loader)

                    display.debug("generating all_blocks data")
                    all_blocks = dict((host, []) for host in hosts_left)
                    display.debug("done generating all_blocks data")
                    for included_file in included_files:
                        display.debug("processing included file: %s" % included_file._filename)
                        # included hosts get the task list while those excluded get an equal-length
                        # list of noop tasks, to make sure that they continue running in lock-step
                        try:
                            new_blocks = self._load_included_file(included_file, iterator=iterator)

                            display.debug("iterating over new_blocks loaded from include file")
                            for new_block in new_blocks:
                                task_vars = self._variable_manager.get_vars(
                                    loader=self._loader,
                                    play=iterator._play,
                                    task=included_file._task,
                                )
                                display.debug("filtering new block on tags")
                                final_block = new_block.filter_tagged_tasks(play_context, task_vars)
                                display.debug("done filtering new block on tags")

                                noop_block = Block(parent_block=task._parent)
                                noop_block.block  = [noop_task for t in new_block.block]
                                noop_block.always = [noop_task for t in new_block.always]
                                noop_block.rescue = [noop_task for t in new_block.rescue]

                                for host in hosts_left:
                                    if host in included_file._hosts:
                                        all_blocks[host].append(final_block)
                                    else:
                                        all_blocks[host].append(noop_block)
                            display.debug("done iterating over new_blocks loaded from include file")

                        except AnsibleError as e:
                            for host in included_file._hosts:
                                self._tqm._failed_hosts[host.name] = True
                                iterator.mark_host_failed(host)
                            display.error(to_text(e), wrap_text=False)
                            include_failure = True
                            continue

                    # finally go through all of the hosts and append the
                    # accumulated blocks to their list of tasks
                    display.debug("extending task lists for all hosts with included blocks")

                    for host in hosts_left:
                        iterator.add_tasks(host, all_blocks[host])

                    display.debug("done extending task lists")
                    display.debug("done processing included files")

                display.debug("results queue empty")

                display.debug("checking for any_errors_fatal")
                failed_hosts = []
                unreachable_hosts = []
                for res in results:
                    if res.is_failed():
                        failed_hosts.append(res._host.name)
                    elif res.is_unreachable():
                        unreachable_hosts.append(res._host.name)

                # if any_errors_fatal and we had an error, mark all hosts as failed
                if any_errors_fatal and (len(failed_hosts) > 0 or len(unreachable_hosts) > 0):
                    for host in hosts_left:
                        (s, _) = iterator.get_next_task_for_host(host, peek=True)
                        if s.run_state != iterator.ITERATING_RESCUE or \
                           s.run_state == iterator.ITERATING_RESCUE and s.fail_state & iterator.FAILED_RESCUE != 0:
                            self._tqm._failed_hosts[host.name] = True
                            result |= self._tqm.RUN_FAILED_BREAK_PLAY
                display.debug("done checking for any_errors_fatal")

                display.debug("checking for max_fail_percentage")
                if iterator._play.max_fail_percentage is not None and len(results) > 0:
                    percentage = iterator._play.max_fail_percentage / 100.0

                    if (len(self._tqm._failed_hosts) / len(results)) > percentage:
                        for host in hosts_left:
                            # don't double-mark hosts, or the iterator will potentially
                            # fail them out of the rescue/always states
                            if host.name not in failed_hosts:
                                self._tqm._failed_hosts[host.name] = True
                                iterator.mark_host_failed(host)
                        self._tqm.send_callback('v2_playbook_on_no_hosts_remaining')
                        result |= self._tqm.RUN_FAILED_BREAK_PLAY
                display.debug("done checking for max_fail_percentage")

                display.debug("checking to see if all hosts have failed and the running result is not ok")
                if result != self._tqm.RUN_OK and len(self._tqm._failed_hosts) >= len(hosts_left):
                    display.debug("^ not ok, so returning result now")
                    self._tqm.send_callback('v2_playbook_on_no_hosts_remaining')
                    return result
                display.debug("done checking to see if all hosts have failed")

            except (IOError, EOFError) as e:
                display.debug("got IOError/EOFError in task loop: %s" % e)
                # most likely an abort, return failed
                return self._tqm.RUN_UNKNOWN_ERROR

        # run the base class run() method, which executes the cleanup function
        # and runs any outstanding handlers which have been triggered

        return super(StrategyModule, self).run(iterator, play_context, result)
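
The max_fail_percentage check near the end of the loop compares the ratio of failed hosts to batch results against the play's threshold. Isolated as a sketch (a float division is used here to sidestep Python 2 integer division):

def exceeds_max_fail_percentage(failed_hosts, results_count, max_fail_percentage):
    """Return True once the share of failed hosts is strictly greater than
    the allowed percentage, mirroring the check above."""
    if max_fail_percentage is None or not results_count:
        return False
    return (failed_hosts / float(results_count)) > (max_fail_percentage / 100.0)

# With 10 results and max_fail_percentage=30:
#   exceeds_max_fail_percentage(3, 10, 30) -> False  (0.3 is not > 0.3)
#   exceeds_max_fail_percentage(4, 10, 30) -> True   (remaining hosts are failed)
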
Example #35
0
    def get_fixture_file(self, function, op, argvals=None, connection=None, cmd=None):
        '''Use the data to generate a fixture filename for the caller'''

        display.v('#=================> GET FIXTURE FILE')

        # read the current task info from the callback
        task_info = self.callback_reader.get_current_task()
        self.current_task_number = task_info['number']
        self.current_task_info = task_info.copy()

        '''
        if hasattr(connection, 'task_uuid'):
            self.current_task_number = connection.task_uuid.split('-')[-1]
        else:
            import epdb; epdb.st()
        '''

        # set the top level directory for the task fixtures
        taskdir = os.path.join(self.fixture_dir, str(self.current_task_number))
        try:
            if not os.path.isdir(taskdir):
                os.makedirs(taskdir)
        except OSError as e:
            # fork race conditions
            pass

        # https://github.com/ansible/ansible/blob/devel/lib/ansible/executor/task_executor.py#L797
        # connection = self._shared_loader_obj.connection_loader.get(conn_type, self._play_context, self._new_stdin, ansible_playbook_pid=to_text(os.getppid()))

        # use connection to determine the remote host
        '''
        if hasattr(connection, 'host'):
            hostdir = os.path.join(taskdir, connection.host)
        else:
            hn = connection.get_option('_original_host')
            hostdir = os.path.join(taskdir, 'localhost[%s]' % hn)
        '''
        # depends on https://github.com/ansible/ansible/pull/38818
        if not hasattr(connection, 'host'):
            hn = connection.get_option('_original_host')
        else:
            if connection.host == 'localhost':
                hn = connection.get_option('_original_host')
            else:
                hn = connection.host
        #import epdb; epdb.st()
        if not hn:
            hostdir = os.path.join(taskdir, connection.host)
        else:
            hostdir = os.path.join(taskdir, hn)

        # ensure we have a place to read and write the fixtures for the host
        if not os.path.isdir(hostdir):
            os.makedirs(hostdir)

        # fixtures are timestamped for easier visual sorting
        ts = datetime.datetime.strftime(
            datetime.datetime.now(),
            '%Y-%m-%d_%H-%M-%S-%f'
        )

        # this is what needs to be returned so the caller knows what to
        # read or write for this connection.
        filen = None

        if op == 'record':
            display.vvvv('WRITE TASKID: %s' % self.current_task_number)
            display.vvvv('WRITE FUNCTION: %s' % function)
            display.vvvv('WRITE OP: %s' % op)

            prefix = os.path.join(hostdir, ts + '_' + function + '_')
            existing = glob.glob('%s/*.json' % hostdir)
            existing = [x for x in existing if function in x]
            existing = [x for x in existing if x.endswith('.json')]
            existing = [x.replace('.json', '') for x in existing]
            existing = [x.split('_')[-1] for x in existing]
            existing = sorted([int(x) for x in existing])

            _prefix = os.path.join(hostdir, ts + '_' + function + '_')
            if not existing:
                filen = _prefix + '1.json'
            else:
                filen = _prefix + '%s.json' % (existing[-1] + 1)

        elif op == 'read':
            display.vvvv('[%s] READ TASKID: %s' % (hn, self.current_task_number))
            display.vvvv('[%s] READ FUNCTION: %s' % (hn, function))
            display.vvvv('[%s] READ OP: %s' % (hn, op))

            existing = glob.glob('%s/*.json' % hostdir)
            existing = [x for x in existing if function in x]
            existing = [x for x in existing if x.endswith('.json')]
            display.vvvv('[%s] 1. possible choices: %s' % (hn, existing))

            if cmd:
                existing = sorted(existing)
                candidates = []
                for ef in existing:
                    with open(ef, 'r') as f:
                        jdata = json.loads(f.read())
                    if 'command' not in jdata:
                        continue
                    if jdata['command'][-1] == cmd[-1]:
                        candidates.append(ef)
                        continue
                    if jdata['command'][-1][:30] == cmd[-1][:30]:
                        candidates.append(ef)
                        continue

                if candidates:
                    existing = candidates[:]
                display.vvvv('[%s] 2. possible choices: %s' % (hn, existing))

            existing = [x.replace('.json', '') for x in existing]
            existing = [x.split('_')[-1] for x in existing]
            existing = sorted([int(x) for x in existing], reverse=True)
            display.vvvv('[%s] 3. possible choices: %s' % (hn, existing))

            # use the last file to increment for this call
            lastf = self.fixture_logger.get_last_file(self.current_task_number, hostdir, function)
            display.v('[%s] READ LASTFILE: %s' % (hn, lastf))

            # increment the id of the file
            if lastf is None:
                fileid = 1
            else:
                fileid = lastf.split('_')[-1].replace('.json', '')
                fileid = int(fileid)
                fileid += 1
            display.vvvv('[' + hn + '] READ FID: ' + str(fileid))

            # try to find the file with the new id
            suffix = '_%s_%s.json' % (function, fileid)
            _existing = glob.glob('%s/*%s' % (hostdir, suffix))
            display.v('[%s] READ _EXISTING: %s' % (hn, _existing))

            # openshift hackaround - just send the last one again ... ?
            if not _existing:
                _existing = [lastf]

            if len(_existing) == 1:
                filen = _existing[-1]
            else:
                display.error('[%s] _existing: %s' % (hn, _existing))
                breakhost = os.environ.get('ANSIBLE_VCR_HOST_BREAK')
                if not breakhost or breakhost == hn:
                    import epdb; epdb.st()
                filen = None

            self.fixture_logger.set_last_file(self.current_task_number, hostdir, function, filen)

        display.vvvv('[' + hn + '] RETURN FILE: ' + str(filen))
        return filen
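
In the 'record' branch above, the next fixture name is derived by parsing the trailing counter out of existing *_<function>_<N>.json files for the host. That numbering step in isolation (assuming every matching file ends in a numeric counter, as the code above also assumes):

import glob
import os

def next_fixture_name(hostdir, ts, function):
    """Pick the next numbered fixture file for a host/function pair."""
    existing = [p for p in glob.glob(os.path.join(hostdir, '*.json'))
                if function in p]
    counters = sorted(int(os.path.splitext(p)[0].split('_')[-1]) for p in existing)
    next_id = counters[-1] + 1 if counters else 1
    return os.path.join(hostdir, '%s_%s_%s.json' % (ts, function, next_id))

# next_fixture_name('/tmp/fixtures/3/web01', '2018-01-01_00-00-00-000000', 'exec_command')
# -> '/tmp/fixtures/3/web01/2018-01-01_00-00-00-000000_exec_command_1.json'
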
Example #36
0
    def default(self, arg, forceshell=False):
        """ actually runs modules """
        if arg.startswith("#"):
            return False

        if not self.options.cwd:
            display.error("No host found")
            return False

        if arg.split()[0] in self.modules:
            module = arg.split()[0]
            module_args = ' '.join(arg.split()[1:])
        else:
            module = 'shell'
            module_args = arg

        if forceshell is True:
            module = 'shell'
            module_args = arg

        self.options.module_name = module

        result = None
        try:
            check_raw = self.options.module_name in ('command', 'shell',
                                                     'script', 'raw')
            play_ds = dict(
                name="Ansible Shell",
                hosts=self.options.cwd,
                gather_facts='no',
                tasks=[
                    dict(action=dict(module=module,
                                     args=parse_kv(module_args,
                                                   check_raw=check_raw)))
                ])
            play = Play().load(play_ds,
                               variable_manager=self.variable_manager,
                               loader=self.loader)
        except Exception as e:
            display.error(u"Unable to build command: %s" % to_text(e))
            return False

        try:
            cb = 'minimal'  # FIXME: make callbacks configurable
            # now create a task queue manager to execute the play
            self._tqm = None
            try:
                self._tqm = TaskQueueManager(
                    inventory=self.inventory,
                    variable_manager=self.variable_manager,
                    loader=self.loader,
                    options=self.options,
                    passwords=self.passwords,
                    stdout_callback=cb,
                    run_additional_callbacks=C.DEFAULT_LOAD_CALLBACK_PLUGINS,
                    run_tree=False,
                )

                result = self._tqm.run(play)
            finally:
                if self._tqm:
                    self._tqm.cleanup()
                if self.loader:
                    self.loader.cleanup_all_tmp_files()

            if result is None:
                display.error("No hosts found")
                return False
        except KeyboardInterrupt:
            display.error('User interrupted execution')
            return False
        except Exception as e:
            display.error(to_text(e))
            # FIXME: add traceback in very very verbose mode
            return False
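The only pure string handling in default() is the module/argument split at the top; a standalone sketch of that logic, with known_modules standing in for self.modules (an assumption, not console API):

def parse_console_line(arg, known_modules, forceshell=False):
    """Split a console line into (module, module_args), falling back to shell."""
    first = arg.split()[0]
    if first in known_modules and not forceshell:
        return first, ' '.join(arg.split()[1:])
    return 'shell', arg


# parse_console_line('ping data=pong', {'ping', 'copy'}) -> ('ping', 'data=pong')
# parse_console_line('ls -la /tmp', {'ping', 'copy'})    -> ('shell', 'ls -la /tmp')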
Example #37
0
    def run(self, iterator, play_context):
        """
        The linear strategy is simple - get the next task and queue
        it for all hosts, then wait for the queue to drain before
        moving on to the next task
        """

        # iterate over each task while there is one left to run
        result = True
        work_to_do = True
        while work_to_do and not self._tqm._terminated:

            try:
                display.debug("getting the remaining hosts for this loop")
                hosts_left = [
                    host
                    for host in self._inventory.get_hosts(iterator._play.hosts)
                    if host.name not in self._tqm._unreachable_hosts
                ]
                display.debug("done getting the remaining hosts for this loop")

                # queue up this task for each host in the inventory
                callback_sent = False
                work_to_do = False

                host_results = []
                host_tasks = self._get_next_task_lockstep(hosts_left, iterator)

                # skip control
                skip_rest = False
                choose_step = True

                results = []
                for (host, task) in host_tasks:
                    if not task:
                        continue

                    if self._tqm._terminated:
                        break

                    run_once = False
                    work_to_do = True

                    # test to see if the task across all hosts points to an action plugin which
                    # sets BYPASS_HOST_LOOP to true, or if it has run_once enabled. If so, we
                    # will only send this task to the first host in the list.

                    try:
                        action = action_loader.get(task.action, class_only=True)
                        if task.run_once or getattr(action, "BYPASS_HOST_LOOP", False):
                            run_once = True
                    except KeyError:
                        # we don't care here, because the action may simply not have a
                        # corresponding action plugin
                        pass

                    # check to see if this task should be skipped, due to it being a member of a
                    # role which has already run (and whether that role allows duplicate execution)
                    if task._role and task._role.has_run(host):
                        # If there is no metadata, the default behavior is to not allow duplicates,
                        # if there is metadata, check to see if the allow_duplicates flag was set to true
                        if (
                            task._role._metadata is None
                            or task._role._metadata
                            and not task._role._metadata.allow_duplicates
                        ):
                            display.debug("'%s' skipped because role has already run" % task)
                            continue

                    if task.action == "meta":
                        self._execute_meta(task, play_context, iterator)
                    else:
                        # handle step if needed, skip meta actions as they are used internally
                        if self._step and choose_step:
                            if self._take_step(task):
                                choose_step = False
                            else:
                                skip_rest = True
                                break

                        display.debug("getting variables")
                        task_vars = self._variable_manager.get_vars(
                            loader=self._loader, play=iterator._play, host=host, task=task
                        )
                        self.add_tqm_variables(task_vars, play=iterator._play)
                        templar = Templar(loader=self._loader, variables=task_vars)
                        display.debug("done getting variables")

                        if not callback_sent:
                            display.debug(
                                "sending task start callback, copying the task so we can template it temporarily"
                            )
                            saved_name = task.name
                            display.debug("done copying, going to template now")
                            try:
                                task.name = text_type(templar.template(task.name, fail_on_undefined=False))
                                display.debug("done templating")
                            except:
                                # just ignore any errors during task name templating,
                                # we don't care if it just shows the raw name
                                display.debug("templating failed for some reason")
                                pass
                            display.debug("here goes the callback...")
                            self._tqm.send_callback("v2_playbook_on_task_start", task, is_conditional=False)
                            task.name = saved_name
                            callback_sent = True
                            display.debug("sending task start callback")

                        self._blocked_hosts[host.get_name()] = True
                        self._queue_task(host, task, task_vars, play_context)

                    # if we're bypassing the host loop, break out now
                    if run_once:
                        break

                    results += self._process_pending_results(iterator, one_pass=True)

                # go to next host/task group
                if skip_rest:
                    continue

                display.debug("done queuing things up, now waiting for results queue to drain")
                results += self._wait_on_pending_results(iterator)
                host_results.extend(results)

                if not work_to_do and len(iterator.get_failed_hosts()) > 0:
                    display.debug("out of hosts to run on")
                    self._tqm.send_callback("v2_playbook_on_no_hosts_remaining")
                    result = False
                    break

                try:
                    included_files = IncludedFile.process_include_results(
                        host_results,
                        self._tqm,
                        iterator=iterator,
                        inventory=self._inventory,
                        loader=self._loader,
                        variable_manager=self._variable_manager,
                    )
                except AnsibleError as e:
                    return False

                if len(included_files) > 0:
                    display.debug("we have included files to process")
                    noop_task = Task()
                    noop_task.action = "meta"
                    noop_task.args["_raw_params"] = "noop"
                    noop_task.set_loader(iterator._play._loader)

                    display.debug("generating all_blocks data")
                    all_blocks = dict((host, []) for host in hosts_left)
                    display.debug("done generating all_blocks data")
                    for included_file in included_files:
                        display.debug("processing included file: %s" % included_file._filename)
                        # included hosts get the task list while those excluded get an equal-length
                        # list of noop tasks, to make sure that they continue running in lock-step
                        try:
                            new_blocks = self._load_included_file(included_file, iterator=iterator)

                            display.debug("iterating over new_blocks loaded from include file")
                            for new_block in new_blocks:
                                task_vars = self._variable_manager.get_vars(
                                    loader=self._loader, play=iterator._play, task=included_file._task
                                )
                                display.debug("filtering new block on tags")
                                final_block = new_block.filter_tagged_tasks(play_context, task_vars)
                                display.debug("done filtering new block on tags")

                                noop_block = Block(parent_block=task._block)
                                noop_block.block = [noop_task for t in new_block.block]
                                noop_block.always = [noop_task for t in new_block.always]
                                noop_block.rescue = [noop_task for t in new_block.rescue]

                                for host in hosts_left:
                                    if host in included_file._hosts:
                                        all_blocks[host].append(final_block)
                                    else:
                                        all_blocks[host].append(noop_block)
                            display.debug("done iterating over new_blocks loaded from include file")

                        except AnsibleError as e:
                            for host in included_file._hosts:
                                self._tqm._failed_hosts[host.name] = True
                                iterator.mark_host_failed(host)
                            display.error(e, wrap_text=False)
                            continue

                    # finally go through all of the hosts and append the
                    # accumulated blocks to their list of tasks
                    display.debug("extending task lists for all hosts with included blocks")

                    for host in hosts_left:
                        iterator.add_tasks(host, all_blocks[host])

                    display.debug("done extending task lists")
                    display.debug("done processing included files")

                display.debug("results queue empty")
            except (IOError, EOFError) as e:
                display.debug("got IOError/EOFError in task loop: %s" % e)
                # most likely an abort, return failed
                return False

        # run the base class run() method, which executes the cleanup function
        # and runs any outstanding handlers which have been triggered

        return super(StrategyModule, self).run(iterator, play_context, result)
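The noop-block padding in the include handling above is what keeps hosts in lock-step; a minimal sketch of the idea with plain lists standing in for Ansible's Block/Task objects:

def pad_blocks(hosts, included_hosts, real_tasks, noop_task='noop'):
    """Give included hosts the real task list and everyone else an equal-length noop list."""
    all_blocks = dict((host, []) for host in hosts)
    noop_block = [noop_task for _ in real_tasks]  # same length as the real block
    for host in hosts:
        if host in included_hosts:
            all_blocks[host].append(real_tasks)
        else:
            all_blocks[host].append(noop_block)
    return all_blocks


# pad_blocks(['a', 'b'], {'a'}, ['t1', 't2'])
# -> {'a': [['t1', 't2']], 'b': [['noop', 'noop']]}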
Example #38
0
    def run(self, tmp=None, task_vars=None):
        # we must use vars, since task_vars will have un-processed variables
        host_vars = task_vars['vars']
        host = host_vars['ansible_hostname']
        mode = self._task.args.get('mode', 'permissive')

        self._supports_check_mode = False # XXX ?
        self._supports_async = True

        result = {}
        result['_ansible_verbose_always'] = True

        try:
            notario_store["groups"] = host_vars["groups"]
            notario_store["containerized_deployment"] = host_vars["containerized_deployment"]
            notario.validate(host_vars, install_options, defined_keys=True)

            if host_vars["ceph_origin"] == "repository" and not host_vars["containerized_deployment"]:
                notario.validate(host_vars, ceph_origin_repository, defined_keys=True)

                if host_vars["ceph_repository"] == "community":
                    notario.validate(host_vars, ceph_repository_community, defined_keys=True)

                if host_vars["ceph_repository"] == "rhcs":
                    notario.validate(host_vars, ceph_repository_rhcs, defined_keys=True)

                if host_vars["ceph_repository"] == "dev":
                    notario.validate(host_vars, ceph_repository_dev, defined_keys=True)

            # store these values because one must be defined and the validation method
            # will need access to all three through the store
            notario_store["monitor_address"] = host_vars.get("monitor_address", None)
            notario_store["monitor_address_block"] = host_vars.get("monitor_address_block", None)
            notario_store["monitor_interface"] = host_vars.get("monitor_interface", None)

            if host_vars["mon_group_name"] in host_vars["group_names"]:
                notario.validate(host_vars, monitor_options, defined_keys=True)

            notario_store["radosgw_address"] = host_vars.get("radosgw_address", None)
            notario_store["radosgw_address_block"] = host_vars.get("radosgw_address_block", None)
            notario_store["radosgw_interface"] = host_vars.get("radosgw_interface", None)

            if host_vars["rgw_group_name"] in host_vars["group_names"]:
                notario.validate(host_vars, rados_options, defined_keys=True)

            # validate osd scenario setup
            if host_vars["osd_group_name"] in host_vars["group_names"]:
                notario.validate(host_vars, osd_options, defined_keys=True)
                notario_store['osd_objectstore'] = host_vars["osd_objectstore"]
                if host_vars["osd_scenario"] == "collocated":
                    if not host_vars.get("osd_auto_discovery", False):
                        notario.validate(host_vars, collocated_osd_scenario, defined_keys=True)

                if host_vars["osd_scenario"] == "non-collocated":
                    notario.validate(host_vars, non_collocated_osd_scenario, defined_keys=True)

                if host_vars["osd_scenario"] == "lvm":
                    if host_vars.get("devices"):
                        notario.validate(host_vars, lvm_batch_scenario, defined_keys=True)
                    elif notario_store['osd_objectstore'] == 'filestore':
                        notario.validate(host_vars, lvm_filestore_scenario, defined_keys=True)
                    elif notario_store['osd_objectstore'] == 'bluestore':
                        notario.validate(host_vars, lvm_bluestore_scenario, defined_keys=True)

        except Invalid as error:
            display.vvv("Notario Failure: %s" % str(error))
            msg = ""
            if error.path:
                msg = "[{}] Validation failed for variable: {}".format(host, error.path[0])
                display.error(msg)
                reason = "[{}] Reason: {}".format(host, error.reason)
            else:
                reason = "[{}] Reason: {}".format(host, str(error))
            given = ""
            try:
                if "schema is missing" not in error.message:
                    for i in range(0, len(error.path)):
                        if i == 0:
                            given = "[{}] Given value for {}".format(
                                    host, error.path[0])
                        else:
                            given = given + ": {}".format(error.path[i])
                    if given:
                        display.error(given)
                else:
                    given = ""
                    reason = "[{}] Reason: {}".format(host, error.message)
            except KeyError:
                given = ""
            display.error(reason)
            result['failed'] = mode == 'strict'
            result['msg'] = "\n".join([s for s in (msg, reason, given) if len(s) > 0])
            result['stderr_lines'] = result['msg'].split('\n')


        return result
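The tail of the except branch is worth isolating: failure is only fatal in 'strict' mode, and msg/stderr_lines are built from whichever message parts are non-empty. A minimal sketch of that result assembly with plain strings as inputs:

def build_validation_result(mode, msg='', reason='', given=''):
    """Assemble the action-plugin result dict the way the except branch above does."""
    result = {'_ansible_verbose_always': True}
    result['failed'] = mode == 'strict'
    result['msg'] = "\n".join([s for s in (msg, reason, given) if len(s) > 0])
    result['stderr_lines'] = result['msg'].split('\n')
    return result


# build_validation_result('permissive', msg='[mon0] Validation failed for variable: fsid')
# leaves failed False but still reports the message in msg/stderr_lines.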
Example #39
0
def get_docstring(filename, verbose=False):
    """
    Search for assignment of the DOCUMENTATION and EXAMPLES variables
    in the given file.
    Parse DOCUMENTATION from YAML and return the YAML doc or None
    together with EXAMPLES, as plain text.

    DOCUMENTATION can be extended using documentation fragments
    loaded by the PluginLoader from the module_docs_fragments
    directory.
    """

    doc = None
    plainexamples = None
    returndocs = None

    try:
        # Thank you, Habbie, for this bit of code :-)
        M = ast.parse(''.join(open(filename)))
        for child in M.body:
            if isinstance(child, ast.Assign):
                for t in child.targets:
                    try:
                        theid = t.id
                    except AttributeError as e:
                        # skip this target; errors can happen when trying to use the normal code
                        display.warning(
                            "Failed to assign id for %t on %s, skipping" %
                            (t, filename))
                        continue

                    if 'DOCUMENTATION' in theid:
                        doc = yaml.safe_load(child.value.s)
                        fragments = doc.get('extends_documentation_fragment',
                                            [])

                        if isinstance(fragments, basestring):
                            fragments = [fragments]

                        # Allow the module to specify a var other than DOCUMENTATION
                        # to pull the fragment from, using dot notation as a separator
                        for fragment_slug in fragments:
                            fragment_slug = fragment_slug.lower()
                            if '.' in fragment_slug:
                                fragment_name, fragment_var = fragment_slug.split(
                                    '.', 1)
                                fragment_var = fragment_var.upper()
                            else:
                                fragment_name, fragment_var = fragment_slug, 'DOCUMENTATION'

                            fragment_class = fragment_loader.get(fragment_name)
                            assert fragment_class is not None

                            fragment_yaml = getattr(fragment_class,
                                                    fragment_var, '{}')
                            fragment = yaml.safe_load(fragment_yaml)

                            if fragment.has_key('notes'):
                                notes = fragment.pop('notes')
                                if notes:
                                    if not doc.has_key('notes'):
                                        doc['notes'] = []
                                    doc['notes'].extend(notes)

                            if 'options' not in fragment.keys():
                                raise Exception(
                                    "missing options in fragment, possibly misformatted?"
                                )

                            for key, value in fragment.items():
                                if not doc.has_key(key):
                                    doc[key] = value
                                else:
                                    if isinstance(doc[key], MutableMapping):
                                        doc[key].update(value)
                                    elif isinstance(doc[key], MutableSet):
                                        doc[key].add(value)
                                    elif isinstance(doc[key], MutableSequence):
                                        doc[key] = sorted(
                                            frozenset(doc[key] + value))
                                    else:
                                        raise Exception(
                                            "Attempt to extend a documentation fragement of unknown type"
                                        )

                    elif 'EXAMPLES' in theid:
                        plainexamples = child.value.s[
                            1:]  # Skip first empty line

                    elif 'RETURN' in theid:
                        returndocs = child.value.s[1:]
    except:
        display.error("unable to parse %s" % filename)
        if verbose:
            display.display("unable to parse %s" % filename)
            raise
    return doc, plainexamples, returndocs
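Stripped of the YAML parsing and fragment merging, get_docstring is an AST walk over top-level assignments. A minimal sketch on current Python 3 (the snippets above use the older ast.Str/.s spelling):

import ast

SOURCE = '''
DOCUMENTATION = """
module: demo
short_description: example
"""
EXAMPLES = """
- demo: {}
"""
'''


def extract_string_assignments(source, names=('DOCUMENTATION', 'EXAMPLES', 'RETURN')):
    """Return {name: string} for top-level string assignments to the given names."""
    found = {}
    for child in ast.parse(source).body:
        if not isinstance(child, ast.Assign):
            continue
        for target in child.targets:
            theid = getattr(target, 'id', None)
            if theid in names and isinstance(child.value, ast.Constant) and isinstance(child.value.value, str):
                found[theid] = child.value.value
    return found


print(sorted(extract_string_assignments(SOURCE)))  # ['DOCUMENTATION', 'EXAMPLES']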
Example #40
0
def get_docstring(filename, verbose=False):
    """
    Search for assignment of the DOCUMENTATION and EXAMPLES variables
    in the given file.
    Parse DOCUMENTATION from YAML and return the YAML doc or None
    together with EXAMPLES, as plain text.

    DOCUMENTATION can be extended using documentation fragments
    loaded by the PluginLoader from the module_docs_fragments
    directory.
    """

    doc = None
    plainexamples = None
    returndocs = None
    metadata = None

    try:
        # Thank you, Habbie, for this bit of code :-)
        M = ast.parse(''.join(open(filename)))
        for child in M.body:
            if isinstance(child, ast.Assign):
                for t in child.targets:
                    try:
                        theid = t.id
                    except AttributeError as e:
                        # skip this target; errors can happen when trying to use the normal code
                        display.warning(
                            "Failed to assign id for %s on %s, skipping" %
                            (t, filename))
                        continue

                    if 'DOCUMENTATION' == theid:
                        doc = AnsibleLoader(
                            child.value.s,
                            file_name=filename).get_single_data()
                        fragments = doc.get('extends_documentation_fragment',
                                            [])

                        if isinstance(fragments, string_types):
                            fragments = [fragments]

                        # Allow the module to specify a var other than DOCUMENTATION
                        # to pull the fragment from, using dot notation as a separator
                        for fragment_slug in fragments:
                            fragment_slug = fragment_slug.lower()
                            if '.' in fragment_slug:
                                fragment_name, fragment_var = fragment_slug.split(
                                    '.', 1)
                                fragment_var = fragment_var.upper()
                            else:
                                fragment_name, fragment_var = fragment_slug, 'DOCUMENTATION'

                            fragment_class = fragment_loader.get(fragment_name)
                            assert fragment_class is not None

                            fragment_yaml = getattr(fragment_class,
                                                    fragment_var, '{}')
                            fragment = AnsibleLoader(
                                fragment_yaml,
                                file_name=filename).get_single_data()

                            if 'notes' in fragment:
                                notes = fragment.pop('notes')
                                if notes:
                                    if 'notes' not in doc:
                                        doc['notes'] = []
                                    doc['notes'].extend(notes)

                            if 'options' not in fragment:
                                raise Exception(
                                    "missing options in fragment (%s), possibly misformatted?: %s"
                                    % (fragment_name, filename))

                            for key, value in fragment.items():
                                if key not in doc:
                                    doc[key] = value
                                else:
                                    if isinstance(doc[key], MutableMapping):
                                        doc[key].update(value)
                                    elif isinstance(doc[key], MutableSet):
                                        doc[key].add(value)
                                    elif isinstance(doc[key], MutableSequence):
                                        doc[key] = sorted(
                                            frozenset(doc[key] + value))
                                    else:
                                        raise Exception(
                                            "Attempt to extend a documentation fragement (%s) of unknown type: %s"
                                            % (fragment_name, filename))

                    elif 'EXAMPLES' == theid:
                        plainexamples = child.value.s[
                            1:]  # Skip first empty line

                    elif 'RETURN' == theid:
                        returndocs = child.value.s[1:]

                    elif 'ANSIBLE_METADATA' == theid:
                        metadata = child.value
                        if type(metadata).__name__ == 'Dict':
                            metadata = ast.literal_eval(child.value)
                        else:
                            # try yaml loading
                            metadata = AnsibleLoader(
                                child.value.s,
                                file_name=filename).get_single_data()

                        if not isinstance(metadata, dict):
                            display.warning(
                                "Invalid metadata detected in %s, using defaults"
                                % filename)
                            metadata = {
                                'status': ['preview'],
                                'supported_by': 'community',
                                'version': '1.0'
                            }

    except:
        display.error("unable to parse %s" % filename)
        if verbose:
            display.display("unable to parse %s" % filename)
            raise

    if not metadata:
        metadata = dict()

    # ensure metadata defaults
    # FUTURE: extract this into its own class for use by runtime metadata
    metadata['version'] = metadata.get('version', '1.0')
    metadata['status'] = metadata.get('status', ['preview'])
    metadata['supported_by'] = metadata.get('supported_by', 'community')

    return doc, plainexamples, returndocs, metadata
Example #41
0
from ansible.plugins.action import ActionBase
from distutils.version import LooseVersion
from ansible.module_utils.six import string_types
from ansible.errors import AnsibleUndefinedVariable

try:
    from __main__ import display
except ImportError:
    from ansible.utils.display import Display
    display = Display()

try:
    import notario
except ImportError:
    msg = "The python-notario library is missing. Please install it on the node you are running ceph-ansible to continue."  # noqa E501
    display.error(msg)
    raise SystemExit(msg)

if LooseVersion(notario.__version__) < LooseVersion("0.0.13"):
    msg = "The python-notario libary has an incompatible version. Version >= 0.0.13 is needed, current version: %s" % notario.__version__
    display.error(msg)
    raise SystemExit(msg)

from notario.exceptions import Invalid
from notario.validators import types, chainable, iterables
from notario.decorators import optional
from notario.store import store as notario_store


CEPH_RELEASES = ['jewel', 'kraken', 'luminous', 'mimic', 'nautilus']
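Example #41 combines two guard patterns: fall back to a locally constructed Display when __main__ does not provide one, and refuse to run when a dependency is missing or too old. A stdlib-only sketch of the dependency gate, with a tuple comparison in place of LooseVersion and the assumption that the target module exposes a plain 'X.Y.Z' __version__:

def require(module_name, minimum=(0, 0, 13)):
    """Import module_name, or exit with a clear message if it is missing or too old."""
    try:
        mod = __import__(module_name)
    except ImportError:
        raise SystemExit("The %s library is missing. Please install it to continue." % module_name)
    # naive parse: assumes a numeric dotted __version__ such as '0.0.16'
    version = tuple(int(part) for part in mod.__version__.split('.')[:3])
    if version < minimum:
        raise SystemExit("%s >= %s is required, current version: %s"
                         % (module_name, '.'.join(str(p) for p in minimum), mod.__version__))
    return mod


# notario = require('notario', (0, 0, 13))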
Example #42
0
def get_docstring(filename, verbose=False):
    """
    Search for assignment of the DOCUMENTATION and EXAMPLES variables
    in the given file.
    Parse DOCUMENTATION from YAML and return the YAML doc or None
    together with EXAMPLES, as plain text.

    DOCUMENTATION can be extended using documentation fragments
    loaded by the PluginLoader from the module_docs_fragments
    directory.
    """

    doc = None
    plainexamples = None
    returndocs = None

    try:
        # Thank you, Habbie, for this bit of code :-)
        M = ast.parse(''.join(open(filename)))
        for child in M.body:
            if isinstance(child, ast.Assign):
                for t in child.targets:
                    try:
                        theid = t.id
                    except AttributeError as e:
                        # skip this target; errors can happen when trying to use the normal code
                        display.warning("Failed to assign id for %s on %s, skipping" % (t, filename))
                        continue

                    if 'DOCUMENTATION' in theid:
                        doc = AnsibleLoader(child.value.s, file_name=filename).get_single_data()
                        fragments = doc.get('extends_documentation_fragment', [])

                        if isinstance(fragments, string_types):
                            fragments = [ fragments ]

                        # Allow the module to specify a var other than DOCUMENTATION
                        # to pull the fragment from, using dot notation as a separator
                        for fragment_slug in fragments:
                            fragment_slug = fragment_slug.lower()
                            if '.' in fragment_slug:
                                fragment_name, fragment_var = fragment_slug.split('.', 1)
                                fragment_var = fragment_var.upper()
                            else:
                                fragment_name, fragment_var = fragment_slug, 'DOCUMENTATION'

                            fragment_class = fragment_loader.get(fragment_name)
                            assert fragment_class is not None

                            fragment_yaml = getattr(fragment_class, fragment_var, '{}')
                            fragment = AnsibleLoader(fragment_yaml, file_name=filename).get_single_data()

                            if fragment.has_key('notes'):
                                notes = fragment.pop('notes')
                                if notes:
                                    if not doc.has_key('notes'):
                                        doc['notes'] = []
                                    doc['notes'].extend(notes)

                            if 'options' not in fragment.keys():
                                raise Exception("missing options in fragment, possibly misformatted?")

                            for key, value in fragment.items():
                                if not doc.has_key(key):
                                    doc[key] = value
                                else:
                                    if isinstance(doc[key], MutableMapping):
                                        doc[key].update(value)
                                    elif isinstance(doc[key], MutableSet):
                                        doc[key].add(value)
                                    elif isinstance(doc[key], MutableSequence):
                                        doc[key] = sorted(frozenset(doc[key] + value))
                                    else:
                                        raise Exception("Attempt to extend a documentation fragement of unknown type")

                    elif 'EXAMPLES' in theid:
                        plainexamples = child.value.s[1:]  # Skip first empty line

                    elif 'RETURN' in theid:
                        returndocs = child.value.s[1:]
    except:
        display.error("unable to parse %s" % filename)
        if verbose:
            display.display("unable to parse %s" % filename)
            raise
    return doc, plainexamples, returndocs
Example #43
0
    def run(self, tmp=None, task_vars=None):
        # we must use vars, since task_vars will have un-processed variables
        host_vars = self.expand_all_jinja2_templates(task_vars['vars'])
        host = host_vars['ansible_hostname']
        mode = self._task.args.get('mode', 'permissive')

        self._supports_check_mode = False  # XXX ?
        self._supports_async = True

        result = {}
        result['_ansible_verbose_always'] = True

        try:
            notario_store["groups"] = host_vars["groups"]
            notario_store["containerized_deployment"] = host_vars["containerized_deployment"]  # noqa E501
            notario.validate(host_vars, install_options, defined_keys=True)

            if host_vars["ceph_origin"] == "repository" and not host_vars["containerized_deployment"]:
                notario.validate(
                    host_vars, ceph_origin_repository, defined_keys=True)

                if host_vars["ceph_repository"] == "community":
                    notario.validate(
                        host_vars, ceph_repository_community, defined_keys=True)  # noqa E501

                if host_vars["ceph_repository"] == "rhcs":
                    notario.validate(
                        host_vars, ceph_repository_rhcs, defined_keys=True)

                if host_vars["ceph_repository"] == "dev":
                    notario.validate(
                        host_vars, ceph_repository_dev, defined_keys=True)

            # store these values because one must be defined
            # and the validation method
            # will need access to all three through the store
            notario_store["monitor_address"] = host_vars.get(
                "monitor_address", None)
            notario_store["monitor_address_block"] = host_vars.get(
                "monitor_address_block", None)
            notario_store["monitor_interface"] = host_vars.get(
                "monitor_interface", None)

            if host_vars["mon_group_name"] in host_vars["group_names"]:
                notario.validate(host_vars, monitor_options, defined_keys=True)

            notario_store["radosgw_address"] = host_vars.get(
                "radosgw_address", None)
            notario_store["radosgw_address_block"] = host_vars.get(
                "radosgw_address_block", None)
            notario_store["radosgw_interface"] = host_vars.get(
                "radosgw_interface", None)

            if host_vars["rgw_group_name"] in host_vars["group_names"]:
                notario.validate(host_vars, rados_options, defined_keys=True)

            # validate osd scenario setup
            if host_vars["osd_group_name"] in host_vars["group_names"]:
                notario.validate(host_vars, osd_options, defined_keys=True)
                notario_store['osd_objectstore'] = host_vars["osd_objectstore"]

                if host_vars.get("devices"):
                    notario.validate(
                        host_vars, lvm_batch_scenario, defined_keys=True)
                elif notario_store['osd_objectstore'] == 'filestore':
                    notario.validate(
                        host_vars, lvm_filestore_scenario, defined_keys=True)  # noqa E501
                elif notario_store['osd_objectstore'] == 'bluestore':
                    notario.validate(
                        host_vars, lvm_bluestore_scenario, defined_keys=True)  # noqa E501

        except Invalid as error:
            display.vvvv("Notario Failure: %s" % str(error))
            msg = "[{}] Validation failed for variable: {}".format(
                host, error.path[0])
            display.error(msg)
            reason = "[{}] Reason: {}".format(host, error.reason)
            try:
                if "schema is missing" not in error.message:
                    for i in range(0, len(error.path)):
                        if i == 0:
                            given = "[{}] Given value for {}".format(
                                    host, error.path[0])
                        else:
                            given = given + ": {}".format(error.path[i])
                    if given:
                        display.error(given)
                else:
                    given = ""
                    reason = "[{}] Reason: {}".format(host, error.message)
            except KeyError:
                given = ""
            display.error(reason)
            result['failed'] = mode == 'strict'
            result['msg'] = "\n".join([s for s in (msg, reason, given) if len(s) > 0])
            result['stderr_lines'] = result['msg'].split('\n')


        return result
Example #44
0
    def run(self):
        ''' use Runner lib to do SSH things '''

        super(PullCLI, self).run()

        # log command line
        now = datetime.datetime.now()
        display.display(now.strftime("Starting Ansible Pull at %F %T"))
        display.display(' '.join(sys.argv))

        # Build Checkout command
        # Now construct the ansible command
        node = platform.node()
        host = socket.getfqdn()
        limit_opts = 'localhost,%s,127.0.0.1' % ','.join(
            set([host, node,
                 host.split('.')[0],
                 node.split('.')[0]]))
        base_opts = '-c local '
        if self.options.verbosity > 0:
            base_opts += ' -%s' % ''.join(
                ["v" for x in range(0, self.options.verbosity)])

        # Attempt to use the inventory passed in as an argument
        # It might not yet have been downloaded so use localhost as default
        inv_opts = ''
        if getattr(self.options, 'inventory'):
            for inv in self.options.inventory:
                if isinstance(inv, list):
                    inv_opts += " -i '%s' " % ','.join(inv)
                elif ',' in inv or os.path.exists(inv):
                    inv_opts += ' -i %s ' % inv
        else:
            inv_opts = "-i 'localhost,'"

        # FIXME: enable more repo modules hg/svn?
        if self.options.module_name == 'git':
            repo_opts = "name=%s dest=%s" % (self.options.url,
                                             self.options.dest)
            if self.options.checkout:
                repo_opts += ' version=%s' % self.options.checkout

            if self.options.accept_host_key:
                repo_opts += ' accept_hostkey=yes'

            if self.options.private_key_file:
                repo_opts += ' key_file=%s' % self.options.private_key_file

            if self.options.verify:
                repo_opts += ' verify_commit=yes'

            if self.options.clean:
                repo_opts += ' force=yes'

            if self.options.tracksubs:
                repo_opts += ' track_submodules=yes'

            if not self.options.fullclone:
                repo_opts += ' depth=1'

        path = module_loader.find_plugin(self.options.module_name)
        if path is None:
            raise AnsibleOptionsError(
                ("module '%s' not found.\n" % self.options.module_name))

        bin_path = os.path.dirname(os.path.abspath(sys.argv[0]))
        # hardcode local and inventory/host as this is just meant to fetch the repo
        cmd = '%s/ansible %s %s -m %s -a "%s" all -l "%s"' % (
            bin_path, inv_opts, base_opts, self.options.module_name, repo_opts,
            limit_opts)

        for ev in self.options.extra_vars:
            cmd += ' -e "%s"' % ev

        # Nap?
        if self.options.sleep:
            display.display("Sleeping for %d seconds..." % self.options.sleep)
            time.sleep(self.options.sleep)

        # RUN the Checkout command
        display.debug("running ansible with VCS module to checkout repo")
        display.vvvv('EXEC: %s' % cmd)
        rc, out, err = run_cmd(cmd, live=True)

        if rc != 0:
            if self.options.force:
                display.warning(
                    "Unable to update repository. Continuing with (forced) run of playbook."
                )
            else:
                return rc
        elif self.options.ifchanged and '"changed": true' not in out:
            display.display("Repository has not changed, quitting.")
            return 0

        playbook = self.select_playbook(self.options.dest)
        if playbook is None:
            raise AnsibleOptionsError("Could not find a playbook to run.")

        # Build playbook command
        cmd = '%s/ansible-playbook %s %s' % (bin_path, base_opts, playbook)
        if self.options.vault_password_files:
            for vault_password_file in self.options.vault_password_files:
                cmd += " --vault-password-file=%s" % vault_password_file
        if inv_opts:
            cmd += ' %s' % inv_opts
        for ev in self.options.extra_vars:
            cmd += ' -e "%s"' % ev
        if self.options.ask_sudo_pass or self.options.ask_su_pass or self.options.become_ask_pass:
            cmd += ' --ask-become-pass'
        if self.options.skip_tags:
            cmd += ' --skip-tags "%s"' % to_native(u','.join(
                self.options.skip_tags))
        if self.options.tags:
            cmd += ' -t "%s"' % to_native(u','.join(self.options.tags))
        if self.options.subset:
            cmd += ' -l "%s"' % self.options.subset
        else:
            cmd += ' -l "%s"' % limit_opts
        if self.options.check:
            cmd += ' -C'

        os.chdir(self.options.dest)

        # RUN THE PLAYBOOK COMMAND
        display.debug("running ansible-playbook to do actual work")
        display.debug('EXEC: %s' % cmd)
        rc, out, err = run_cmd(cmd, live=True)

        if self.options.purge:
            os.chdir('/')
            try:
                shutil.rmtree(self.options.dest)
            except Exception as e:
                display.error("Failed to remove %s: %s" %
                              (self.options.dest, str(e)))

        return rc
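The host limit used by both ansible invocations above is just a deduplicated set of hostname variants; a small deterministic sketch (hostnames are passed in explicitly, and the variants are sorted, which the original does not do):

def build_limit_opts(fqdn, node):
    """Build the -l limit string from the fqdn, the node name, and their short forms."""
    variants = set([fqdn, node, fqdn.split('.')[0], node.split('.')[0]])
    return 'localhost,%s,127.0.0.1' % ','.join(sorted(variants))


# build_limit_opts('web1.example.com', 'web1')
# -> 'localhost,web1,web1.example.com,127.0.0.1'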
Example #45
0
    def run(self):
        ''' use Runner lib to do SSH things '''

        super(PullCLI, self).run()

        # log command line
        now = datetime.datetime.now()
        display.display(now.strftime("Starting Ansible Pull at %F %T"))
        display.display(' '.join(sys.argv))

        # Build Checkout command
        # Now construct the ansible command
        node = platform.node()
        host = socket.getfqdn()
        limit_opts = 'localhost,%s,127.0.0.1' % ','.join(set([host, node, host.split('.')[0], node.split('.')[0]]))
        base_opts = '-c local '
        if self.options.verbosity > 0:
            base_opts += ' -%s' % ''.join([ "v" for x in range(0, self.options.verbosity) ])

        # Attempt to use the inventory passed in as an argument
        # It might not yet have been downloaded so use localhost if not
        if not self.options.inventory or not os.path.exists(self.options.inventory):
            inv_opts = 'localhost,'
        else:
            inv_opts = self.options.inventory

        #FIXME: enable more repo modules hg/svn?
        if self.options.module_name == 'git':
            repo_opts = "name=%s dest=%s" % (self.options.url, self.options.dest)
            if self.options.checkout:
                repo_opts += ' version=%s' % self.options.checkout

            if self.options.accept_host_key:
                repo_opts += ' accept_hostkey=yes'

            if self.options.private_key_file:
                repo_opts += ' key_file=%s' % self.options.private_key_file

            if self.options.verify:
                repo_opts += ' verify_commit=yes'

            if not self.options.fullclone:
                repo_opts += ' depth=1'


        path = module_loader.find_plugin(self.options.module_name)
        if path is None:
            raise AnsibleOptionsError(("module '%s' not found.\n" % self.options.module_name))

        bin_path = os.path.dirname(os.path.abspath(sys.argv[0]))
        cmd = '%s/ansible -i "%s" %s -m %s -a "%s" "%s"' % (
            bin_path, inv_opts, base_opts, self.options.module_name, repo_opts, limit_opts
        )

        for ev in self.options.extra_vars:
            cmd += ' -e "%s"' % ev

        # Nap?
        if self.options.sleep:
            display.display("Sleeping for %d seconds..." % self.options.sleep)
            time.sleep(self.options.sleep)

        # RUN the Checkout command
        display.debug("running ansible with VCS module to checkout repo")
        display.vvvv('EXEC: %s' % cmd)
        rc, out, err = run_cmd(cmd, live=True)

        if rc != 0:
            if self.options.force:
                display.warning("Unable to update repository. Continuing with (forced) run of playbook.")
            else:
                return rc
        elif self.options.ifchanged and '"changed": true' not in out:
            display.display("Repository has not changed, quitting.")
            return 0

        playbook = self.select_playbook(self.options.dest)
        if playbook is None:
            raise AnsibleOptionsError("Could not find a playbook to run.")

        # Build playbook command
        cmd = '%s/ansible-playbook %s %s' % (bin_path, base_opts, playbook)
        if self.options.vault_password_file:
            cmd += " --vault-password-file=%s" % self.options.vault_password_file
        if self.options.inventory:
            cmd += ' -i "%s"' % self.options.inventory
        for ev in self.options.extra_vars:
            cmd += ' -e "%s"' % ev
        if self.options.ask_sudo_pass or self.options.ask_su_pass or self.options.become_ask_pass:
            cmd += ' --ask-become-pass'
        if self.options.tags:
            cmd += ' -t "%s"' % self.options.tags
        if self.options.subset:
            cmd += ' -l "%s"' % self.options.subset

        os.chdir(self.options.dest)

        # RUN THE PLAYBOOK COMMAND
        display.debug("running ansible-playbook to do actual work")
        display.debug('EXEC: %s' % cmd)
        rc, out, err = run_cmd(cmd, live=True)

        if self.options.purge:
            os.chdir('/')
            try:
                shutil.rmtree(self.options.dest)
            except Exception as e:
                display.error("Failed to remove %s: %s" % (self.options.dest, str(e)))

        return rc
Example #46
0
    def run(self, iterator, play_context):
        '''
        The linear strategy is simple - get the next task and queue
        it for all hosts, then wait for the queue to drain before
        moving on to the next task
        '''

        # iterate over each task while there is one left to run
        result = True
        work_to_do = True
        while work_to_do and not self._tqm._terminated:

            try:
                display.debug("getting the remaining hosts for this loop")
                hosts_left = [
                    host
                    for host in self._inventory.get_hosts(iterator._play.hosts)
                    if host.name not in self._tqm._unreachable_hosts
                ]
                display.debug("done getting the remaining hosts for this loop")

                # queue up this task for each host in the inventory
                callback_sent = False
                work_to_do = False

                host_results = []
                host_tasks = self._get_next_task_lockstep(hosts_left, iterator)

                # skip control
                skip_rest = False
                choose_step = True

                # flag set if task is set to any_errors_fatal
                any_errors_fatal = False

                results = []
                for (host, task) in host_tasks:
                    if not task:
                        continue

                    if self._tqm._terminated:
                        break

                    run_once = False
                    work_to_do = True

                    # test to see if the task across all hosts points to an action plugin which
                    # sets BYPASS_HOST_LOOP to true, or if it has run_once enabled. If so, we
                    # will only send this task to the first host in the list.

                    try:
                        action = action_loader.get(task.action,
                                                   class_only=True)
                    except KeyError:
                        # we don't care here, because the action may simply not have a
                        # corresponding action plugin
                        action = None

                    # check to see if this task should be skipped, due to it being a member of a
                    # role which has already run (and whether that role allows duplicate execution)
                    if task._role and task._role.has_run(host):
                        # If there is no metadata, the default behavior is to not allow duplicates,
                        # if there is metadata, check to see if the allow_duplicates flag was set to true
                        if task._role._metadata is None or task._role._metadata and not task._role._metadata.allow_duplicates:
                            display.debug(
                                "'%s' skipped because role has already run" %
                                task)
                            continue

                    if task.action == 'meta':
                        self._execute_meta(task, play_context, iterator)
                    else:
                        # handle step if needed, skip meta actions as they are used internally
                        if self._step and choose_step:
                            if self._take_step(task):
                                choose_step = False
                            else:
                                skip_rest = True
                                break

                        display.debug("getting variables")
                        task_vars = self._variable_manager.get_vars(
                            loader=self._loader,
                            play=iterator._play,
                            host=host,
                            task=task)
                        self.add_tqm_variables(task_vars, play=iterator._play)
                        templar = Templar(loader=self._loader,
                                          variables=task_vars)
                        display.debug("done getting variables")

                        run_once = templar.template(
                            task.run_once) or action and getattr(
                                action, 'BYPASS_HOST_LOOP', False)

                        if (task.any_errors_fatal
                                or run_once) and not task.ignore_errors:
                            any_errors_fatal = True

                        if not callback_sent:
                            display.debug(
                                "sending task start callback, copying the task so we can template it temporarily"
                            )
                            saved_name = task.name
                            display.debug(
                                "done copying, going to template now")
                            try:
                                task.name = to_unicode(templar.template(
                                    task.name, fail_on_undefined=False),
                                                       nonstring='empty')
                                display.debug("done templating")
                            except:
                                # just ignore any errors during task name templating,
                                # we don't care if it just shows the raw name
                                display.debug(
                                    "templating failed for some reason")
                                pass
                            display.debug("here goes the callback...")
                            self._tqm.send_callback(
                                'v2_playbook_on_task_start',
                                task,
                                is_conditional=False)
                            task.name = saved_name
                            callback_sent = True
                            display.debug("sending task start callback")

                        self._blocked_hosts[host.get_name()] = True
                        self._queue_task(host, task, task_vars, play_context)

                    # if we're bypassing the host loop, break out now
                    if run_once:
                        break

                    results += self._process_pending_results(iterator,
                                                             one_pass=True)

                # go to next host/task group
                if skip_rest:
                    continue

                display.debug(
                    "done queuing things up, now waiting for results queue to drain"
                )
                results += self._wait_on_pending_results(iterator)
                host_results.extend(results)

                if not work_to_do and len(iterator.get_failed_hosts()) > 0:
                    display.debug("out of hosts to run on")
                    self._tqm.send_callback(
                        'v2_playbook_on_no_hosts_remaining')
                    result = self._tqm.RUN_ERROR
                    break

                try:
                    included_files = IncludedFile.process_include_results(
                        host_results,
                        self._tqm,
                        iterator=iterator,
                        inventory=self._inventory,
                        loader=self._loader,
                        variable_manager=self._variable_manager)
                except AnsibleError as e:
                    return self._tqm.RUN_ERROR

                include_failure = False
                if len(included_files) > 0:
                    display.debug("we have included files to process")
                    noop_task = Task()
                    noop_task.action = 'meta'
                    noop_task.args['_raw_params'] = 'noop'
                    noop_task.set_loader(iterator._play._loader)

                    display.debug("generating all_blocks data")
                    all_blocks = dict((host, []) for host in hosts_left)
                    display.debug("done generating all_blocks data")
                    for included_file in included_files:
                        display.debug("processing included file: %s" %
                                      included_file._filename)
                        # included hosts get the task list while those excluded get an equal-length
                        # list of noop tasks, to make sure that they continue running in lock-step
                        try:
                            new_blocks = self._load_included_file(
                                included_file, iterator=iterator)

                            display.debug(
                                "iterating over new_blocks loaded from include file"
                            )
                            for new_block in new_blocks:
                                task_vars = self._variable_manager.get_vars(
                                    loader=self._loader,
                                    play=iterator._play,
                                    task=included_file._task,
                                )
                                display.debug("filtering new block on tags")
                                final_block = new_block.filter_tagged_tasks(
                                    play_context, task_vars)
                                display.debug(
                                    "done filtering new block on tags")

                                noop_block = Block(parent_block=task._block)
                                noop_block.block = [
                                    noop_task for t in new_block.block
                                ]
                                noop_block.always = [
                                    noop_task for t in new_block.always
                                ]
                                noop_block.rescue = [
                                    noop_task for t in new_block.rescue
                                ]

                                for host in hosts_left:
                                    if host in included_file._hosts:
                                        all_blocks[host].append(final_block)
                                    else:
                                        all_blocks[host].append(noop_block)
                            display.debug(
                                "done iterating over new_blocks loaded from include file"
                            )

                        except AnsibleError as e:
                            for host in included_file._hosts:
                                self._tqm._failed_hosts[host.name] = True
                                iterator.mark_host_failed(host)
                            display.error(to_unicode(e), wrap_text=False)
                            include_failure = True
                            continue

                    # finally go through all of the hosts and append the
                    # accumulated blocks to their list of tasks
                    display.debug(
                        "extending task lists for all hosts with included blocks"
                    )

                    for host in hosts_left:
                        iterator.add_tasks(host, all_blocks[host])

                    display.debug("done extending task lists")
                    display.debug("done processing included files")

                display.debug("results queue empty")

                display.debug("checking for any_errors_fatal")
                failed_hosts = []
                unreachable_hosts = []
                for res in results:
                    if res.is_failed():
                        failed_hosts.append(res._host.name)
                    elif res.is_unreachable():
                        unreachable_hosts.append(res._host.name)

                # if any_errors_fatal and we had an error, mark all hosts as failed
                if any_errors_fatal and (len(failed_hosts) > 0
                                         or len(unreachable_hosts) > 0):
                    for host in hosts_left:
                        # don't double-mark hosts, or the iterator will potentially
                        # fail them out of the rescue/always states
                        if host.name not in failed_hosts:
                            self._tqm._failed_hosts[host.name] = True
                            iterator.mark_host_failed(host)
                    self._tqm.send_callback(
                        'v2_playbook_on_no_hosts_remaining')
                    return self._tqm.RUN_FAILED_BREAK_PLAY
                display.debug("done checking for any_errors_fatal")

                display.debug("checking for max_fail_percentage")
                if iterator._play.max_fail_percentage is not None and len(results) > 0:
                    percentage = iterator._play.max_fail_percentage / 100.0

                    # use float division so the failure ratio is not truncated
                    # to an integer under Python 2
                    if (len(self._tqm._failed_hosts) / float(len(results))) > percentage:
                        for host in hosts_left:
                            # don't double-mark hosts, or the iterator will potentially
                            # fail them out of the rescue/always states
                            if host.name not in failed_hosts:
                                self._tqm._failed_hosts[host.name] = True
                                iterator.mark_host_failed(host)
                        self._tqm.send_callback(
                            'v2_playbook_on_no_hosts_remaining')
                        return self._tqm.RUN_FAILED_BREAK_PLAY
                display.debug("done checking for max_fail_percentage")

            except (IOError, EOFError) as e:
                display.debug("got IOError/EOFError in task loop: %s" % e)
                # most likely an abort, return failed
                return self._tqm.RUN_UNKNOWN_ERROR

        # run the base class run() method, which executes the cleanup function
        # and runs any outstanding handlers which have been triggered

        return super(StrategyModule, self).run(iterator, play_context, result)
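
A minimal, self-contained sketch of the lock-step padding used above, written with
plain Python objects instead of the real Task/Block classes (the names below are
illustrative, not Ansible API): hosts named in an include receive the real task
list, every other host receives an equal-length list of no-op placeholders, so the
linear strategy can keep all hosts at the same position in the task list.

    NOOP = "noop"

    def pad_included_tasks(hosts, included_hosts, new_tasks):
        """Return, per host, a task-list extension of identical length."""
        per_host = {}
        for host in hosts:
            if host in included_hosts:
                per_host[host] = list(new_tasks)          # the real included tasks
            else:
                per_host[host] = [NOOP] * len(new_tasks)  # same length, no work
        return per_host

    blocks = pad_included_tasks(
        hosts=["web1", "web2", "db1"],
        included_hosts={"web1", "web2"},
        new_tasks=["install pkg", "template config", "restart service"],
    )
    for host, tasks in blocks.items():
        print(host, tasks)   # every host ends up with exactly three entries
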
Example #47
0
    def run(self):
        ''' Check out the configured repository with an ad-hoc ansible run, then run the resulting playbook locally. '''

        super(PullCLI, self).run()

        # log command line
        now = datetime.datetime.now()
        display.display(now.strftime("Starting Ansible Pull at %F %T"))
        display.display(' '.join(sys.argv))

        # Build the checkout command: an ad-hoc ansible run of the repo module against localhost
        # (a standalone sketch of this two-step flow follows this example)
        node = platform.node()
        host = socket.getfqdn()
        limit_opts = 'localhost,%s,127.0.0.1' % ','.join(set([host, node, host.split('.')[0], node.split('.')[0]]))
        base_opts = '-c local "%s"' % limit_opts
        if self.options.verbosity > 0:
            base_opts += ' -%s' % ('v' * self.options.verbosity)

        # Attempt to use the inventory passed in as an argument
        # It might not yet have been downloaded so use localhost if not
        if not self.options.inventory or not os.path.exists(self.options.inventory):
            inv_opts = 'localhost,'
        else:
            inv_opts = self.options.inventory

        #TODO: enable more repo modules hg/svn?
        if self.options.module_name == 'git':
            repo_opts = "name=%s dest=%s" % (self.options.url, self.options.dest)
            if self.options.checkout:
                repo_opts += ' version=%s' % self.options.checkout

            if self.options.accept_host_key:
                repo_opts += ' accept_hostkey=yes'

            if self.options.private_key_file:
                repo_opts += ' key_file=%s' % self.options.private_key_file

            if self.options.verify:
                repo_opts += ' verify_commit=yes'

        path = module_loader.find_plugin(self.options.module_name)
        if path is None:
            raise AnsibleOptionsError(("module '%s' not found.\n" % self.options.module_name))

        bin_path = os.path.dirname(os.path.abspath(sys.argv[0]))
        cmd = '%s/ansible -i "%s" %s -m %s -a "%s"' % (
            bin_path, inv_opts, base_opts, self.options.module_name, repo_opts
        )

        for ev in self.options.extra_vars:
            cmd += ' -e "%s"' % ev

        # Optional delay before contacting the repository
        if self.options.sleep:
            display.display("Sleeping for %d seconds..." % self.options.sleep)
            time.sleep(self.options.sleep)

        # Run the checkout command
        rc, out, err = run_cmd(cmd, live=True)

        if rc != 0:
            if self.options.force:
                display.warning("Unable to update repository. Continuing with (forced) run of playbook.")
            else:
                return rc
        elif self.options.ifchanged and '"changed": true' not in out:
            display.display("Repository has not changed, quitting.")
            return 0

        playbook = self.select_playbook(path)

        if playbook is None:
            raise AnsibleOptionsError("Could not find a playbook to run.")

        # Build playbook command
        cmd = '%s/ansible-playbook %s %s' % (bin_path, base_opts, playbook)
        if self.options.vault_password_file:
            cmd += " --vault-password-file=%s" % self.options.vault_password_file
        if self.options.inventory:
            cmd += ' -i "%s"' % self.options.inventory
        for ev in self.options.extra_vars:
            cmd += ' -e "%s"' % ev
        if self.options.ask_sudo_pass:
            cmd += ' -K'
        if self.options.tags:
            cmd += ' -t "%s"' % self.options.tags
        if self.options.limit:
            cmd += ' -l "%s"' % self.options.limit

        os.chdir(self.options.dest)

        # Run the playbook command
        rc, out, err = run_cmd(cmd, live=True)

        if self.options.purge:
            os.chdir('/')
            try:
                shutil.rmtree(self.options.dest)
            except Exception as e:
                display.error("Failed to remove %s: %s" % (self.options.dest, str(e)))

        return rc
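
A rough sketch of the two-step flow above: an ad-hoc ansible run of the git module
checks the repository out, then ansible-playbook runs the playbook from the
checked-out directory. The helper and its parameters (url, dest, checkout,
inventory, playbook) are hypothetical and only mirror how the snippet composes its
command strings; they are not part of the Ansible code base.

    import os
    import sys

    def build_pull_commands(url, dest, checkout=None, inventory=None,
                            verbosity=0, playbook="local.yml"):
        bin_path = os.path.dirname(os.path.abspath(sys.argv[0]))
        base_opts = '-c local'
        if verbosity:
            base_opts += ' -' + 'v' * verbosity

        # step 1: an ad-hoc run of the git module performs the checkout
        repo_opts = 'name=%s dest=%s' % (url, dest)
        if checkout:
            repo_opts += ' version=%s' % checkout
        inv_opts = inventory if inventory and os.path.exists(inventory) else 'localhost,'
        checkout_cmd = '%s/ansible localhost -i "%s" %s -m git -a "%s"' % (
            bin_path, inv_opts, base_opts, repo_opts)

        # step 2: run the playbook from the checked-out tree
        playbook_cmd = '%s/ansible-playbook -i "%s" %s %s' % (
            bin_path, inv_opts, base_opts, os.path.join(dest, playbook))
        return checkout_cmd, playbook_cmd

    checkout_cmd, playbook_cmd = build_pull_commands(
        "https://example.com/repo.git", "/tmp/pull-demo")
    print(checkout_cmd)
    print(playbook_cmd)
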