Example #1
    def _normalize_old_style_args(self, thing, action):
        '''
        deals with fuzziness in old-style (action/local_action) module invocations
        returns tuple of (module_name, dictionary_args)

        possible example inputs:
            { 'local_action' : 'shell echo hi' }
            { 'action'       : 'shell echo hi' }
            { 'local_action' : { 'module' : 'ec2', 'x' : 1, 'y': 2 }}
        standardized outputs like:
            ( 'command', { _raw_params: 'echo hi', _uses_shell: True } )
        '''

        if isinstance(thing, dict):
            # form is like: local_action: { module: 'xyz', x: 2, y: 3 } ... uncommon!
            args = thing
        elif isinstance(thing, string_types):
            # form is like: local_action: copy src=a dest=b ... pretty common
            check_raw = action in ('command', 'shell', 'script', 'raw')
            args = parse_kv(thing, check_raw=check_raw)
        elif thing is None:
            # this can happen with modules which take no params, like ping:
            args = None
        else:
            raise AnsibleParserError("unexpected parameter type in action: %s" % type(thing), obj=self._task_ds)
        return args
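For context, here is a minimal sketch of what parse_kv (from ansible.parsing.splitter) typically returns for the string forms handled above; the exact outputs are illustrative, and the _uses_shell flag shown in the docstring is added later during shell-to-command normalization rather than by parse_kv itself:

    from ansible.parsing.splitter import parse_kv

    # plain key=value shorthand
    parse_kv("src=a dest=b")
    # -> {'src': 'a', 'dest': 'b'}

    # raw-capable modules (command/shell/script/raw) keep free-form text
    parse_kv("echo hi", check_raw=True)
    # -> {'_raw_params': 'echo hi'}

    parse_kv("creates=/tmp/x echo hi", check_raw=True)
    # -> {'creates': '/tmp/x', '_raw_params': 'echo hi'}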
Example #2
    def _normalize_new_style_args(self, thing):
        '''
        deals with fuzziness in new style module invocations
        accepting key=value pairs and dictionaries, and always returning dictionaries
        returns tuple of (module_name, dictionary_args)

        possible example inputs:
           { 'shell' : 'echo hi' }
           { 'ec2'   : { 'region' : 'xyz' } }
           { 'ec2'   : 'region=xyz' }
        standardized outputs like:
           ('ec2', { region: 'xyz'} )
        '''

        action = None
        args = None

        if isinstance(thing, dict):
            # form is like:  copy: { src: 'a', dest: 'b' } ... common for structured (aka "complex") args
            thing = thing.copy()
            if 'module' in thing:
                action = thing['module']
                args = thing.copy()
                del args['module']

        elif isinstance(thing, string_types):
            # form is like:  copy: src=a dest=b ... common shorthand throughout ansible
            (action, args) = self._split_module_string(thing)
            args = parse_kv(args)

        else:
            # need a dict or a string, so giving up
            raise AnsibleParserError("unexpected parameter type in action: %s" % type(thing), obj=self._task_ds)

        return (action, args)
Example #3
def boilerplate_module(modfile, args, interpreter, check, destfile):
    """ simulate what ansible does with new style modules """

    loader = DataLoader()

    complex_args = {}
    if args.startswith("@"):
        # Argument is a YAML file (JSON is a subset of YAML)
        complex_args = utils_vars.combine_vars(complex_args,
                                               loader.load_from_file(args[1:]))
        args = ''
    elif args.startswith("{"):
        # Argument is a YAML document (not a file)
        complex_args = utils_vars.combine_vars(complex_args, loader.load(args))
        args = ''

    if args:
        parsed_args = parse_kv(args)
        complex_args = utils_vars.combine_vars(complex_args, parsed_args)

    task_vars = {}
    if interpreter:
        if '=' not in interpreter:
            print("interpreter must by in the form of \
                   ansible_python_interpreter=/usr/bin/python")
            sys.exit(1)
        interpreter_type, interpreter_path = interpreter.split('=')
        if not interpreter_type.startswith('ansible_'):
            interpreter_type = 'ansible_%s' % interpreter_type
        if not interpreter_type.endswith('_interpreter'):
            interpreter_type = '%s_interpreter' % interpreter_type
        task_vars[interpreter_type] = interpreter_path

    if check:
        complex_args['_ansible_check_mode'] = True

    modname = os.path.basename(modfile)
    modname = os.path.splitext(modname)[0]
    (module_data, module_style, shebang) = module_common.modify_module(
        modname,
        modfile,
        complex_args,
        task_vars=task_vars
    )

    if module_style == 'new' \
       and 'ZIPLOADER_WRAPPER = True' in module_data:
        module_style = 'ziploader'

    modfile2_path = os.path.expanduser(destfile)
    print("* including generated source,\
           if any, saving to: %s" % modfile2_path)
    if module_style not in ('ziploader', 'old'):
        print("* this may offset any line numbers in tracebacks/debuggers!")
    modfile2 = open(modfile2_path, 'w')
    modfile2.write(module_data)
    modfile2.close()
    modfile = modfile2_path

    return (modfile2_path, modname, module_style)
Example #4
    def run(self, terms, variables, **kwargs):
        results = []

        for term in terms:
            try:
                self.reset()  # clear out things for this iteration
                try:
                    if not self.parse_simple_args(term):
                        self.parse_kv_args(parse_kv(term))
                except AnsibleError:
                    raise
                except Exception as e:
                    raise AnsibleError("unknown error parsing with_sequence arguments: %r. Error was: %s" % (term, e))

                self.sanity_check()
                if self.stride != 0:
                    results.extend(self.generate_sequence())
            except AnsibleError:
                raise
            except Exception as e:
                raise AnsibleError(
                    "unknown error generating sequence: %s" % e
                )

        return results
Example #5
    def _normalize_parameters(self, thing, action=None, additional_args=dict()):
        '''
        arguments can be fuzzy.  Deal with all the forms.
        '''

        # final args are the ones we'll eventually return, so first update
        # them with any additional args specified, which have lower priority
        # than those which may be parsed/normalized next
        final_args = dict()
        if additional_args:
            final_args.update(additional_args)

        # how we normalize depends if we figured out what the module name is
        # yet.  If we have already figured it out, it's an 'old style' invocation.
        # otherwise, it's not

        if action is not None:
            args = self._normalize_old_style_args(thing, action)
        else:
            (action, args) = self._normalize_new_style_args(thing)

        # this can occasionally happen, simplify
        if args and 'args' in args:
            tmp_args = args['args']
            del args['args']
            if isinstance(tmp_args, string_types):
                tmp_args = parse_kv(tmp_args)
            args.update(tmp_args)

        # finally, update the args we're going to return with the ones
        # which were normalized above
        if args:
            final_args.update(args)

        return (action, final_args)
Example #6
    def _play_ds(self, pattern, async_val, poll):
        check_raw = self.options.module_name in ('command', 'win_command', 'shell', 'win_shell', 'script', 'raw')
        return dict(
            name="Ansible Ad-Hoc",
            hosts=pattern,
            gather_facts='no',
            tasks=[dict(action=dict(module=self.options.module_name, args=parse_kv(self.options.module_args, check_raw=check_raw)),
                        async_val=async_val, poll=poll)]
        )
Example #7
def write_argsfile(argstring, json=False):
    """ Write args to a file for old-style module's use. """
    argspath = os.path.expanduser("~/.ansible_test_module_arguments")
    argsfile = open(argspath, 'w')
    if json:
        args = parse_kv(argstring)
        argstring = jsonify(args)
    argsfile.write(argstring)
    argsfile.close()
    return argspath
Example #8
    def _normalize_parameters(self, thing, action=None, additional_args=dict()):
        '''
        arguments can be fuzzy.  Deal with all the forms.
        '''

        # final args are the ones we'll eventually return, so first update
        # them with any additional args specified, which have lower priority
        # than those which may be parsed/normalized next
        final_args = dict()
        if additional_args:
            if isinstance(additional_args, string_types):
                templar = Templar(loader=None)
                if templar._contains_vars(additional_args):
                    final_args['_variable_params'] = additional_args
                else:
                    raise AnsibleParserError("Complex args containing variables cannot use bare variables, and must use the full variable style ('{{var_name}}')")
            elif isinstance(additional_args, dict):
                final_args.update(additional_args)
            else:
                raise AnsibleParserError('Complex args must be a dictionary or variable string ("{{var}}").')

        # how we normalize depends if we figured out what the module name is
        # yet.  If we have already figured it out, it's an 'old style' invocation.
        # otherwise, it's not

        if action is not None:
            args = self._normalize_old_style_args(thing, action)
        else:
            (action, args) = self._normalize_new_style_args(thing)

            # this can occasionally happen, simplify
            if args and 'args' in args:
                tmp_args = args.pop('args')
                if isinstance(tmp_args, string_types):
                    tmp_args = parse_kv(tmp_args)
                args.update(tmp_args)

        # only internal variables can start with an underscore, so
        # we don't allow users to set them directly in arguments
        if args and action not in ('command', 'win_command', 'shell', 'win_shell', 'script', 'raw'):
            for arg in args:
                arg = to_text(arg)
                if arg.startswith('_ansible_'):
                    raise AnsibleError("invalid parameter specified for action '%s': '%s'" % (action, arg))

        # finally, update the args we're going to return with the ones
        # which were normalized above
        if args:
            final_args.update(args)

        return (action, final_args)
Example #9
def load_extra_vars(loader, options):
    extra_vars = {}
    for extra_vars_opt in options.extra_vars:
        extra_vars_opt = to_text(extra_vars_opt, errors='surrogate_or_strict')
        if extra_vars_opt.startswith(u"@"):
            # Argument is a YAML file (JSON is a subset of YAML)
            data = loader.load_from_file(extra_vars_opt[1:])
        elif extra_vars_opt and extra_vars_opt[0] in u'[{':
            # Arguments as YAML
            data = loader.load(extra_vars_opt)
        else:
            # Arguments as Key-value
            data = parse_kv(extra_vars_opt)
        extra_vars = combine_vars(extra_vars, data)
    return extra_vars
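A small usage sketch, assuming a DataLoader and an options object whose extra_vars list mixes the key=value and inline-JSON forms (the option values below are made up):

    from ansible.parsing.dataloader import DataLoader

    class FakeOptions:
        # an entry such as '@vars.yml' would instead be loaded via loader.load_from_file
        extra_vars = ['a=1 b=2', '{"c": 3}']

    # entries are merged left to right, so later sources win on conflicting keys
    load_extra_vars(loader=DataLoader(), options=FakeOptions())
    # -> {'a': '1', 'b': '2', 'c': 3}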
Example #10
def _parse_parameters(term):
    """Hacky parsing of params

    See https://github.com/ansible/ansible-modules-core/issues/1968#issuecomment-136842156
    and the first_found lookup for how we want to fix this later.
    """
    first_split = term.split(' ', 1)
    if len(first_split) <= 1:
        # Only a single argument given, therefore it's a path
        relpath = term
        params = dict()
    else:
        relpath = first_split[0]
        params = parse_kv(first_split[1])
        if '_raw_params' in params:
            # Spaces in the path?
            relpath = u' '.join((relpath, params['_raw_params']))
            del params['_raw_params']

            # Check that we parsed the params correctly
            if not term.startswith(relpath):
                # Likely, the user had a non parameter following a parameter.
                # Reject this as a user typo
                raise AnsibleError('Unrecognized value after key=value parameters given to password lookup')
        # No _raw_params means we already found the complete path when
        # we split it initially

    # Check for invalid parameters.  Probably a user typo
    invalid_params = frozenset(params.keys()).difference(VALID_PARAMS)
    if invalid_params:
        raise AnsibleError('Unrecognized parameter(s) given to password lookup: %s' % ', '.join(invalid_params))

    # Set defaults
    params['length'] = int(params.get('length', DEFAULT_LENGTH))
    params['encrypt'] = params.get('encrypt', None)

    params['chars'] = params.get('chars', None)
    if params['chars']:
        tmp_chars = []
        if u',,' in params['chars']:
            tmp_chars.append(u',')
        tmp_chars.extend(c for c in params['chars'].replace(u',,', u',').split(u',') if c)
        params['chars'] = tmp_chars
    else:
        # Default chars for password
        params['chars'] = [u'ascii_letters', u'digits', u".,:-_"]

    return relpath, params
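Two illustrative calls, assuming the stock lookup's VALID_PARAMS covers length/encrypt/chars and DEFAULT_LENGTH is 20 (both of those are assumptions here):

    _parse_parameters('credentials/foo length=12 chars=ascii_letters,digits')
    # -> ('credentials/foo', {'length': 12, 'encrypt': None,
    #                         'chars': ['ascii_letters', 'digits']})

    _parse_parameters('path with spaces/bar')
    # the path and _raw_params are rejoined, so the full path survives
    # -> ('path with spaces/bar', {'length': 20, 'encrypt': None,
    #                              'chars': ['ascii_letters', 'digits', '.,:-_']})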
Example #11
    def run(self, terms, variables=None, **kwargs):
        # Ensure pychef has been loaded
        if not HAS_CHEF:
            raise AnsibleError('PyChef needed for lookup plugin, try `pip install pychef`')

        for term in terms:
            self.parse_kv_args(parse_kv(term))

        api_object = chef.autoconfigure()

        if not isinstance(api_object, chef.api.ChefAPI):
            raise AnsibleError('Unable to connect to Chef Server API.')

        data_bag_object = chef.DataBag(self.name)

        data_bag_item = data_bag_object[self.item]

        return [dict(data_bag_item)]
Example #12
def _parse_parameters(term):
    # Hacky parsing of params taken from password lookup.
    first_split = term.split(' ', 1)
    if len(first_split) <= 1:
        # Only a single argument given, therefore it's a path
        name = term
        params = dict()
    else:
        name = first_split[0]
        params = parse_kv(first_split[1])
        if '_raw_params' in params:
            # Spaces in the path?
            name = ' '.join((name, params['_raw_params']))
            del params['_raw_params']

            # Check that we parsed the params correctly
            if not term.startswith(name):
                # Likely, the user had a non parameter following a parameter.
                # Reject this as a user typo
                raise AnsibleError('Unrecognized value after key=value parameters given to password lookup')
        # No _raw_params means we already found the complete path when
        # we split it initially

    # Check for invalid parameters.  Probably a user typo
    invalid_params = frozenset(params.keys()).difference(VALID_PARAMS)
    if invalid_params:
        raise AnsibleError('Unrecognized parameter(s) given to password lookup: %s' % ', '.join(invalid_params))

    # Set defaults
    params['length'] = int(params.get('length', DEFAULT_LENGTH))
    symbols = params.get('symbols', 'False')
    if symbols.lower() in ['true', 'yes']:
        params['symbols'] = True
    else:
        params['symbols'] = False
    regenerate = params.get('regenerate', 'False')
    if regenerate.lower() in ['true', 'yes']:
        params['regenerate'] = True
    else:
        params['regenerate'] = False

    return name, params
Example #13
    def _preprocess_include(self, ds, new_ds, k, v):
        '''
        Splits the include line up into filename and parameters
        '''

        # The include line must include at least one item, which is the filename
        # to include. Anything after that should be regarded as a parameter to the include
        items = split_args(v)
        if len(items) == 0:
            raise AnsibleParserError("include statements must specify the file name to include", obj=ds)
        else:
            new_ds['include'] = items[0]
            if len(items) > 1:
                # rejoin the parameter portion of the arguments and
                # then use parse_kv() to get a dict of params back
                params = parse_kv(" ".join(items[1:]))
                if 'tags' in params:
                    new_ds['tags'] = params.pop('tags')
                if 'vars' in new_ds:
                    raise AnsibleParserError("include parameters cannot be mixed with 'vars' entries for include statements", obj=ds)
                new_ds['vars'] = params
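To make the split concrete, a short hedged walk-through of a typical include line (the file name and variables are made up):

    from ansible.parsing.splitter import split_args, parse_kv

    # e.g. for a task written as:  - include: tasks/setup.yml pkg=nginx tags=web
    v = 'tasks/setup.yml pkg=nginx tags=web'
    items = split_args(v)                      # ['tasks/setup.yml', 'pkg=nginx', 'tags=web']
    params = parse_kv(" ".join(items[1:]))     # {'pkg': 'nginx', 'tags': 'web'}
    # the method above stores items[0] as the include target, pops 'tags' into
    # new_ds['tags'], and keeps the remaining keys as new_ds['vars']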
Example #14
    def _munge_include(self, ds, new_ds, k, v):
        '''
        Splits the include line up into filename and parameters
        '''

        # The include line must include at least one item, which is the filename
        # to include. Anything after that should be regarded as a parameter to the include
        items = split_args(v)
        if len(items) == 0:
            raise AnsibleParserError("include statements must specify the file name to include", obj=ds)
        else:
            # FIXME/TODO: validate that items[0] is a file, which also
            #             exists and is readable 
            new_ds['include'] = items[0]
            if len(items) > 1:
                # rejoin the parameter portion of the arguments and
                # then use parse_kv() to get a dict of params back
                params = parse_kv(" ".join(items[1:]))
                if 'vars' in new_ds:
                    # FIXME: see fixme above regarding merging vars
                    raise AnsibleParserError("include parameters cannot be mixed with 'vars' entries for include statements", obj=ds)
                new_ds['vars'] = params
Example #15
def load_extra_vars(loader, options):
    extra_vars = {}
    if hasattr(options, 'extra_vars'):
        for extra_vars_opt in options.extra_vars:
            data = None
            extra_vars_opt = to_text(extra_vars_opt, errors='surrogate_or_strict')
            if extra_vars_opt.startswith(u"@"):
                # Argument is a YAML file (JSON is a subset of YAML)
                data = loader.load_from_file(extra_vars_opt[1:])
            elif extra_vars_opt and extra_vars_opt[0] in u'[{':
                # Arguments as YAML
                data = loader.load(extra_vars_opt)
            else:
                # Arguments as Key-value
                data = parse_kv(extra_vars_opt)

            if isinstance(data, MutableMapping):
                extra_vars = combine_vars(extra_vars, data)
            else:
                raise AnsibleOptionsError("Invalid extra vars data supplied. '%s' could not be made into a dictionary" % extra_vars_opt)

    return extra_vars
Example #16
    def _normalize_old_style_args(self, thing):
        '''
        deals with fuzziness in old-style (action/local_action) module invocations
        returns tuple of (module_name, dictionary_args)

        possible example inputs:
            { 'local_action' : 'shell echo hi' }
            { 'action'       : 'shell echo hi' }
            { 'local_action' : { 'module' : 'ec2', 'x' : 1, 'y': 2 }}
        standardized outputs like:
            ( 'command', { _raw_params: 'echo hi', _uses_shell: True } )
        '''

        if isinstance(thing, dict):
            # form is like: local_action: { module: 'xyz', x: 2, y: 3 } ... uncommon!
            args = thing
        elif isinstance(thing, string_types):
            # form is like: local_action: copy src=a dest=b ... pretty common
            args = parse_kv(thing)
        else:
            raise AnsibleParserError("unexpected parameter type in action: %s" % type(thing), obj=self._task)
        return args
Example #17
    def _normalize_old_style_args(self, thing):
        '''
        deals with fuzziness in old-style (action/local_action) module invocations
        returns tuple of (module_name, dictionary_args)

        possible example inputs:
            { 'local_action' : 'shell echo hi' }
            { 'action'       : 'shell echo hi' }
            { 'local_action' : { 'module' : 'ec2', 'x' : 1, 'y': 2 }}
        standardized outputs like:
            ( 'command', { _raw_params: 'echo hi', _uses_shell: True } )
        '''

        if isinstance(thing, dict):
            # form is like: local_action: { module: 'xyz', x: 2, y: 3 } ... uncommon!
            args = thing
        elif isinstance(thing, string_types):
            # form is like: local_action: copy src=a dest=b ... pretty common
            args = parse_kv(thing)
        else:
            raise AnsibleParserError(
                "unexpected parameter type in action: %s" % type(thing),
                obj=self._task_ds)
        return args
Example #18
    def _play_ds(self, pattern, async_val, poll):
        check_raw = context.CLIARGS['module_name'] in C.MODULE_REQUIRE_ARGS

        mytask = {
            'action': {
                'module': context.CLIARGS['module_name'],
                'args': parse_kv(context.CLIARGS['module_args'], check_raw=check_raw)
            },
            'timeout': context.CLIARGS['task_timeout']
        }

        # avoid adding to tasks that don't support it, unless set, then give user an error
        if context.CLIARGS['module_name'] not in C._ACTION_ALL_INCLUDE_ROLE_TASKS and any(frozenset((async_val, poll))):
            mytask['async_val'] = async_val
            mytask['poll'] = poll

        return dict(name="Ansible Ad-Hoc",
                    hosts=pattern,
                    gather_facts='no',
                    tasks=[mytask])
Example #19
    def run(self, terms, variables, **kwargs):
        results = []

        if isinstance(terms, basestring):
            terms = [ terms ]

        templar = Templar(loader=self._loader, variables=variables)

        for term in terms:
            try:
                self.reset()  # clear out things for this iteration

                term = templar.template(term)
                try:
                    if not self.parse_simple_args(term):
                        self.parse_kv_args(parse_kv(term))
                except Exception, e:
                    raise AnsibleError("unknown error parsing with_sequence arguments: %r. Error was: %s" % (term, e))

                self.sanity_check()

                results.extend(self.generate_sequence())
            except AnsibleError:
                raise
Example #20
    def _preprocess_include(self, ds, new_ds, k, v):
        '''
        Splits the include line up into filename and parameters
        '''

        if v is None:
            raise AnsibleParserError("include parameter is missing", obj=ds)

        # The include line must include at least one item, which is the filename
        # to include. Anything after that should be regarded as a parameter to the include
        items = split_args(v)
        if len(items) == 0:
            raise AnsibleParserError("include statements must specify the file name to include", obj=ds)
        else:
            new_ds['include'] = items[0]
            if len(items) > 1:
                # rejoin the parameter portion of the arguments and
                # then use parse_kv() to get a dict of params back
                params = parse_kv(" ".join(items[1:]))
                if 'tags' in params:
                    new_ds['tags'] = params.pop('tags')
                if 'vars' in new_ds:
                    raise AnsibleParserError("include parameters cannot be mixed with 'vars' entries for include statements", obj=ds)
                new_ds['vars'] = params
Example #21
    def run(self):
        ''' use Runner lib to do SSH things '''

        # only thing left should be host pattern
        pattern = self.args[0]

        # ignore connection password cause we are local
        if self.options.connection == "local":
            self.options.ask_pass = False

        sshpass    = None
        becomepass    = None
        vault_pass = None

        self.normalize_become_options()
        (sshpass, becomepass) = self.ask_passwords()
        passwords = { 'conn_pass': sshpass, 'become_pass': becomepass }

        if self.options.vault_password_file:
            # read vault_pass from a file
            vault_pass = read_vault_file(self.options.vault_password_file)
        elif self.options.ask_vault_pass:
            vault_pass = self.ask_vault_passwords(ask_vault_pass=True, ask_new_vault_pass=False, confirm_new=False)[0]

        loader = DataLoader(vault_password=vault_pass)
        variable_manager = VariableManager()

        inventory = Inventory(loader=loader, variable_manager=variable_manager, host_list=self.options.inventory)

        hosts = inventory.list_hosts(pattern)
        if len(hosts) == 0:
            self.display.warning("provided hosts list is empty, only localhost is available")

        if self.options.listhosts:
            for host in hosts:
                self.display.display('    %s' % host)
            return 0

        if self.options.module_name in C.MODULE_REQUIRE_ARGS and not self.options.module_args:
            raise AnsibleError("No argument passed to %s module" % self.options.module_name)

        #TODO: implement async support
        #if self.options.seconds:
        #    callbacks.display("background launch...\n\n", color='cyan')
        #    results, poller = runner.run_async(self.options.seconds)
        #    results = self.poll_while_needed(poller)
        #else:
        #    results = runner.run()

        # create a pseudo-play to execute the specified module via a single task
        play_ds = dict(
            name = "Ansible Ad-Hoc",
            hosts = pattern,
            gather_facts = 'no',
            tasks = [ dict(action=dict(module=self.options.module_name, args=parse_kv(self.options.module_args))), ]
        )

        play = Play().load(play_ds, variable_manager=variable_manager, loader=loader)

        # now create a task queue manager to execute the play
        tqm = None
        try:
            tqm = TaskQueueManager(
                    inventory=inventory,
                    variable_manager=variable_manager,
                    loader=loader,
                    display=self.display,
                    options=self.options,
                    passwords=passwords,
                    stdout_callback='minimal',
                )
            result = tqm.run(play)
        finally:
            if tqm:
                tqm.cleanup()

        return result
Example #22
    def run(self):
        ''' use Runner lib to do SSH things '''

        # only thing left should be host pattern
        pattern = self.args[0]

        # ignore connection password cause we are local
        if self.options.connection == "local":
            self.options.ask_pass = False

        sshpass    = None
        becomepass    = None
        vault_pass = None

        self.normalize_become_options()
        (sshpass, becomepass) = self.ask_passwords()
        passwords = { 'conn_pass': sshpass, 'become_pass': becomepass }

        if self.options.vault_password_file:
            # read vault_pass from a file
            vault_pass = read_vault_file(self.options.vault_password_file)
        elif self.options.ask_vault_pass:
            vault_pass = self.ask_vault_passwords(ask_vault_pass=True, ask_new_vault_pass=False, confirm_new=False)[0]

        loader = DataLoader(vault_password=vault_pass)
        variable_manager = VariableManager()

        inventory = Inventory(loader=loader, variable_manager=variable_manager, host_list=self.options.inventory)
        variable_manager.set_inventory(inventory)

        hosts = inventory.list_hosts(pattern)
        if len(hosts) == 0:
            self.display.warning("provided hosts list is empty, only localhost is available")

        if self.options.listhosts:
            for host in hosts:
                self.display.display('    %s' % host)
            return 0

        if self.options.module_name in C.MODULE_REQUIRE_ARGS and not self.options.module_args:
            raise AnsibleOptionsError("No argument passed to %s module" % self.options.module_name)

        #TODO: implement async support
        #if self.options.seconds:
        #    callbacks.display("background launch...\n\n", color='cyan')
        #    results, poller = runner.run_async(self.options.seconds)
        #    results = self.poll_while_needed(poller)
        #else:
        #    results = runner.run()

        # create a pseudo-play to execute the specified module via a single task
        play_ds = dict(
            name = "Ansible Ad-Hoc",
            hosts = pattern,
            gather_facts = 'no',
            tasks = [ dict(action=dict(module=self.options.module_name, args=parse_kv(self.options.module_args))), ]
        )

        play = Play().load(play_ds, variable_manager=variable_manager, loader=loader)

        # now create a task queue manager to execute the play
        tqm = None
        try:
            tqm = TaskQueueManager(
                    inventory=inventory,
                    variable_manager=variable_manager,
                    loader=loader,
                    display=self.display,
                    options=self.options,
                    passwords=passwords,
                    stdout_callback='minimal',
                )
            result = tqm.run(play)
        finally:
            if tqm:
                tqm.cleanup()

        return result
Example #23
def _connect(manager_interrupt,
             scan_task,
             hosts,
             result_store,
             credential,
             connection_port,
             use_paramiko=False,
             forks=50,
             exclude_hosts=None,
             base_ssh_executable=None,
             ssh_timeout=None):
    """Attempt to connect to hosts using the given credential.

    :param manager_interrupt: Signal used to communicate termination of scan
    :param scan_task: The scan task for this connection job
    :param hosts: The collection of hosts to test connections
    :param result_store: The result store to accept the results.
    :param credential: The credential used for connections
    :param connection_port: The connection port
    :param use_paramiko: use paramiko instead of ssh for connection
    :param forks: number of forks to run with, default of 50
    :param exclude_hosts: Optional. Hosts to exclude from test connections
    :param base_ssh_executable: ssh executable, or None for
            'ssh'. Will be wrapped with a timeout before being passed
            to Ansible.
    :param ssh_timeout: string in the format of the 'timeout'
            command. Timeout for individual tasks.
    :returns: list of connected hosts credential tuples and
            list of host that failed connection
    """
    cred_data = CredentialSerializer(credential).data

    ssh_executable = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '../../../bin/timeout_ssh'))

    base_ssh_executable = base_ssh_executable or 'ssh'
    ssh_timeout = ssh_timeout or settings.QPC_SSH_CONNECT_TIMEOUT

    # pylint: disable=line-too-long
    # the ssh arg is required for become-pass because
    # ansible checks for an exact string match of ssh
    # anywhere in the command array
    # See https://github.com/ansible/ansible/blob/stable-2.3/lib/ansible/plugins/connection/ssh.py#L490-L500 # noqa
    # timeout_ssh will remove the ssh argument before running the command
    ssh_args = [
        '--executable=' + base_ssh_executable, '--timeout=' + ssh_timeout,
        'ssh'
    ]
    group_names, inventory = _construct_connect_inventory(
        hosts, cred_data, connection_port, forks, exclude_hosts,
        ssh_executable, ssh_args)
    inventory_file = write_inventory(inventory)
    extra_vars = {}

    _handle_ssh_passphrase(cred_data)

    error_msg = ''
    log_message = 'START CONNECT PROCESSING GROUPS'\
        ' with use_paramiko: %s,' \
        '%d forks and extra_vars=%s' % (use_paramiko,
                                        forks,
                                        extra_vars)
    scan_task.log_message(log_message)
    for idx, group_name in enumerate(group_names):
        if manager_interrupt.value == ScanJob.JOB_TERMINATE_CANCEL:
            raise NetworkCancelException()

        if manager_interrupt.value == ScanJob.JOB_TERMINATE_PAUSE:
            raise NetworkPauseException()

        group_ips = inventory.get('all').get('children').get(group_name).get(
            'hosts').keys()
        group_ips = ["'%s'" % ip for ip in group_ips]
        group_ip_string = ', '.join(group_ips)
        log_message = 'START CONNECT PROCESSING GROUP %d of %d. '\
            'About to connect to hosts [%s]' % (
                (idx + 1), len(group_names), group_ip_string)
        scan_task.log_message(log_message)
        callback = ConnectResultCallback(result_store, credential,
                                         scan_task.source)
        playbook = {
            'name': 'attempt connection to systems',
            'hosts': group_name,
            'gather_facts': False,
            'tasks': [{
                'action': {
                    'module': 'raw',
                    'args': parse_kv('echo "Hello"')
                }
            }]
        }
        result = run_playbook(inventory_file,
                              callback,
                              playbook,
                              extra_vars,
                              use_paramiko,
                              forks=forks)

        if result != TaskQueueManager.RUN_OK:
            new_error_msg = _construct_error_msg(result)
            if result not in [
                    TaskQueueManager.RUN_UNREACHABLE_HOSTS,
                    TaskQueueManager.RUN_FAILED_HOSTS
            ]:
                error_msg += '{}\n'.format(new_error_msg)

    if error_msg != '':
        raise AnsibleError(error_msg)
Example #24
    def run_module(self,
                   module_name='ping',
                   module_args=None,
                   hosts="all",
                   inventory_file=None,
                   **kwargs):

        if not module_args:
            check_raw = module_name in ('command', 'win_command', 'shell',
                                        'win_shell', 'script', 'raw')
            module_args = parse_kv(constants.DEFAULT_MODULE_ARGS, check_raw)

        conn_pass = None
        if 'conn_pass' in kwargs:
            conn_pass = kwargs['conn_pass']

        become_pass = None
        if 'become_pass' in kwargs:
            become_pass = kwargs['become_pass']

        passwords = {'conn_pass': conn_pass, 'become_pass': become_pass}

        options = self._build_opt_dict(inventory_file, **kwargs)

        variable_manager = vars.VariableManager()
        loader = dataloader.DataLoader()
        variable_manager.extra_vars = options.extra_vars

        ansible_inv = inventory.Inventory(loader=loader,
                                          variable_manager=variable_manager,
                                          host_list=options.inventory)
        variable_manager.set_inventory(ansible_inv)
        ansible_inv.subset(options.subset)

        play_ds = self._play_ds(hosts, module_name, module_args)
        play_obj = play.Play().load(play_ds,
                                    variable_manager=variable_manager,
                                    loader=loader)

        tqm = None  # ensure the finally block below can reference tqm even if construction fails
        try:
            tqm = task_queue_manager.TaskQueueManager(
                inventory=ansible_inv,
                variable_manager=variable_manager,
                loader=loader,
                options=options,
                passwords=passwords,
                stdout_callback='minimal',
                run_additional_callbacks=True)

            # There is no public API for adding callbacks, hence we use a
            # private property to add callbacks
            tqm._callback_plugins.extend(self._callbacks)

            result = tqm.run(play_obj)
        finally:
            if tqm:
                tqm.cleanup()
            if loader:
                loader.cleanup_all_tmp_files()

        stats = tqm._stats
        result = self._process_stats(stats)
        return result
Example #25
    def run_module(self,
                   module_name='ping',
                   module_args=None,
                   hosts="all",
                   inventory_file=None,
                   **kwargs):

        if not module_args:
            check_raw = module_name in ('command', 'win_command', 'shell',
                                        'win_shell', 'script', 'raw')
            module_args = parse_kv(constants.DEFAULT_MODULE_ARGS, check_raw)

        conn_pass = None
        if 'conn_pass' in kwargs:
            conn_pass = kwargs['conn_pass']

        become_pass = None
        if 'become_pass' in kwargs:
            become_pass = kwargs['become_pass']

        passwords = {'conn_pass': conn_pass, 'become_pass': become_pass}

        options = self._build_opt_dict(inventory_file, **kwargs)
        # dynamically load any plugins
        get_all_plugin_loaders()

        loader = dataloader.DataLoader()
        inventory = InventoryManager(loader=loader, sources=options.inventory)

        # create the variable manager, which will be shared throughout
        # the code, ensuring a consistent view of global variables
        variable_manager = VariableManager(loader=loader, inventory=inventory)
        options.extra_vars = {
            six.u(key): six.u(value)
            for key, value in options.extra_vars.items()
        }
        variable_manager.extra_vars = cli.load_extra_vars(loader, options)

        inventory.subset(options.subset)

        play_ds = self._play_ds(hosts, module_name, module_args)
        play_obj = play.Play().load(play_ds,
                                    variable_manager=variable_manager,
                                    loader=loader)

        tqm = None  # ensure the finally block below can reference tqm even if construction fails
        try:
            tqm = task_queue_manager.TaskQueueManager(
                inventory=inventory,
                variable_manager=variable_manager,
                loader=loader,
                options=options,
                passwords=passwords,
                stdout_callback='minimal',
                run_additional_callbacks=True)

            # There is no public API for adding callbacks, hence we use a
            # private property to add callbacks
            tqm._callback_plugins.extend(self._callbacks)

            result = tqm.run(play_obj)
        finally:
            if tqm:
                tqm.cleanup()
            if loader:
                loader.cleanup_all_tmp_files()

        stats = tqm._stats
        result = self._process_stats(stats)
        return result
Example #26
    def check_parse_kv(self, args, expected):
        tools.eq_(parse_kv(args), expected)
Example #27
def test_parse_kv(args, expected):
    assert parse_kv(args) == expected
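A hypothetical pytest parametrization for a test of this shape (the cases are illustrative, not taken from the original suite):

    import pytest
    from ansible.parsing.splitter import parse_kv

    @pytest.mark.parametrize('args,expected', [
        ('a=b c=d', {'a': 'b', 'c': 'd'}),
        ('a="b c" d=e', {'a': 'b c', 'd': 'e'}),
        ('echo hi', {'_raw_params': 'echo hi'}),
    ])
    def test_parse_kv(args, expected):
        assert parse_kv(args) == expected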
Example #28
    def default(self, arg, forceshell=False):
        """ actually runs modules """
        if arg.startswith("#"):
            return False

        if not self.cwd:
            display.error("No host found")
            return False

        if arg.split()[0] in self.modules:
            module = arg.split()[0]
            module_args = ' '.join(arg.split()[1:])
        else:
            module = 'shell'
            module_args = arg

        if forceshell is True:
            module = 'shell'
            module_args = arg

        result = None
        try:
            check_raw = module in ('command', 'shell', 'script', 'raw')
            play_ds = dict(
                name="Ansible Shell",
                hosts=self.cwd,
                gather_facts='no',
                tasks=[
                    dict(action=dict(module=module,
                                     args=parse_kv(module_args,
                                                   check_raw=check_raw)))
                ],
                remote_user=self.remote_user,
                become=self.become,
                become_user=self.become_user,
                become_method=self.become_method,
                check_mode=self.check_mode,
                diff=self.diff,
            )
            play = Play().load(play_ds,
                               variable_manager=self.variable_manager,
                               loader=self.loader)
        except Exception as e:
            display.error(u"Unable to build command: %s" % to_text(e))
            return False

        try:
            cb = 'minimal'  # FIXME: make callbacks configurable
            # now create a task queue manager to execute the play
            self._tqm = None
            try:
                self._tqm = TaskQueueManager(
                    inventory=self.inventory,
                    variable_manager=self.variable_manager,
                    loader=self.loader,
                    passwords=self.passwords,
                    stdout_callback=cb,
                    run_additional_callbacks=C.DEFAULT_LOAD_CALLBACK_PLUGINS,
                    run_tree=False,
                    forks=self.forks,
                )

                result = self._tqm.run(play)
            finally:
                if self._tqm:
                    self._tqm.cleanup()
                if self.loader:
                    self.loader.cleanup_all_tmp_files()

            if result is None:
                display.error("No hosts found")
                return False
        except KeyboardInterrupt:
            display.error('User interrupted execution')
            return False
        except Exception as e:
            display.error(to_text(e))
            # FIXME: add traceback in very very verbose mode
            return False
Example #29
    def default(self, arg, forceshell=False):
        """ actually runs modules """
        if arg.startswith("#"):
            return False

        if not self.cwd:
            display.error("No host found")
            return False

        # defaults
        module = 'shell'
        module_args = arg

        if forceshell is not True:
            possible_module, *possible_args = arg.split()
            if module_loader.find_plugin(possible_module):
                # we found module!
                module = possible_module
                if possible_args:
                    module_args = ' '.join(possible_args)
                else:
                    module_args = ''

        if self.callback:
            cb = self.callback
        elif C.DEFAULT_LOAD_CALLBACK_PLUGINS and C.DEFAULT_STDOUT_CALLBACK != 'default':
            cb = C.DEFAULT_STDOUT_CALLBACK
        else:
            cb = 'minimal'

        result = None
        try:
            check_raw = module in C._ACTION_ALLOWS_RAW_ARGS
            task = dict(action=dict(module=module, args=parse_kv(module_args, check_raw=check_raw)), timeout=self.task_timeout)
            play_ds = dict(
                name="Ansible Shell",
                hosts=self.cwd,
                gather_facts='no',
                tasks=[task],
                remote_user=self.remote_user,
                become=self.become,
                become_user=self.become_user,
                become_method=self.become_method,
                check_mode=self.check_mode,
                diff=self.diff,
                collections=self.collections,
            )
            play = Play().load(play_ds, variable_manager=self.variable_manager, loader=self.loader)
        except Exception as e:
            display.error(u"Unable to build command: %s" % to_text(e))
            return False

        try:
            # now create a task queue manager to execute the play
            self._tqm = None
            try:
                self._tqm = TaskQueueManager(
                    inventory=self.inventory,
                    variable_manager=self.variable_manager,
                    loader=self.loader,
                    passwords=self.passwords,
                    stdout_callback=cb,
                    run_additional_callbacks=C.DEFAULT_LOAD_CALLBACK_PLUGINS,
                    run_tree=False,
                    forks=self.forks,
                )

                result = self._tqm.run(play)
                display.debug(result)
            finally:
                if self._tqm:
                    self._tqm.cleanup()
                if self.loader:
                    self.loader.cleanup_all_tmp_files()

            if result is None:
                display.error("No hosts found")
                return False
        except KeyboardInterrupt:
            display.error('User interrupted execution')
            return False
        except Exception as e:
            if self.verbosity >= 3:
                import traceback
                display.v(traceback.format_exc())
            display.error(to_text(e))
            return False
Example #30
    def run(self, terms, variables=None, **kwargs):

        netbox_api_token = kwargs.get("token")
        netbox_api_endpoint = kwargs.get("api_endpoint")
        netbox_private_key_file = kwargs.get("key_file")
        netbox_api_filter = kwargs.get("api_filter")
        netbox_raw_return = kwargs.get("raw_data")

        if not isinstance(terms, list):
            terms = [terms]

        try:
            netbox = pynetbox.api(
                netbox_api_endpoint,
                token=netbox_api_token,
                private_key_file=netbox_private_key_file,
            )
        except FileNotFoundError:
            raise AnsibleError(
                "%s cannot be found. Please make sure file exists." %
                netbox_private_key_file)

        results = []
        for term in terms:

            try:
                endpoint = get_endpoint(netbox, term)
            except KeyError:
                raise AnsibleError(
                    "Unrecognised term %s. Check documentation" % term)

            Display().vvvv(
                u"Netbox lookup for %s to %s using token %s filter %s" %
                (term, netbox_api_endpoint, netbox_api_token,
                 netbox_api_filter))

            if netbox_api_filter:
                filter = parse_kv(netbox_api_filter)

                Display().vvvv("filter is %s" % filter)

                for res in endpoint.filter(**filter):

                    Display().vvvvv(pformat(dict(res)))

                    if netbox_raw_return:
                        results.append(dict(res))

                    else:
                        key = dict(res)["id"]
                        result = {key: dict(res)}
                        results.extend(self._flatten_hash_to_list(result))

            else:
                for res in endpoint.all():

                    Display().vvvvv(pformat(dict(res)))

                    if netbox_raw_return:
                        results.append(dict(res))

                    else:
                        key = dict(res)["id"]
                        result = {key: dict(res)}
                        results.extend(self._flatten_hash_to_list(result))

        return results
Example #31
    def run(self, terms, variables=None, **kwargs):
        if not isinstance(terms, list):
            terms = [terms]

        ctx = {}
        while len(terms) > 0 and isinstance(terms[0], dict):
            # Allow specifying a list of terms as a 'terms' parameter

            item = terms.pop(0)
            if 'terms' in item:
                terms.extend(item.pop('terms'))

            ctx.update(item)
        ctx = fill_context(ctx, variables, **kwargs)

        # Prepare per-term inject, making named context available, if any

        search_inject = variables.copy()

        # Extract search description from context (it may contain references
        # to {{term}}, which cannot be interpolated just yet, as the term
        # variable is still undefined.

        per_item_ctx = {
            'context': ctx.pop('context', None),
            'base': ctx.pop('base', ''),
            'scope': ctx.pop('scope', 'subtree'),
            'filter': ctx.pop('filter', None)
        }
        # At this point, no term-specific items remain in ctx, and we can
        # do template substitution for connection parameters.

        try:
            ctx = self.render_template(variables, ctx)
        except (Exception, ) as e:
            raise errors.AnsibleError(
                'exception while preparing LDAP parameters: %s' % e)
        self._display.vv("LDAP config: %s" % hide_pw(ctx))

        # Compute attribute list and attribute properties

        base_args = {}
        attr_props = {}
        single_attr = None
        value_spec = ctx.get('value')
        if value_spec is not None and not isinstance(value_spec, list):
            value_spec = [value_spec]
        if value_spec is not None:
            for attr in value_spec:
                if not isinstance(attr, dict):
                    attr_props[attr] = None
                else:
                    for attr_name, attr_prop_dict in attr.items():
                        if not isinstance(attr_prop_dict, dict):
                            attr_prop_dict = parse_kv(attr_prop_dict)
                        attr_props[attr_name] = attr_prop_dict

            base_args['attrlist'] = \
                [a.encode('ASCII') for a in attr_props
                 if attr_props[a] is None
                 or not attr_props[a].get('skip', False)]

            if len(base_args['attrlist']) == 1:
                single_attr = base_args['attrlist'][0]

        self._display.vv('Attribute props: %s' % attr_props)

        key_attr = ctx.get('key')
        if key_attr is not None \
                and key_attr != 'dn' \
                and 'attrlist' in base_args \
                and key_attr not in base_args['attrlist']:
            base_args['attrlist'].append(key_attr.encode('ASCII'))

        if sys.version_info[0] > 2:
            try:
                base_args['attrlist'] = [
                    a.decode('ASCII') for a in base_args['attrlist']
                ]
            except KeyError:
                pass

        # Connect and bind
        with LookupModule.__ldap_library_lock:
            LookupModule.set_ldap_library_options(ctx)
            lo = ldap.initialize(ctx['url'])
            # StartTLS if required
            if ctx.get('tls', False):
                lo.start_tls_s()
            if ctx.get('auth', 'simple') == 'gssapi':
                auth_tokens = ldap.sasl.gssapi()
                lo.sasl_interactive_bind_s('', auth_tokens)
            else:
                # bindpw may be an AnsibleVaultEncryptedUnicode, which ldap doesn't
                # know anything about, so cast to unicode explicitly now.

                pw = ctx.get('bindpw', '')
                if sys.version_info[0] <= 2:
                    pw = unicode(pw)
                lo.simple_bind_s(ctx.get('binddn', ''), pw)

        ret = []

        # If no terms are provided, assume that the user specified all
        # aspects of the search with no reference to {{term}}.

        if terms == []:
            terms = [None]

        for term in terms:
            if isinstance(term, dict):
                raise errors.AnsibleError(
                    'context parameters must come before search terms')

            # Compute templated search parameters

            this_item_ctx = dict(ctx)
            this_item_ctx.update(per_item_ctx)

            search_inject['term'] = term
            search_inject['context'] = this_item_ctx.get('context')
            search_desc = self.render_template(search_inject, this_item_ctx)
            self._display.vv('LDAP search, expanded: %s' %
                             hide_pw(search_desc))

            # Perform search

            base = search_desc['base']
            scope = getattr(ldap, 'SCOPE_%s' % search_desc['scope'].upper())
            args = base_args.copy()
            if search_desc['filter'] is not None:
                args['filterstr'] = search_desc['filter']

            lr = lo.search_s(base, scope, **args)

            # Process results

            for dn, attrs in lr:
                if single_attr is not None:
                    if single_attr == 'dn':
                        items = [dn]
                    else:
                        items = attrs.get(single_attr, [])

                    p = attr_props.get(single_attr) or {}
                    if key_attr is not None:
                        if key_attr == 'term':
                            key = term
                        elif key_attr == 'dn':
                            key = dn
                        else:
                            key = attrs[key_attr][0]
                        ret.extend([{
                            key_attr: key,
                            single_attr: encode(p, item)
                        } for item in items])
                    else:
                        ret.extend([encode(p, item) for item in items])

                else:
                    item = {'term': term, 'dn': dn}
                    for a in attrs:
                        p = attr_props.get(a) or {}
                        if not p.get('skip', False):
                            vlist = []
                            for v in attrs[a]:
                                vlist.append(encode(p, v))

                            if 'join' in p:
                                item[a] = p['join'].join(vlist)
                            elif len(vlist) > 1 \
                                    or p.get('always_list', False):
                                item[a] = vlist
                            else:
                                item[a] = vlist[0]

                    ret.append(item)

        return ret
Example #32
    def run_cmd(name, host_list, module, arg, sudo, forks):
        sources = ','.join(host_list)
        if len(host_list) == 1:
            sources += ','

        # initialize needed objects
        Options = namedtuple('Options', [
            'connection', 'module_path', 'forks', 'remote_user',
            'private_key_file', 'ssh_common_args', 'ssh_extra_args',
            'sftp_extra_args', 'scp_extra_args', 'become', 'become_method',
            'become_user', 'verbosity', 'check', 'diff'
        ])
        loader = DataLoader()

        options = Options(connection='ssh',
                          module_path=None,
                          forks=forks,
                          remote_user="******",
                          private_key_file=None,
                          ssh_common_args=None,
                          ssh_extra_args=None,
                          sftp_extra_args=None,
                          scp_extra_args=None,
                          become=sudo,
                          become_method='sudo',
                          become_user='******',
                          verbosity=None,
                          check=False,
                          diff=False)

        passwords = dict()

        # create inventory and pass to var manager
        inventory = InventoryManager(loader=loader, sources=sources)
        variable_manager = VariableManager(loader=loader, inventory=inventory)

        check_raw = module in ('command', 'shell', 'script', 'raw')

        # create play with tasks
        play_source = dict(
            name=name,  # likes this "taskname#taskid_123@projectname",
            hosts=host_list,
            gather_facts='no',
            tasks=[
                dict(action=dict(module=module,
                                 args=parse_kv(arg, check_raw=check_raw)))
            ])
        play = Play().load(play_source,
                           variable_manager=variable_manager,
                           loader=loader)

        tqm = None
        try:
            tqm = TaskQueueManager(
                inventory=inventory,
                variable_manager=variable_manager,
                loader=loader,
                options=options,
                passwords=passwords,
                stdout_callback=CallbackModule(),
            )

            rc = tqm.run(play)
            detail = tqm._stdout_callback.std_lines
        finally:
            if tqm is not None:
                tqm.cleanup()
        return {'rc': rc, 'detail': detail}
Example #33
    def check_parse_kv(self, args, expected):
        tools.eq_(parse_kv(args), expected)
Example #34
def boilerplate_module(modfile, args, interpreters, check, destfile):
    """ simulate what ansible does with new style modules """

    # module_fh = open(modfile)
    # module_data = module_fh.read()
    # module_fh.close()

    # replacer = module_common.ModuleReplacer()
    loader = DataLoader()

    # included_boilerplate = module_data.find(module_common.REPLACER) != -1 or module_data.find("import ansible.module_utils") != -1

    complex_args = {}

    # default selinux fs list is passed in as the _ansible_selinux_special_fs arg
    complex_args['_ansible_selinux_special_fs'] = C.DEFAULT_SELINUX_SPECIAL_FS
    complex_args['_ansible_tmpdir'] = C.DEFAULT_LOCAL_TMP
    complex_args['_ansible_keep_remote_files'] = C.DEFAULT_KEEP_REMOTE_FILES
    complex_args['_ansible_version'] = __version__

    if args.startswith("@"):
        # Argument is a YAML file (JSON is a subset of YAML)
        complex_args = utils_vars.combine_vars(complex_args,
                                               loader.load_from_file(args[1:]))
        args = ''
    elif args.startswith("{"):
        # Argument is a YAML document (not a file)
        complex_args = utils_vars.combine_vars(complex_args, loader.load(args))
        args = ''

    if args:
        parsed_args = parse_kv(args)
        complex_args = utils_vars.combine_vars(complex_args, parsed_args)

    task_vars = interpreters

    if check:
        complex_args['_ansible_check_mode'] = True

    modname = os.path.basename(modfile)
    modname = os.path.splitext(modname)[0]
    (module_data, module_style,
     shebang) = module_common.modify_module(modname,
                                            modfile,
                                            complex_args,
                                            Templar(loader=loader),
                                            task_vars=task_vars)

    if module_style == 'new' and '_ANSIBALLZ_WRAPPER = True' in to_native(
            module_data):
        module_style = 'ansiballz'

    modfile2_path = os.path.expanduser(destfile)
    print("* including generated source, if any, saving to: %s" %
          modfile2_path)
    if module_style not in ('ansiballz', 'old'):
        print("* this may offset any line numbers in tracebacks/debuggers!")
    modfile2 = open(modfile2_path, 'wb')
    modfile2.write(module_data)
    modfile2.close()
    modfile = modfile2_path

    return (modfile2_path, modname, module_style)
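A brief sketch of how this variant of boilerplate_module might be called, and of the three argument forms it accepts; the module path, argument string and interpreter mapping below are hypothetical.

# args accepts three forms:
#   '@/tmp/args.yml'        -> loaded from a YAML/JSON file
#   '{"state": "present"}'  -> parsed as an inline YAML/JSON document
#   'state=present name=x'  -> parsed with parse_kv()
modfile2_path, modname, module_style = boilerplate_module(
    modfile='./library/ping.py',                                      # hypothetical module file
    args='data=hello',
    interpreters={'ansible_python_interpreter': '/usr/bin/python3'},  # hypothetical interpreter mapping
    check=False,
    destfile='/tmp/ping_expanded.py')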
Example No. 35
0
def boilerplate_module(modfile, args, interpreter, check, destfile):
    """ simulate what ansible does with new style modules """

    loader = DataLoader()

    #included_boilerplate = module_data.find(module_common.REPLACER) != -1 or module_data.find("import ansible.module_utils") != -1

    complex_args = {}
    if args.startswith("@"):
        # Argument is a YAML file (JSON is a subset of YAML)
        complex_args = utils_vars.combine_vars(complex_args,
                                               loader.load_from_file(args[1:]))
        args = ''
    elif args.startswith("{"):
        # Argument is a YAML document (not a file)
        complex_args = utils_vars.combine_vars(complex_args, loader.load(args))
        args = ''

    if args:
        parsed_args = parse_kv(args)
        complex_args = utils_vars.combine_vars(complex_args, parsed_args)

    task_vars = {}
    if interpreter:
        if '=' not in interpreter:
            print(
                "interpreter must be in the form of ansible_python_interpreter=/usr/bin/python"
            )
            sys.exit(1)
        interpreter_type, interpreter_path = interpreter.split('=')
        if not interpreter_type.startswith('ansible_'):
            interpreter_type = 'ansible_%s' % interpreter_type
        if not interpreter_type.endswith('_interpreter'):
            interpreter_type = '%s_interpreter' % interpreter_type
        task_vars[interpreter_type] = interpreter_path

    if check:
        complex_args['_ansible_check_mode'] = True

    modname = os.path.basename(modfile)
    modname = os.path.splitext(modname)[0]
    (module_data, module_style,
     shebang) = module_common.modify_module(modname,
                                            modfile,
                                            complex_args,
                                            task_vars=task_vars)

    if module_style == 'new' and 'ZIPLOADER_WRAPPER = True' in module_data:
        module_style = 'ziploader'

    modfile2_path = os.path.expanduser(destfile)
    print("* including generated source, if any, saving to: %s" %
          modfile2_path)
    if module_style not in ('ziploader', 'old'):
        print("* this may offset any line numbers in tracebacks/debuggers!")
    modfile2 = open(modfile2_path, 'w')
    modfile2.write(module_data)
    modfile2.close()
    modfile = modfile2_path

    return (modfile2_path, modname, module_style)
Example No. 36
0
def test_parse_kv(args, expected):
    assert parse_kv(args) == expected
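This looks like a pytest-parametrised test; a hedged sketch of how the cases might be wired up (the specific cases are illustrative):

import pytest
from ansible.parsing.splitter import parse_kv

@pytest.mark.parametrize('args, expected', [
    ('a=b', {'a': 'b'}),
    ('src=a dest=b', {'src': 'a', 'dest': 'b'}),
])
def test_parse_kv(args, expected):
    assert parse_kv(args) == expected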
Example No. 37
0
    def run(self):

        # Manage passwords
        sshpass = None
        becomepass = None
        vault_pass = None
        passwords = {}

        # don't deal with privilege escalation or passwords when we don't need to
        if not self.options.listhosts and not self.options.listtasks and not self.options.listtags and not self.options.syntax:
            self.normalize_become_options()
            (sshpass, becomepass) = self.ask_passwords()
            passwords = {'conn_pass': sshpass, 'become_pass': becomepass}

        if self.options.vault_password_file:
            # read vault_pass from a file
            vault_pass = read_vault_file(self.options.vault_password_file)
        elif self.options.ask_vault_pass:
            vault_pass = self.ask_vault_passwords(ask_vault_pass=True,
                                                  ask_new_vault_pass=False,
                                                  confirm_new=False)[0]

        loader = DataLoader(vault_password=vault_pass)

        extra_vars = {}
        for extra_vars_opt in self.options.extra_vars:
            extra_vars_opt = to_unicode(extra_vars_opt, errors='strict')
            if extra_vars_opt.startswith(u"@"):
                # Argument is a YAML file (JSON is a subset of YAML)
                data = loader.load_from_file(extra_vars_opt[1:])
            elif extra_vars_opt and extra_vars_opt[0] in u'[{':
                # Arguments as YAML
                data = loader.load(extra_vars_opt)
            else:
                # Arguments as Key-value
                data = parse_kv(extra_vars_opt)
            extra_vars = combine_vars(extra_vars, data)

        # FIXME: this should be moved inside the playbook executor code
        only_tags = self.options.tags.split(",")
        skip_tags = self.options.skip_tags
        if self.options.skip_tags is not None:
            skip_tags = self.options.skip_tags.split(",")

        # initial error check, to make sure all specified playbooks are accessible
        # before we start running anything through the playbook executor
        for playbook in self.args:
            if not os.path.exists(playbook):
                raise AnsibleError("the playbook: %s could not be found" %
                                   playbook)
            if not (os.path.isfile(playbook)
                    or stat.S_ISFIFO(os.stat(playbook).st_mode)):
                raise AnsibleError(
                    "the playbook: %s does not appear to be a file" % playbook)

        # create the variable manager, which will be shared throughout
        # the code, ensuring a consistent view of global variables
        variable_manager = VariableManager()
        variable_manager.extra_vars = extra_vars

        # create the inventory, and filter it based on the subset specified (if any)
        inventory = Inventory(loader=loader,
                              variable_manager=variable_manager,
                              host_list=self.options.inventory)
        variable_manager.set_inventory(inventory)

        # Note: slightly wrong, this is written so that implicit localhost
        # (which is not returned in list_hosts()) is taken into account for
        # warning if inventory is empty.  But it can't be taken into account for
        # checking if limit doesn't match any hosts.  Instead we don't worry about
        # limit if only implicit localhost was in inventory to start with.
        #
        # Fix this when we rewrite inventory by making localhost a real host (and thus show up in list_hosts())
        no_hosts = False
        if len(inventory.list_hosts()) == 0:
            # Empty inventory
            self.display.warning(
                "provided hosts list is empty, only localhost is available")
            no_hosts = True
        inventory.subset(self.options.subset)
        if len(inventory.list_hosts()) == 0 and no_hosts is False:
            # Invalid limit
            raise AnsibleError("Specified --limit does not match any hosts")

        # create the playbook executor, which manages running the plays via a task queue manager
        pbex = PlaybookExecutor(playbooks=self.args,
                                inventory=inventory,
                                variable_manager=variable_manager,
                                loader=loader,
                                display=self.display,
                                options=self.options,
                                passwords=passwords)

        results = pbex.run()

        if isinstance(results, list):
            for p in results:

                self.display.display('\nplaybook: %s\n' % p['playbook'])
                for play in p['plays']:
                    if self.options.listhosts:
                        self.display.display("\n  %s (%s): host count=%d" %
                                             (play['name'], play['pattern'],
                                              len(play['hosts'])))
                        for host in play['hosts']:
                            self.display.display("    %s" % host)
                    if self.options.listtasks:  #TODO: do we want to display block info?
                        self.display.display("\n  %s" % (play['name']))
                        for task in play['tasks']:
                            self.display.display("    %s" % task)
                    if self.options.listtags:  #TODO: fix once we figure out block handling above
                        self.display.display("\n  %s: tags count=%d" %
                                             (play['name'], len(play['tags'])))
                        for tag in play['tags']:
                            self.display.display("    %s" % tag)
            return 0
        else:
            return results
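The extra_vars loop above accepts the same three forms as -e on the command line; a small illustration of the key=value branch (the variable names are made up):

from ansible.parsing.splitter import parse_kv

print(parse_kv(u'env=prod region=us-east-1'))
# -> {'env': 'prod', 'region': 'us-east-1'}
# '@vars.yml' and '{"env": "prod"}' would instead be handled by
# loader.load_from_file() and loader.load() respectively.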
Example No. 38
0
class LookupModule(LookupBase):

    # We may have to modify LDAP library options when making a new LDAP
    # connection (e.g. to ignore server certificate validation). We don't
    # want any other thread to be attempting to modify library options at
    # the same time.
    #
    # We hope no agent besides this library is trying to set library options
    # simultaneously. Unfortunately, we don't have a way to ensure that.
    # Use library-level options with care.

    __ldap_library_lock = threading.Lock()

    def render_template(self, inject, v):
        return Templar(loader=self._loader, variables=inject).template(v)

    def run(self, terms, variables=None, **kwargs):
        if not isinstance(terms, list):
            terms = [terms]

        ctx = {}
        while len(terms) > 0 and isinstance(terms[0], dict):
            ctx.update(terms.pop(0))
        ctx = fill_context(ctx, variables, **kwargs)

        # Prepare per-term inject, making named context available, if any

        search_inject = variables.copy()

        # Extract search description from context (it may contain references
        # to {{term}}, which cannot be interpolated just yet, as the term
        # variable is still undefined.

        per_item_ctx = {
            'context': ctx.pop('context', None),
            'base': ctx.pop('base', ''),
            'scope': ctx.pop('scope', 'subtree'),
            'filter': ctx.pop('filter', None)
        }
        # At this point, no term-specific items remain in ctx, and we can
        # do template substitution for connection parameters

        try:
            ctx = self.render_template(variables, ctx)
        except Exception as e:
            raise errors.AnsibleError(
                'exception while preparing LDAP parameters: %s' % e)
        self._display.vv("LDAP config: %s" % ctx)

        # Compute attribute list and attribute properties

        base_args = {}
        attr_props = {}
        single_attr = None
        value_spec = ctx.get('value')
        if value_spec is not None and not isinstance(value_spec, list):
            value_spec = [value_spec]
        if value_spec is not None:
            for attr in value_spec:
                if not isinstance(attr, dict):
                    attr_props[attr] = None
                else:
                    for attr_name, attr_prop_dict in attr.items():
                        if not isinstance(attr_prop_dict, dict):
                            attr_prop_dict = parse_kv(attr_prop_dict)
                        attr_props[attr_name] = attr_prop_dict

            base_args['attrlist'] = \
                [a.encode('ASCII') for a in attr_props
                 if attr_props[a] is None
                 or not attr_props[a].get('skip', False)]

            if len(base_args['attrlist']) == 1:
                single_attr = base_args['attrlist'][0]

        self._display.vv('Attribute props: %s' % attr_props)

        key_attr = ctx.get('key')
        if key_attr is not None \
                and key_attr != 'dn' \
                and 'attrlist' in base_args \
                and key_attr not in base_args['attrlist']:
            base_args['attrlist'].append(key_attr.encode('ASCII'))

        # Connect and bind
        with LookupModule.__ldap_library_lock:
            LookupModule.set_ldap_library_options(ctx)
            lo = ldap.initialize(ctx['url'])
            lo.simple_bind_s(ctx.get('binddn', ''), ctx.get('bindpw', ''))

        ret = []

        # If no terms are provided, assume that the user specified all
        # aspects of the search with no reference to {{term}}.

        if terms == []:
            terms = [None]

        for term in terms:
            if isinstance(term, dict):
                raise errors.AnsibleError(
                    'context parameters must come before search terms')

            # Compute templated search parameters

            this_item_ctx = dict(ctx)
            this_item_ctx.update(per_item_ctx)

            search_inject['term'] = term
            search_inject['context'] = this_item_ctx.get('context')
            search_desc = self.render_template(search_inject, this_item_ctx)
            self._display.vv('LDAP search, expanded: %s' % search_desc)

            # Perform search

            base = search_desc['base']
            scope = getattr(ldap, 'SCOPE_%s' % search_desc['scope'].upper())
            args = base_args.copy()
            if search_desc['filter'] is not None:
                args['filterstr'] = search_desc['filter']

            lr = lo.search_s(base, scope, **args)

            # Process results

            for dn, attrs in lr:
                if single_attr is not None:
                    if single_attr == 'dn':
                        items = [dn]
                    else:
                        items = attrs.get(single_attr, [])

                    p = attr_props.get(single_attr) or {}
                    if key_attr is not None:
                        if key_attr == 'term':
                            key = term
                        elif key_attr == 'dn':
                            key = dn
                        else:
                            key = attrs[key_attr][0]
                        ret.extend([{
                            key_attr: key,
                            single_attr: encode(p, item)
                        } for item in items])
                    else:
                        ret.extend([encode(p, item) for item in items])

                else:
                    item = {'term': term, 'dn': dn}
                    for a in attrs:
                        p = attr_props.get(a) or {}
                        if not p.get('skip', False):
                            vlist = []
                            for v in attrs[a]:
                                vlist.append(encode(p, v))

                            if 'join' in p:
                                item[a] = p['join'].join(vlist)
                            elif len(vlist) > 1 \
                                    or p.get('always_list', False):
                                item[a] = vlist
                            else:
                                item[a] = vlist[0]

                    ret.append(item)

        return ret
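In the value-spec handling above, an attribute property given as a string is expanded with parse_kv; a tiny illustrative sketch (the property strings are made up):

from ansible.parsing.splitter import parse_kv

print(parse_kv('join=,'))     # -> {'join': ','}
print(parse_kv('skip=True'))  # -> {'skip': 'True'}  (a string, truthy when tested)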
Example No. 39
0
    def _normalize_parameters(self,
                              thing,
                              action=None,
                              additional_args=dict()):
        '''
        arguments can be fuzzy.  Deal with all the forms.
        '''

        # final args are the ones we'll eventually return, so first update
        # them with any additional args specified, which have lower priority
        # than those which may be parsed/normalized next
        final_args = dict()
        if additional_args:
            if isinstance(additional_args, string_types):
                templar = Templar(loader=None)
                if templar._contains_vars(additional_args):
                    final_args['_variable_params'] = additional_args
                else:
                    raise AnsibleParserError(
                        "Complex args containing variables cannot use bare variables, and must use the full variable style "
                        "('{{var_name}}')")
            elif isinstance(additional_args, dict):
                final_args.update(additional_args)
            else:
                raise AnsibleParserError(
                    'Complex args must be a dictionary or variable string ("{{var}}").'
                )

        # how we normalize depends if we figured out what the module name is
        # yet.  If we have already figured it out, it's a 'new style' invocation.
        # otherwise, it's not

        if action is not None:
            args = self._normalize_new_style_args(thing, action)
        else:
            (action, args) = self._normalize_old_style_args(thing)

            # this can occasionally happen, simplify
            if args and 'args' in args:
                tmp_args = args.pop('args')
                if isinstance(tmp_args, string_types):
                    tmp_args = parse_kv(tmp_args)
                args.update(tmp_args)

        # only internal variables can start with an underscore, so
        # we don't allow users to set them directly in arguments
        if args and action not in ('command', 'win_command', 'shell',
                                   'win_shell', 'script', 'raw'):
            for arg in args:
                arg = to_text(arg)
                if arg.startswith('_ansible_'):
                    raise AnsibleError(
                        "invalid parameter specified for action '%s': '%s'" %
                        (action, arg))

        # finally, update the args we're going to return with the ones
        # which were normalized above
        if args:
            final_args.update(args)

        return (action, final_args)
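A short hedged illustration of the additional_args branch at the top of _normalize_parameters; the variable names are illustrative.

from ansible.template import Templar

templar = Templar(loader=None)
# A templated string is deferred under '_variable_params' for later resolution:
print(templar._contains_vars('{{ my_arg_dict }}'))  # -> True
# A bare, non-templated string would take the error branch above:
print(templar._contains_vars('state=present'))      # -> False (AnsibleParserError raised above)
# A plain dict, e.g. {'state': 'present'}, is merged straight into final_args.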
Example No. 40
0
    def default(self, arg, forceshell=False):
        """ actually runs modules """
        if arg.startswith("#"):
            return False

        if not self.options.cwd:
            display.error("No host found")
            return False

        if arg.split()[0] in self.modules:
            module = arg.split()[0]
            module_args = ' '.join(arg.split()[1:])
        else:
            module = 'shell'
            module_args = arg

        if forceshell is True:
            module = 'shell'
            module_args = arg

        self.options.module_name = module

        result = None
        try:
            play_ds = dict(
                name="Ansible Shell",
                hosts=self.options.cwd,
                gather_facts='no',
                #tasks = [ dict(action=dict(module=module, args=parse_kv(module_args)), async=self.options.async, poll=self.options.poll_interval) ]
                tasks=[
                    dict(
                        action=dict(module=module, args=parse_kv(module_args)))
                ])
            play = Play().load(play_ds,
                               variable_manager=self.variable_manager,
                               loader=self.loader)
        except Exception as e:
            display.error(u"Unable to build command: %s" % to_unicode(e))
            return False

        try:
            cb = 'minimal'  #FIXME: make callbacks configurable
            # now create a task queue manager to execute the play
            self._tqm = None
            try:
                self._tqm = TaskQueueManager(
                    inventory=self.inventory,
                    variable_manager=self.variable_manager,
                    loader=self.loader,
                    options=self.options,
                    passwords=self.passwords,
                    stdout_callback=cb,
                    run_additional_callbacks=C.DEFAULT_LOAD_CALLBACK_PLUGINS,
                    run_tree=False,
                )

                result = self._tqm.run(play)
            finally:
                if self._tqm:
                    self._tqm.cleanup()

            if result is None:
                display.error("No hosts found")
                return False
        except KeyboardInterrupt:
            display.error('User interrupted execution')
            return False
        except Exception as e:
            display.error(to_unicode(e))
            #FIXME: add traceback in very very verbose mode
            return False
Example No. 41
0
    def default(self, arg, forceshell=False):
        """ actually runs modules """
        if arg.startswith("#"):
            return False

        if not self.options.cwd:
            display.error("No host found")
            return False

        if arg.split()[0] in self.modules:
            module = arg.split()[0]
            module_args = ' '.join(arg.split()[1:])
        else:
            module = 'shell'
            module_args = arg

        if forceshell is True:
            module = 'shell'
            module_args = arg

        self.options.module_name = module

        result = None
        try:
            check_raw = self.options.module_name in ('command', 'shell', 'script', 'raw')
            play_ds = dict(
                name = "Ansible Shell",
                hosts = self.options.cwd,
                gather_facts = 'no',
                tasks = [ dict(action=dict(module=module, args=parse_kv(module_args, check_raw=check_raw)))]
            )
            play = Play().load(play_ds, variable_manager=self.variable_manager, loader=self.loader)
        except Exception as e:
            display.error(u"Unable to build command: %s" % to_text(e))
            return False

        try:
            cb = 'minimal'  # FIXME: make callbacks configurable
            # now create a task queue manager to execute the play
            self._tqm = None
            try:
                self._tqm = TaskQueueManager(
                        inventory=self.inventory,
                        variable_manager=self.variable_manager,
                        loader=self.loader,
                        options=self.options,
                        passwords=self.passwords,
                        stdout_callback=cb,
                        run_additional_callbacks=C.DEFAULT_LOAD_CALLBACK_PLUGINS,
                        run_tree=False,
                    )

                result = self._tqm.run(play)
            finally:
                if self._tqm:
                    self._tqm.cleanup()
                if self.loader:
                    self.loader.cleanup_all_tmp_files()

            if result is None:
                display.error("No hosts found")
                return False
        except KeyboardInterrupt:
            display.error('User interrupted execution')
            return False
        except Exception as e:
            display.error(to_text(e))
            # FIXME: add traceback in very very verbose mode
            return False
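The check_raw flag used above controls how free-form module arguments are split; a small illustration:

from ansible.parsing.splitter import parse_kv

# With check_raw=True, free-form text is kept in '_raw_params' while a few
# recognised options (such as creates/removes/chdir) are still parsed as key=value.
print(parse_kv('echo hi creates=/tmp/x', check_raw=True))
# -> {'_raw_params': 'echo hi', 'creates': '/tmp/x'}
print(parse_kv('src=a dest=b'))
# -> {'src': 'a', 'dest': 'b'}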
Example No. 42
0
    def run(self, terms, variables=None, **kwargs):

        netbox_api_token = kwargs.get("token")
        netbox_api_endpoint = kwargs.get("api_endpoint")
        netbox_ssl_verify = kwargs.get("validate_certs", True)
        netbox_private_key_file = kwargs.get("key_file")
        netbox_api_filter = kwargs.get("api_filter")
        netbox_raw_return = kwargs.get("raw_data")

        if not isinstance(terms, list):
            terms = [terms]

        try:
            session = requests.Session()
            session.verify = netbox_ssl_verify

            netbox = pynetbox.api(
                netbox_api_endpoint,
                token=netbox_api_token if netbox_api_token else None,
                private_key_file=netbox_private_key_file,
            )
            netbox.http_session = session
        except FileNotFoundError:
            raise AnsibleError(
                "%s cannot be found. Please make sure file exists." %
                netbox_private_key_file)

        results = []
        for term in terms:

            try:
                endpoint = get_endpoint(netbox, term)
            except KeyError:
                raise AnsibleError(
                    "Unrecognised term %s. Check documentation" % term)

            Display().vvvv(
                u"Netbox lookup for %s to %s using token %s filter %s" %
                (term, netbox_api_endpoint, netbox_api_token,
                 netbox_api_filter))

            if netbox_api_filter:
                args_split = split_args(netbox_api_filter)
                args = [parse_kv(x) for x in args_split]
                filter = {}
                for arg in args:
                    for k, v in arg.items():
                        if k not in filter:
                            filter[k] = list()
                        filter[k].append(v)

                Display().vvvv("filter is %s" % filter)

                for res in endpoint.filter(**filter):

                    Display().vvvvv(pformat(dict(res)))

                    if netbox_raw_return:
                        results.append(dict(res))

                    else:
                        key = dict(res)["id"]
                        result = {key: dict(res)}
                        results.extend(self._flatten_hash_to_list(result))

            else:
                for res in endpoint.all():

                    Display().vvvvv(pformat(dict(res)))

                    if netbox_raw_return:
                        results.append(dict(res))

                    else:
                        key = dict(res)["id"]
                        result = {key: dict(res)}
                        results.extend(self._flatten_hash_to_list(result))

        return results
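A hedged sketch of how an api_filter string is turned into the filter dictionary used above; the filter values are illustrative.

from ansible.parsing.splitter import parse_kv, split_args

netbox_api_filter = 'role=leaf-switch status=active'  # hypothetical filter string
args_split = split_args(netbox_api_filter)            # ['role=leaf-switch', 'status=active']
args = [parse_kv(x) for x in args_split]              # [{'role': 'leaf-switch'}, {'status': 'active'}]

filters = {}
for arg in args:
    for k, v in arg.items():
        filters.setdefault(k, []).append(v)
print(filters)  # -> {'role': ['leaf-switch'], 'status': ['active']}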
Example No. 43
0
    def play(self, target_ip, tasks):
        Options = namedtuple('Options', [
            'connection', 'module_path', 'forks', 'remote_user',
            'private_key_file', 'ssh_common_args', 'ssh_extra_args',
            'sftp_extra_args', 'scp_extra_args', 'become', 'become_method',
            'become_user', 'verbosity', 'check'
        ])
        # initialize needed objects
        variable_manager = VariableManager()
        # TODO load vars
        loader = DataLoader()
        options = Options(connection='ssh',
                          module_path='/etc/ansible/modules',
                          forks=100,
                          remote_user="******",
                          private_key_file="",
                          ssh_common_args=None,
                          ssh_extra_args=None,
                          sftp_extra_args=None,
                          scp_extra_args=None,
                          become=True,
                          become_method="sudo",
                          become_user="******",
                          verbosity=None,
                          check=False)
        passwords = dict(vault_pass='******')

        # create inventory and pass to var manager
        inventory = Inventory(loader=loader, variable_manager=variable_manager, \
                              host_list=[ip for ip in target_ip])
        variable_manager.set_inventory(inventory)

        # create play with tasks
        task_list = []
        for task in tasks:
            # task = "sysctl: name=net.ipv4.ip_forward value=1 state=present"
            module, tasks_str = task.split(':', 1)
            # parse args
            kv_args = parse_kv(tasks_str)
            # create datastructure
            task_list.append(
                dict(action=dict(module=module, args=kv_args),
                     register='shell_out'), )
        print(task_list)

        play_source = dict(name="Ansible Play {}".format(target_ip),
                           hosts=target_ip,
                           gather_facts='no',
                           tasks=task_list)
        play = Play().load(play_source,
                           variable_manager=variable_manager,
                           loader=loader)

        # actually run it
        tqm = None
        try:
            tqm = TaskQueueManager(
                inventory=inventory,
                variable_manager=variable_manager,
                loader=loader,
                options=options,
                passwords=passwords,
                # TODO callback must be an instance of CallbackBase or the name of a callback plugin
                stdout_callback='default',
            )
            result = tqm.run(play)
        finally:
            if tqm is not None:
                tqm.cleanup()

        return 0, ""
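A small worked example of the task-string parsing used above, based on the string shown in the inline comment:

from ansible.parsing.splitter import parse_kv

task = 'sysctl: name=net.ipv4.ip_forward value=1 state=present'
module, tasks_str = task.split(':', 1)
print(module)               # -> 'sysctl'
print(parse_kv(tasks_str))  # -> {'name': 'net.ipv4.ip_forward', 'value': '1', 'state': 'present'}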
Example No. 44
0
    def runCmd(name, target, module, arg, sudo, forks):
        # initialize needed objects
        variable_manager = VariableManager()
        loader = DataLoader()
        Options = namedtuple('Options', [
            'listtags', 'listtasks', 'listhosts', 'syntax', 'connection',
            'module_path', 'forks', 'remote_user', 'private_key_file',
            'ssh_common_args', 'ssh_extra_args', 'sftp_extra_args',
            'scp_extra_args', 'become', 'become_method', 'become_user',
            'verbosity', 'check'
        ])
        pb_options = Options(listtags=False,
                             listtasks=False,
                             listhosts=False,
                             syntax=False,
                             connection='ssh',
                             module_path=None,
                             forks=forks,
                             remote_user='******',
                             private_key_file=None,
                             ssh_common_args=None,
                             ssh_extra_args=None,
                             sftp_extra_args=None,
                             scp_extra_args=None,
                             become=sudo,
                             become_method='sudo',
                             become_user='******',
                             verbosity=None,
                             check=False)

        passwords = {}

        # create inventory and pass to var manager
        inventory = Inventory(loader=loader, variable_manager=variable_manager)
        variable_manager.set_inventory(inventory)
        check_raw = module in ('command', 'shell', 'script', 'raw')
        # create play with tasks
        play_source = dict(
            name=name,  # e.g. "taskname#taskid_123@projectname"
            hosts=target,
            gather_facts='no',
            tasks=[
                dict(action=dict(module=module,
                                 args=parse_kv(arg, check_raw=check_raw)))
            ])
        play = Play().load(play_source,
                           variable_manager=variable_manager,
                           loader=loader)

        tqm = None
        try:
            tqm = TaskQueueManager(
                inventory=inventory,
                variable_manager=variable_manager,
                loader=loader,
                options=pb_options,
                passwords=passwords,
                stdout_callback=CallbackModule(),
            )

            # tqm._stdout_callback.reset_output()
            rc = tqm.run(play)
            detail = tqm._stdout_callback.std_lines
            # tqm._stdout_callback.reset_output()
        finally:
            if tqm is not None:
                tqm.cleanup()
        return {'rc': rc, 'detail': detail}