Example #1
 def _build_role_dependencies(self, roles, dep_stack, passed_vars={}, level=0):
     # this number is arbitrary, but it seems sane
     if level > 20:
         raise errors.AnsibleError("too many levels of recursion while resolving role dependencies")
     for role in roles:
         role_path,role_vars = self._get_role_path(role)
         # the meta directory contains the yaml that should
         # hold the list of dependencies (if any)
         meta = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'meta')))
         if os.path.isfile(meta):
             data = utils.parse_yaml_from_file(meta)
             if data:
                 dependencies = data.get('dependencies',[])
                 for dep in dependencies:
                     (dep_path,dep_vars) = self._get_role_path(dep)
                     vars = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'vars')))
                     vars_data = {}
                     if os.path.isfile(vars):
                         vars_data = utils.parse_yaml_from_file(vars)
                     dep_vars.update(role_vars)
                     for k in passed_vars.keys():
                         if not k in dep_vars:
                             dep_vars[k] = passed_vars[k]
                     for k in vars_data.keys():
                         if not k in dep_vars:
                             dep_vars[k] = vars_data[k]
                     if 'role' in dep_vars:
                         del dep_vars['role']
                     self._build_role_dependencies([dep], dep_stack, passed_vars=dep_vars, level=level+1)
                     dep_stack.append([dep,dep_path,dep_vars])
         # only add the current role when we're at the top level,
         # otherwise we'll end up in a recursive loop 
         if level == 0:
             dep_stack.append([role,role_path,role_vars])
     return dep_stack
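Every example in this collection centers on the same helper. As a reference point, here is a minimal sketch of what parse_yaml_from_file does, approximated with PyYAML; the real Ansible helper also handles vault decryption and richer error reporting, so treat this as an illustration, not the actual implementation.

    import yaml  # PyYAML, assumed available

    def parse_yaml_from_file(path):
        # Read a file and return the parsed YAML structure:
        # a dict, a list, or None for an empty file.
        with open(path) as f:
            return yaml.safe_load(f)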
Example #3
    def run(self, host, vault_password=None):
        # return the inventory variables for the host

        inventory = self.inventory
        #hostrec = inventory.get_host(host)

        groupz = sorted(inventory.groups_for_host(host.name),
                        key=lambda g: g.depth)
        groups = [g.name for g in groupz]
        basedir = inventory.basedir()

        if basedir is None:
            # could happen when inventory is passed in via the API
            return

        results = {}

        # load vars in inventory_dir/group_vars/name_of_group
        for group in groups:
            if group in self.group_cache:
                results = self.group_cache[group]
            else:
                group_vars_dir = os.path.join(basedir, "group_vars")
                group_vars_files = vars_files(group_vars_dir, group)
                #if len(group_vars_files) > 1:
                #    raise errors.AnsibleError("Found more than one file for group '%s': %s"
                #                      % (group, group_vars_files))
                for path in group_vars_files:
                    data = utils.parse_yaml_from_file(
                        path, vault_password=vault_password)
                    if type(data) != dict:
                        raise errors.AnsibleError(
                            "%s must be stored as a dictionary/hash" % path)
                    if C.DEFAULT_HASH_BEHAVIOUR == "merge":
                        # let data content override results if needed
                        results = utils.merge_hash(results, data)
                    else:
                        results.update(data)
                self.group_cache[group] = results

        # load vars in inventory_dir/host_vars/name_of_host
        host_vars_dir = os.path.join(basedir, "host_vars")
        host_vars_files = vars_files(host_vars_dir, host.name)
        if len(host_vars_files) > 1:
            raise errors.AnsibleError(
                "Found more than one file for host '%s': %s" %
                (host.name, host_vars_files))
        for path in host_vars_files:
            data = utils.parse_yaml_from_file(path,
                                              vault_password=vault_password)
            if type(data) != dict:
                raise errors.AnsibleError(
                    "%s must be stored as a dictionary/hash" % path)
            if C.DEFAULT_HASH_BEHAVIOUR == "merge":
                # let data content override results if needed
                results = utils.merge_hash(results, data)
            else:
                results.update(data)
        return results
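Example #3 calls a vars_files() helper that is not shown above. A hypothetical reconstruction, assuming it simply collects the existing candidate files for a name (bare, .yml, .yaml) under a vars directory, mirroring the path probing done inline in Example #6:

    import os

    def vars_files(vars_dir, name):
        # Hypothetical helper: return every existing candidate file
        # for 'name' under vars_dir, trying common YAML extensions.
        base = os.path.join(vars_dir, name)
        return [p for p in (base, base + ".yml", base + ".yaml")
                if os.path.isfile(p)]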
Example #4
    def _do_conditional_imports(self, vars_files, pattern=None):
        ''' handle the vars_files section, which can contain variables '''

        # FIXME: save parsed variable results in memory to avoid excessive re-reading/parsing
        # FIXME: currently parses imports for hosts not in the pattern, that is not wrong, but it's
        #        not super optimized yet either, because we wouldn't have hit them, ergo
        #        it will raise false errors if there is no defaults variable file without any $vars
        #        in it, which could happen on uncontacted hosts.

        if type(vars_files) != list:
            raise errors.AnsibleError("vars_files must be a list")

        host_list = [
            h for h in self.inventory.list_hosts(pattern)
            if not (h in self.stats.failures or h in self.stats.dark)
        ]

        for host in host_list:
            cache_vars = SETUP_CACHE.get(host, {})
            SETUP_CACHE[host] = cache_vars
            for filename in vars_files:
                if type(filename) == list:
                    # loop over all filenames, loading the first one, and failing if none found
                    found = False
                    sequence = []
                    for real_filename in filename:
                        filename2 = utils.path_dwim(
                            self.basedir,
                            utils.template(real_filename, cache_vars,
                                           SETUP_CACHE))
                        sequence.append(filename2)
                        if os.path.exists(filename2):
                            found = True
                            data = utils.parse_yaml_from_file(filename2)
                            SETUP_CACHE[host].update(data)
                            self.callbacks.on_import_for_host(host, filename2)
                            break
                        else:
                            self.callbacks.on_not_import_for_host(
                                host, filename2)
                    if not found:
                        raise errors.AnsibleError(
                            "%s: FATAL, no files matched for vars_files import sequence: %s"
                            % (host, sequence))

                else:
                    filename2 = utils.path_dwim(
                        self.basedir,
                        utils.template(filename, cache_vars, SETUP_CACHE))
                    if not os.path.exists(filename2):
                        raise errors.AnsibleError(
                            "no file matched for vars_file import: %s" %
                            filename2)
                    data = utils.parse_yaml_from_file(filename2)
                    SETUP_CACHE[host].update(data)
                    self.callbacks.on_import_for_host(host, filename2)
Example #5
    def run(self, host, vault_password=None):
        # return the inventory variables for the host

        inventory = self.inventory
        #hostrec = inventory.get_host(host)

        groupz = sorted(inventory.groups_for_host(host.name), key=lambda g: g.depth)
        groups = [ g.name for g in groupz ]
        basedir = inventory.basedir()

        if basedir is None:
            # could happen when inventory is passed in via the API
            return

        results = {}

        # load vars in inventory_dir/group_vars/name_of_group
        for group in groups:
            if group in self.group_cache:
                results = self.group_cache[group]
            else:
                group_vars_dir = os.path.join(basedir, "group_vars")
                group_vars_files = vars_files(group_vars_dir, group)
                #if len(group_vars_files) > 1:
                #    raise errors.AnsibleError("Found more than one file for group '%s': %s"
                #                      % (group, group_vars_files))
                for path in group_vars_files:
                    data = utils.parse_yaml_from_file(path, vault_password=vault_password)
                    if type(data) != dict:
                        raise errors.AnsibleError("%s must be stored as a dictionary/hash" % path)
                    if C.DEFAULT_HASH_BEHAVIOUR == "merge":
                        # let data content override results if needed
                        results = utils.merge_hash(results, data)
                    else:
                        results.update(data)
                self.group_cache[group] = results

        # load vars in inventory_dir/host_vars/name_of_host
        host_vars_dir = os.path.join(basedir, "host_vars")
        host_vars_files = vars_files(host_vars_dir, host.name)
        if len(host_vars_files) > 1:
            raise errors.AnsibleError("Found more than one file for host '%s': %s"
                                  % (host.name, host_vars_files))
        for path in host_vars_files:
            data = utils.parse_yaml_from_file(path, vault_password=vault_password)
            if type(data) != dict:
                raise errors.AnsibleError("%s must be stored as a dictionary/hash" % path)
            if C.DEFAULT_HASH_BEHAVIOUR == "merge":
                # let data content override results if needed
                results = utils.merge_hash(results, data)
            else:
                results.update(data)
        print("In group_vars_dirs run for %s" % host.name)
        print results
        return results
Example #6
    def run(self, host):
        # return the inventory variables for the host

        inventory = self.inventory
        #hostrec = inventory.get_host(host)

        groupz = sorted(inventory.groups_for_host(host.name),
                        key=lambda g: g.depth)
        groups = [g.name for g in groupz]
        basedir = inventory.basedir()

        if basedir is None:
            # could happen when inventory is passed in via the API
            return

        results = {}

        # load vars in inventory_dir/group_vars/name_of_group
        for x in groups:
            p = os.path.join(basedir, "group_vars/%s" % x)
            paths = [p, '.'.join([p, 'yml']), '.'.join([p, 'yaml'])]
            for path in paths:
                if os.path.exists(path):
                    data = utils.parse_yaml_from_file(path)
                    if type(data) != dict:
                        raise errors.AnsibleError(
                            "%s must be stored as a dictionary/hash" % path)
                    if C.DEFAULT_HASH_BEHAVIOUR == "merge":
                        # let data content override results if needed
                        results = utils.merge_hash(results, data)
                    else:
                        results.update(data)
                    break

        # load vars in inventory_dir/host_vars/name_of_host
        p = os.path.join(basedir, "host_vars/%s" % host.name)
        paths = [p, '.'.join([p, 'yml']), '.'.join([p, 'yaml'])]
        for path in paths:
            if os.path.exists(path):
                data = utils.parse_yaml_from_file(path)
                if type(data) != dict:
                    raise errors.AnsibleError(
                        "%s must be stored as a dictionary/hash" % path)
                if C.DEFAULT_HASH_BEHAVIOUR == "merge":
                    # let data content override results if needed
                    results = utils.merge_hash(results, data)
                else:
                    results.update(data)
                break
        return results
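Several of these loaders branch on C.DEFAULT_HASH_BEHAVIOUR. A short sketch of the difference, assuming merge_hash performs a recursive dictionary merge the way Ansible's does: plain update() replaces whole top-level keys, while the merge behaviour combines nested dictionaries key by key.

    def merge_hash(a, b):
        # Recursive dict merge: values from b win on conflicts, but
        # nested dicts are merged instead of replaced wholesale.
        result = dict(a)
        for k, v in b.items():
            if isinstance(result.get(k), dict) and isinstance(v, dict):
                result[k] = merge_hash(result[k], v)
            else:
                result[k] = v
        return result

    replaced = {"pkgs": {"web": "nginx"}}
    replaced.update({"pkgs": {"db": "postgres"}})      # 'web' key is lost
    merged = merge_hash({"pkgs": {"web": "nginx"}},
                        {"pkgs": {"db": "postgres"}})  # both keys survive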
Example #7
    def _load_playbook_from_file(self, path):
        '''
        do some top level error checking on playbooks and allow them to include other
        playbooks.
        '''

        playbook_data = utils.parse_yaml_from_file(path)
        accumulated_plays = []

        if type(playbook_data) != list:
            raise errors.AnsibleError(
                "parse error: playbooks must be formatted as a YAML list")

        for play in playbook_data:
            if type(play) != dict:
                raise errors.AnsibleError(
                    "parse error: each play in a playbook must a YAML dictionary (hash), recieved: %s"
                    % play)
            if 'include' in play:
                if len(play.keys()) == 1:
                    included_path = utils.path_dwim(self.basedir,
                                                    play['include'])
                    accumulated_plays.extend(
                        self._load_playbook_from_file(included_path))
                else:
                    raise errors.AnsibleError(
                        "parse error: top level includes cannot be used with other directives: %s"
                        % play)
            else:
                accumulated_plays.append(play)

        return accumulated_plays
Example #8
        def process_files(filename,
                          filename2,
                          filename3,
                          filename4,
                          host=None):
            """ pseudo-algorithm for deciding where new vars should go """

            data = utils.parse_yaml_from_file(
                filename4, vault_password=self.vault_password)
            if data:
                if type(data) != dict:
                    raise errors.AnsibleError(
                        "%s must be stored as a dictionary/hash" % filename4)
                if host is not None:
                    target_filename = None
                    if utils.contains_vars(filename2):
                        if not utils.contains_vars(filename3):
                            target_filename = filename3
                        else:
                            target_filename = filename4
                    update_vars_cache(host,
                                      data,
                                      target_filename=target_filename)
                else:
                    self.vars = utils.combine_vars(self.vars, data)
                # we did process this file
                return True
            # we did not process this file
            return False
Example #9
def find_children(playbook, playbook_dir):
    if not os.path.exists(playbook[0]):
        return []
    if playbook[1] == 'role':
        playbook_ds = {'roles': [{'role': playbook[0]}]}
    else:
        try:
            playbook_ds = parse_yaml_from_file(playbook[0])
        except AnsibleError as e:
            raise SystemExit(str(e))
    results = []
    basedir = os.path.dirname(playbook[0])
    items = _playbook_items(playbook_ds)
    for item in items:
        for child in play_children(basedir, item, playbook[1], playbook_dir):
            if "$" in child['path'] or "{{" in child['path']:
                continue
            valid_tokens = list()
            for token in split_args(child['path']):
                if '=' in token:
                    break
                valid_tokens.append(token)
            path = ' '.join(valid_tokens)
            results.append({
                'path': path_dwim(basedir, path),
                'type': child['type']
            })
    return results
Example #10
  def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):

    if not module_args:
      result = dict(failed=True, msg="No source file given")
      return ReturnData(conn=conn, comm_ok=True, result=result)

    source = template.template(self.runner.basedir, module_args, inject)

    if '_original_file' in inject:
      source = utils.path_dwim_relative(inject['_original_file'], 'vars', source, self.runner.basedir, False)
    else:
      source = utils.path_dwim(self.runner.basedir, source)

    data = {}

    if os.path.exists(source):
      data = utils.parse_yaml_from_file(source, vault_password=self.runner.vault_pass)

      if data and type(data) != dict:
        raise errors.AnsibleError("%s must be stored as a dictionary/hash" % source)

    if not hasattr(conn.runner, 'mergeBuffer'):
      conn.runner.mergeBuffer = {}

    if conn.host in conn.runner.mergeBuffer:
      data = utils.merge_hash(conn.runner.mergeBuffer[conn.host], data)

    conn.runner.mergeBuffer[conn.host] = data

    result = dict(ansible_facts=data)
    return ReturnData(conn=conn, comm_ok=True, result=result)
Example #11
    def _parse_yaml(self):
        """ Load the inventory from a yaml file.

        returns hosts and groups"""
        data = utils.parse_yaml_from_file(self.inventory_file)

        if type(data) != list:
            raise errors.AnsibleError("YAML inventory should be a list.")

        hosts = []
        groups = {}

        ungrouped = []

        for item in data:
            if type(item) == dict:
                if "group" in item:
                    group_name = item["group"]

                    group_vars = []
                    if "vars" in item:
                        group_vars = item["vars"]

                    group_hosts = []
                    if "hosts" in item:
                        for host in item["hosts"]:
                            host_name = self._parse_yaml_host(host, group_vars)
                            group_hosts.append(host_name)

                    groups[group_name] = group_hosts
                    hosts.extend(group_hosts)

                elif "host" in item:
                    host_name = self._parse_yaml_host(item)
                    hosts.append(host_name)
                    ungrouped.append(host_name)
            else:
                host_name = self._parse_yaml_host(item)
                hosts.append(host_name)
                ungrouped.append(host_name)

        # filter duplicate hosts
        output_hosts = []
        for host in hosts:
            if host not in output_hosts:
                output_hosts.append(host)

        if len(ungrouped) > 0 :
            # hosts can be defined top-level, but also in a group
            really_ungrouped = []
            for host in ungrouped:
                already_grouped = False
                for name, group_hosts in groups.items():
                    if host in group_hosts:
                        already_grouped = True
                if not already_grouped:
                    really_ungrouped.append(host)
            groups["ungrouped"] = really_ungrouped

        return output_hosts, groups
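For reference, the list shape _parse_yaml accepts, written here as the already-parsed Python structure; the hostnames are made up, but the keys ('host', 'group', 'vars', 'hosts') come straight from the checks above.

    # A small inventory as _parse_yaml would see it after YAML parsing:
    # bare host strings, single-host dicts, and group dicts.
    data = [
        "standalone.example.com",
        {"host": "web1.example.com"},
        {"group": "dbservers",
         "vars": [{"db_port": 5432}],
         "hosts": ["db1.example.com", "db2.example.com"]},
    ]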
Example #12
        def process_files(filename,
                          filename2,
                          filename3,
                          filename4,
                          host=None):
            """ pseudo-algorithm for deciding where new vars should go """

            data = utils.parse_yaml_from_file(
                filename4, vault_password=self.vault_password)
            if data:
                if type(data) != dict:
                    raise errors.AnsibleError(
                        "%s must be stored as a dictionary/hash" % filename4)
                if host is not None:
                    if self._has_vars_in(
                            filename2) and not self._has_vars_in(filename3):
                        # running a host specific pass and has host specific variables
                        # load into setup cache
                        update_vars_cache(host, inject, data, filename4)
                    elif self._has_vars_in(
                            filename3) and not self._has_vars_in(filename4):
                        # handle mixed scope variables in filepath
                        update_vars_cache(host, inject, data, filename4)

                elif not self._has_vars_in(filename4):
                    # found a non-host specific variable, load into vars and NOT
                    # the setup cache
                    if host is not None:
                        self.vars.update(data)
                    else:
                        self.vars = utils.combine_vars(self.vars, data)
Example #13
    def _load_tasks(self, ds, keyname):
        ''' handle task and handler include statements '''

        tasks = ds.get(keyname, [])
        results = []
        for x in tasks:
            if 'include' in x:
                task_vars = self.vars.copy()
                tokens = shlex.split(x['include'])
                if 'with_items' in x:
                    items = utils.varReplaceWithItems(self.basedir, x['with_items'], task_vars)
                else:
                    items = ['']
                for item in items:
                    mv = task_vars.copy()
                    mv['item'] = item
                    for t in tokens[1:]:
                        (k,v) = t.split("=", 1)
                        mv[k] = utils.varReplaceWithItems(self.basedir, v, mv)
                    include_file = utils.template(self.basedir, tokens[0], mv)
                    data = utils.parse_yaml_from_file(utils.path_dwim(self.basedir, include_file))
                    for y in data:
                         results.append(Task(self,y,module_vars=mv.copy()))
            elif type(x) == dict:
                task_vars = self.vars.copy()
                results.append(Task(self,x,module_vars=task_vars))
            else:
                raise Exception("unexpected task type")

        for x in results:
            if self.tags is not None:
                x.tags.extend(self.tags)

        return results
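The include handling above leans on shlex.split; a standalone illustration of what the tokenisation yields for a parameterised include line (the file name and values are invented):

    import shlex

    tokens = shlex.split("wordpress.yml user=timmy database=wp")
    include_file = tokens[0]                        # 'wordpress.yml'
    params = dict(t.split("=", 1) for t in tokens[1:])
    # params == {'user': 'timmy', 'database': 'wp'}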
Example #15
    def _load_tasks(self, ds, keyname):
        ''' handle task and handler include statements '''

        tasks = ds.get(keyname, [])
        results = []
        for x in tasks:
            task_vars = self.vars.copy()
            if 'include' in x:
                tokens = shlex.split(x['include'])

                for t in tokens[1:]:
                    (k, v) = t.split("=", 1)
                    task_vars[k] = v
                include_file = tokens[0]
                data = utils.parse_yaml_from_file(
                    utils.path_dwim(self.playbook.basedir, include_file))
            elif type(x) == dict:
                data = [x]
            else:
                raise Exception("unexpected task type")
            for y in data:
                items = y.get('with_items', None)
                if items is None:
                    items = ['']
                elif isinstance(items, basestring):
                    items = utils.varLookup(items, task_vars)
                for item in items:
                    mv = task_vars.copy()
                    mv['item'] = item
                    results.append(Task(self, y, module_vars=mv))
        return results
Example #16
    def run(self,
            conn,
            tmp,
            module_name,
            module_args,
            inject,
            complex_args=None,
            **kwargs):

        if not module_args:
            result = dict(failed=True, msg="No source file given")
            return ReturnData(conn=conn, comm_ok=True, result=result)

        source = module_args
        source = template.template(self.runner.basedir, source, inject)

        if '_original_file' in inject:
            source = utils.path_dwim_relative(inject['_original_file'], 'vars',
                                              source, self.runner.basedir)
        else:
            source = utils.path_dwim(self.runner.basedir, source)

        if os.path.exists(source):
            data = utils.parse_yaml_from_file(
                source, vault_password=self.runner.vault_pass)
            if type(data) != dict:
                raise errors.AnsibleError(
                    "%s must be stored as a dictionary/hash" % source)
            result = dict(ansible_facts=data)
            return ReturnData(conn=conn, comm_ok=True, result=result)
        else:
            result = dict(failed=True,
                          msg="Source file not found.",
                          file=source)
            return ReturnData(conn=conn, comm_ok=True, result=result)
Example #17
    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):

        if not module_args:
            result = dict(failed=True, msg="No source file given")
            return ReturnData(conn=conn, comm_ok=True, result=result)

        source = module_args
        source = template.template(self.runner.basedir, source, inject)

        if '_original_file' in inject:
            source = utils.path_dwim_relative(inject['_original_file'], 'vars', source, self.runner.basedir)
        else:
            source = utils.path_dwim(self.runner.basedir, source)

        if os.path.exists(source):
            data = utils.parse_yaml_from_file(source, vault_password=self.runner.vault_pass)
            if data and type(data) != dict:
                raise errors.AnsibleError("%s must be stored as a dictionary/hash" % source)
            elif data is None:
                data = {}
            result = dict(ansible_facts=data)
            return ReturnData(conn=conn, comm_ok=True, result=result)
        else:
            result = dict(failed=True, msg="Source file not found.", file=source)
            return ReturnData(conn=conn, comm_ok=True, result=result)
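The `data is None` branch is what separates Example #17 from Example #16: an empty vars file parses to None rather than {}. A minimal reproduction with PyYAML:

    import yaml

    data = yaml.safe_load("")        # an empty vars file parses to None
    if data and not isinstance(data, dict):
        raise ValueError("vars file must parse to a dictionary/hash")
    elif data is None:
        data = {}                    # normalise so callers always get a dict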
Example #19
    def _load_playbook_from_file(self, path):
        '''
        do some top level error checking on playbooks and allow them to include other
        playbooks.
        '''

        playbook_data  = utils.parse_yaml_from_file(path)
        accumulated_plays = []

        if type(playbook_data) != list:
           raise errors.AnsibleError(
               "parse error: playbooks must be formatted as a YAML list"
           )

        for play in playbook_data:
           if type(play) != dict:
               raise errors.AnsibleError(
                   "parse error: each play in a playbook must a YAML dictionary (hash), recieved: %s" % play
               )
           if 'include' in play:
               if len(play.keys()) == 1:
                   included_path = utils.path_dwim(self.basedir, play['include'])
                   accumulated_plays.extend(self._load_playbook_from_file(included_path))
               else:
                   raise errors.AnsibleError(
                       "parse error: top level includes cannot be used with other directives: %s" % play
                   )
           else:
               accumulated_plays.append(play)

        return accumulated_plays
Example #20
    def _load_tasks(self, ds, keyname):
        ''' handle task and handler include statements '''

        tasks = ds.get(keyname, [])
        results = []
        for x in tasks:
            task_vars = self.vars.copy()
            if 'include' in x:
                tokens = shlex.split(x['include'])
                for t in tokens[1:]:
                    (k, v) = t.split("=", 1)
                    task_vars[k] = utils.template(v, task_vars)
                include_file = utils.template(tokens[0], task_vars)
                data = utils.parse_yaml_from_file(
                    utils.path_dwim(self.basedir, include_file))
            elif type(x) == dict:
                data = [x]
            else:
                raise Exception("unexpected task type")

            for y in data:
                mv = task_vars.copy()
                results.append(Task(self, y, module_vars=mv))

        for x in results:
            if self.tags is not None:
                x.tags.extend(self.tags)

        return results
Example #21
    def _load_tasks(self, ds, keyname):
        ''' handle task and handler include statements '''

        tasks = ds.get(keyname, [])
        results = []
        for x in tasks:
            task_vars = self.vars.copy()
            if 'include' in x:
                tokens = shlex.split(x['include'])

                for t in tokens[1:]:
                    (k,v) = t.split("=", 1)
                    task_vars[k]=v
                include_file = tokens[0]
                data = utils.parse_yaml_from_file(utils.path_dwim(self.playbook.basedir, include_file))
            elif type(x) == dict:
                data = [x]
            else:
                raise Exception("unexpected task type")
            for y in data:
                items = y.get('with_items',None)
                if items is None:
                    items = [ '' ]
                elif isinstance(items, basestring):
                    items = utils.varLookup(items, task_vars)
                for item in items:
                    mv = task_vars.copy()
                    mv['item'] = item
                    results.append(Task(self,y,module_vars=mv))
        return results
Example #22
    def _parse_playbook(self, playbook):
        ''' load YAML file, including handling for imported files '''
        
        dirname  = os.path.dirname(playbook)
        playbook = utils.parse_yaml_from_file(playbook)

        for play in playbook:
            tasks = play.get('tasks',[])
            handlers = play.get('handlers', [])

            # process tasks in this file as well as imported tasks
            new_tasks = []
            for task in tasks:
                if 'include' in task:
                    self._include_tasks(play, task, dirname, new_tasks)
                else:
                    new_tasks.append(task)
            play['tasks'] = new_tasks

            # process handlers as well as imported handlers
            new_handlers = [] 
            for handler in handlers:
                if 'include' in handler:
                    self._include_handlers(play, handler, dirname, new_handlers)
                else:
                    new_handlers.append(handler)
            play['handlers'] = new_handlers

        return playbook
Example #23
    def _do_conditional_imports(self, vars_files, pattern=None):
        ''' handle the vars_files section, which can contain variables '''

        # FIXME: save parsed variable results in memory to avoid excessive re-reading/parsing
        # FIXME: currently parses imports for hosts not in the pattern, that is not wrong, but it's 
        #        not super optimized yet either, because we wouldn't have hit them, ergo
        #        it will raise false errors if there is no defaults variable file without any $vars
        #        in it, which could happen on uncontacted hosts.

        if type(vars_files) != list:
            raise errors.AnsibleError("vars_files must be a list")

        host_list = [ h for h in self.inventory.list_hosts(pattern)
                        if not (h in self.stats.failures or h in self.stats.dark) ]

        for host in host_list:
            cache_vars = SETUP_CACHE.get(host,{})
            SETUP_CACHE[host] = cache_vars
            for filename in vars_files:
                if type(filename) == list:
                    # loop over all filenames, loading the first one, and failing if none found
                    found = False
                    sequence = []
                    for real_filename in filename:
                        filename2 = utils.path_dwim(self.basedir, utils.template(real_filename, cache_vars, SETUP_CACHE))
                        sequence.append(filename2)
                        if os.path.exists(filename2):
                            found = True
                            data = utils.parse_yaml_from_file(filename2)
                            SETUP_CACHE[host].update(data)
                            self.callbacks.on_import_for_host(host, filename2)
                            break
                        else:
                            self.callbacks.on_not_import_for_host(host, filename2)
                    if not found:
                        raise errors.AnsibleError(
                            "%s: FATAL, no files matched for vars_files import sequence: %s" % (host, sequence)
                        )

                else:
                    filename2 = utils.path_dwim(self.basedir, utils.template(filename, cache_vars, SETUP_CACHE))
                    if not os.path.exists(filename2):
                        raise errors.AnsibleError("no file matched for vars_file import: %s" % filename2)
                    data = utils.parse_yaml_from_file(filename2)
                    SETUP_CACHE[host].update(data)
                    self.callbacks.on_import_for_host(host, filename2)
Example #24
def find_children(playbook):
    if not os.path.exists(playbook[0]):
        return []
    results = []
    basedir = os.path.dirname(playbook[0])
    try:
        pb_data = parse_yaml_from_file(playbook[0])
    except AnsibleError as e:
        raise SystemExit(str(e))
Example #25
def main(argv=None):
    if not argv:
        argv = sys.argv

    # Parse the command-line flags.
    flags = parser.parse_args(argv[1:])

    # set logging level
    logger.setLevel(logging.DEBUG)
    h1 = logging.StreamHandler(sys.stdout)
    h1.setLevel(getattr(logging, flags.logging_level))
    logger.addHandler(h1)

    # load data
    acl = utils.parse_yaml_from_file(flags.acl)['acl']
    keys = utils.parse_yaml_from_file(flags.keys)['keys']

    sync_acl(acl, keys, flags.key_name, flags.project)
Example #26
    def _parse_playbook(self, playbook):
        ''' load YAML file, including handling for imported files '''

        dirname = os.path.dirname(playbook)
        playbook = utils.parse_yaml_from_file(playbook)

        for play in playbook:
            tasks = play.get('tasks', [])
            handlers = play.get('handlers', [])

            # process tasks in this file as well as imported tasks
            new_tasks = []
            for task in tasks:
                if 'include' in task:
                    self._include_tasks(play, task, dirname, new_tasks)
                else:
                    new_tasks.append(task)

            # now new_tasks contains a list of tasks, but tasks may contain
            # lists of with_items to loop over.  Do that.
            # TODO: refactor into subfunction
            new_tasks2 = []
            for task in new_tasks:
                if 'with_items' in task:
                    for item in task['with_items']:
                        produced_task = {}
                        name = task.get('name',
                                        task.get('action', 'unnamed task'))
                        action = task.get('action', None)
                        only_if = task.get('only_if', None)
                        if action is None:
                            raise errors.AnsibleError('action is required')
                        produced_task = task.copy()
                        produced_task['action'] = utils.template(
                            action, dict(item=item))
                        produced_task['name'] = utils.template(
                            name, dict(item=item))
                        if only_if:
                            produced_task['only_if'] = utils.template(
                                only_if, dict(item=item))
                        new_tasks2.append(produced_task)
                else:
                    new_tasks2.append(task)

            play['tasks'] = new_tasks2

            # process handlers as well as imported handlers
            new_handlers = []
            for handler in handlers:
                if 'include' in handler:
                    self._include_handlers(play, handler, dirname,
                                           new_handlers)
                else:
                    new_handlers.append(handler)
            play['handlers'] = new_handlers

        return playbook
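The with_items expansion in this example turns one task into N concrete copies with the loop variable substituted. A toy illustration of that copy-and-template step, using a trivial stand-in for utils.template that only replaces $item:

    def template(text, context):
        # Toy stand-in for utils.template: substitute $item only.
        return text.replace("$item", str(context["item"]))

    task = {"name": "install $item",
            "action": "yum pkg=$item state=installed",
            "with_items": ["nginx", "postgresql"]}
    expanded = []
    for item in task["with_items"]:
        produced = task.copy()
        produced["action"] = template(task["action"], {"item": item})
        produced["name"] = template(task["name"], {"item": item})
        expanded.append(produced)
    # expanded holds one fully templated task per item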
Example #27
    def _load_playbook_from_file(self, path, vars={}):
        '''
        run top level error checking on playbooks and allow them to include other playbooks.
        '''

        playbook_data  = utils.parse_yaml_from_file(path)
        accumulated_plays = []
        play_basedirs = []

        if type(playbook_data) != list:
            raise errors.AnsibleError("parse error: playbooks must be formatted as a YAML list")

        basedir = os.path.dirname(path)
        utils.plugins.push_basedir(basedir)
        for play in playbook_data:
            if type(play) != dict:
                raise errors.AnsibleError("parse error: each play in a playbook must a YAML dictionary (hash), recieved: %s" % play)
            if 'include' in play:
                if len(play.keys()) <= 2:
                    tokens = shlex.split(play['include'])

                    items = ['']
                    for k in play.keys():
                        if not k.startswith("with_"):
                            continue
                        plugin_name = k[5:]
                        if plugin_name not in utils.plugins.lookup_loader:
                            raise errors.AnsibleError("cannot find lookup plugin named %s for usage in with_%s" % (plugin_name, plugin_name))
                        terms = utils.template_ds(basedir, play[k], vars)
                        items = utils.plugins.lookup_loader.get(plugin_name, basedir=basedir, runner=None).run(terms, inject=vars)
                        break

                    for item in items:
                        incvars = vars.copy()
                        incvars['item'] = item
                        for t in tokens[1:]:
                            (k,v) = t.split("=", 1)
                            incvars[k] = utils.template_ds(basedir, v, incvars)
                        included_path = utils.path_dwim(basedir, tokens[0])
                        (plays, basedirs) = self._load_playbook_from_file(included_path, incvars)
                        for p in plays:
                            if 'vars' not in p:
                                p['vars'] = {}
                            if isinstance(p['vars'], dict):
                                p['vars'].update(incvars)
                            elif isinstance(p['vars'], list):
                                p['vars'].extend([dict(k=v) for k,v in incvars.iteritems()])
                        accumulated_plays.extend(plays)
                        play_basedirs.extend(basedirs)

                else:
                    raise errors.AnsibleError("parse error: playbook includes cannot be used with other directives: %s" % play)
            else:
                accumulated_plays.append(play)
                play_basedirs.append(basedir)

        return (accumulated_plays, play_basedirs)
Example #28
    def _update_vars_files_for_host(self, host):

        if not host in self.playbook.SETUP_CACHE:
            # no need to process failed hosts or hosts not in this play
            return

        for filename in self.vars_files:

            if type(filename) == list:

                # loop over all filenames, loading the first one, and failing if none found
                found = False
                sequence = []
                for real_filename in filename:
                    filename2 = utils.template(real_filename,
                                               self.playbook.SETUP_CACHE[host])
                    filename2 = utils.template(filename2, self.vars)
                    filename2 = utils.path_dwim(self.playbook.basedir,
                                                filename2)
                    sequence.append(filename2)
                    if os.path.exists(filename2):
                        found = True
                        data = utils.parse_yaml_from_file(filename2)
                        self.playbook.SETUP_CACHE[host].update(data)
                        self.playbook.callbacks.on_import_for_host(
                            host, filename2)
                        break
                    else:
                        self.playbook.callbacks.on_not_import_for_host(
                            host, filename2)
                if not found:
                    raise errors.AnsibleError(
                        "%s: FATAL, no files matched for vars_files import sequence: %s"
                        % (host, sequence))

            else:

                filename2 = utils.template(filename,
                                           self.playbook.SETUP_CACHE[host])
                filename2 = utils.template(filename2, self.vars)
                fpath = utils.path_dwim(self.playbook.basedir, filename2)
                new_vars = utils.parse_yaml_from_file(fpath)
                if new_vars:
                    self.playbook.SETUP_CACHE[host].update(new_vars)
Example #29
    def _load_playbook_from_file(self, path, vars={}, vars_files=[]):
        '''
        run top level error checking on playbooks and allow them to include other playbooks.
        '''

        playbook_data = utils.parse_yaml_from_file(
            path, vault_password=self.vault_password)
        accumulated_plays = []
        play_basedirs = []

        if type(playbook_data) != list:
            raise errors.AnsibleError(
                "parse error: playbooks must be formatted as a YAML list, got %s"
                % type(playbook_data))

        basedir = os.path.dirname(path) or '.'
        utils.plugins.push_basedir(basedir)
        for play in playbook_data:
            if type(play) != dict:
                raise errors.AnsibleError(
                    "parse error: each play in a playbook must be a YAML dictionary (hash), received: %s"
                    % play)

            if 'include' in play:
                # a playbook (list of plays) decided to include some other list of plays
                # from another file.  The result is a flat list of plays in the end.

                play_vars = self._get_playbook_vars(play, vars)
                play_vars_files = self._get_playbook_vars_files(
                    play, vars_files)
                inc_vars, inc_path = self._get_include_info(
                    play, basedir, play_vars)
                play_vars.update(inc_vars)

                included_path = utils.path_dwim(
                    basedir, template(basedir, inc_path, play_vars))
                (plays, basedirs) = self._load_playbook_from_file(
                    included_path, vars=play_vars, vars_files=play_vars_files)
                for p in plays:
                    # support for parameterized play includes works by passing
                    # those variables along to the subservient play
                    p['vars'] = self._extend_play_vars(p, play_vars)
                    # now add in the vars_files
                    p['vars_files'] = utils.list_union(p.get('vars_files', []),
                                                       play_vars_files)

                accumulated_plays.extend(plays)
                play_basedirs.extend(basedirs)

            else:

                # this is a normal (non-included play)
                accumulated_plays.append(play)
                play_basedirs.append(basedir)

        return (accumulated_plays, play_basedirs)
Example #30
    def _load_playbook_from_file(self, path, vars={}, vars_files=[]):
        '''
        run top level error checking on playbooks and allow them to include other playbooks.
        '''

        playbook_data  = utils.parse_yaml_from_file(path, vault_password=self.vault_password)
        accumulated_plays = []
        play_basedirs = []

        if type(playbook_data) != list:
            raise errors.AnsibleError("parse error: playbooks must be formatted as a YAML list, got %s" % type(playbook_data))

        basedir = os.path.dirname(path) or '.'
        utils.plugins.push_basedir(basedir)
        for play in playbook_data:
            if type(play) != dict:
                raise errors.AnsibleError("parse error: each play in a playbook must be a YAML dictionary (hash), recieved: %s" % play)

            if 'include' in play:
                # a playbook (list of plays) decided to include some other list of plays
                # from another file.  The result is a flat list of plays in the end.

                play_vars = self._get_playbook_vars(play, vars)
                play_vars_files = self._get_playbook_vars_files(play, vars_files)
                inc_vars, inc_path = self._get_include_info(play, basedir, play_vars)
                play_vars.update(inc_vars)

                included_path = utils.path_dwim(basedir, template(basedir, inc_path, play_vars))
                (plays, basedirs) = self._load_playbook_from_file(included_path, vars=play_vars, vars_files=play_vars_files)
                for p in plays:
                    # support for parameterized play includes works by passing
                    # those variables along to the subservient play
                    if 'vars' not in p:
                        p['vars'] = {}
                    if isinstance(p['vars'], dict):
                        p['vars'].update(play_vars)
                    elif isinstance(p['vars'], list):
                        # nobody should really do this, but handle vars: a=1 b=2
                        p['vars'].extend([{k:v} for k,v in play_vars.iteritems()])
                    elif p['vars'] == None:
                        # someone specified an empty 'vars:', so reset
                        # it to the vars we currently have
                        p['vars'] = play_vars.copy()
                    # now add in the vars_files
                    p['vars_files'] = utils.list_union(p.get('vars_files', []), play_vars_files)

                accumulated_plays.extend(plays)
                play_basedirs.extend(basedirs)

            else:

                # this is a normal (non-included play)
                accumulated_plays.append(play)
                play_basedirs.append(basedir)

        return (accumulated_plays, play_basedirs)
Example #31
def find_children(playbook, playbook_dir):
    if not os.path.exists(playbook[0]):
        return []
    if playbook[1] == 'role':
        playbook_ds = {'roles': [{'role': playbook[0]}]}
    else:
        try:
            playbook_ds = parse_yaml_from_file(playbook[0])
        except AnsibleError as e:
            raise SystemExit(str(e))
Example #32
    def _read_from_file(self, file_path, databag):
        data = parse_yaml_from_file(file_path, vault_password="")
        if data and type(data) != dict:
            self.module.fail_json(msg="%s must be stored as a dictionary/hash" % file_path)
        elif data is None:
            data = {}

        if databag:
            data = self.convert_chef_user_data_bag(data)
        return data
Example #35
    def run(self, host):
        # return the inventory variables for the host

        inventory = self.inventory
        #hostrec = inventory.get_host(host)

        groupz = sorted(inventory.groups_for_host(host.name),
                        key=lambda g: g.depth)
        groups = [g.name for g in groupz]
        basedir = inventory.basedir()

        if basedir is None:
            # could happen when inventory is passed in via the API
            return

        results = {}

        # load vars in inventory_dir/group_vars/name_of_group
        for x in groups:
            p = os.path.join(basedir, "group_vars/%s" % x)
            paths = [p, '.'.join([p, 'yml']), '.'.join([p, 'yaml'])]
            for path in paths:
                if os.path.exists(path):
                    data = utils.parse_yaml_from_file(path)
                    if type(data) != dict:
                        raise errors.AnsibleError(
                            "%s must be stored as a dictionary/hash" % path)
                    results = utils.combine_vars(results, data)

        # load vars in inventory_dir/host_vars/name_of_host
        p = os.path.join(basedir, "host_vars/%s" % host.name)
        paths = [p, '.'.join([p, 'yml']), '.'.join([p, 'yaml'])]
        for path in paths:
            if os.path.exists(path):
                data = utils.parse_yaml_from_file(path)
                if type(data) != dict:
                    raise errors.AnsibleError(
                        "%s must be stored as a dictionary/hash" % path)
                results = utils.combine_vars(results, data)

        return results
Example #36
    def run(self, host):
        # return the inventory variables for the host

        inventory = self.inventory
        #hostrec = inventory.get_host(host)

        groupz = sorted(inventory.groups_for_host(host.name), key=lambda g: g.depth)
        groups = [ g.name for g in groupz ]
        basedir = inventory.basedir()

        if basedir is None:
            # could happen when inventory is passed in via the API
            return

        results = {}

        # load vars in playbook_dir/group_vars/name_of_group
        for x in groups:
            path = os.path.join(basedir, "group_vars/%s" % x)
            if os.path.exists(path):
                data = utils.parse_yaml_from_file(path)
                if type(data) != dict:
                    raise errors.AnsibleError("%s must be stored as a dictionary/hash" % path)
                if C.DEFAULT_HASH_BEHAVIOUR == "merge":
                    # let data content override results if needed
                    results = utils.merge_hash(results, data)
                else:
                    results.update(data)

        # load vars in playbook_dir/host_vars/name_of_host
        path = os.path.join(basedir, "host_vars/%s" % host.name)
        if os.path.exists(path):
            data = utils.parse_yaml_from_file(path)
            if type(data) != dict:
                raise errors.AnsibleError("%s must be stored as a dictionary/hash" % path)
            if C.DEFAULT_HASH_BEHAVIOUR == "merge":
                # let data content override results if needed
                results = utils.merge_hash(results, data)
            else:
                results.update(data)
        return results
Example #37
    def _load_tasks(self, tasks, vars={}, additional_conditions=[]):
        ''' handle task and handler include statements '''

        results = []
        if tasks is None:
            # support empty handler files, and the like.
            tasks = []

        for x in tasks:
            task_vars = self.vars.copy()
            task_vars.update(vars)
            if 'include' in x:
                tokens = shlex.split(x['include'])
                items = ['']
                included_additional_conditions = list(additional_conditions)
                for k in x:
                    if k.startswith("with_"):
                        plugin_name = k[5:]
                        if plugin_name not in utils.plugins.lookup_loader:
                            raise errors.AnsibleError("cannot find lookup plugin named %s for usage in with_%s" % (plugin_name, plugin_name))
                        terms = utils.template_ds(self.basedir, x[k], task_vars)
                        items = utils.plugins.lookup_loader.get(plugin_name, basedir=self.basedir, runner=None).run(terms, inject=task_vars)
                    elif k.startswith("when_"):
                        included_additional_conditions.append(utils.compile_when_to_only_if("%s %s" % (k[5:], x[k])))
                    elif k in ("include", "vars", "only_if"):
                        pass
                    else:
                        raise errors.AnsibleError("parse error: task includes cannot be used with other directives: %s" % k)

                if 'vars' in x:
                    task_vars.update(x['vars'])
                if 'only_if' in x:
                    included_additional_conditions.append(x['only_if'])

                for item in items:
                    mv = task_vars.copy()
                    mv['item'] = item
                    for t in tokens[1:]:
                        (k,v) = t.split("=", 1)
                        mv[k] = utils.template_ds(self.basedir, v, mv)
                    include_file = utils.template(self.basedir, tokens[0], mv)
                    data = utils.parse_yaml_from_file(utils.path_dwim(self.basedir, include_file))
                    results += self._load_tasks(data, mv, included_additional_conditions)
            elif type(x) == dict:
                results.append(Task(self,x,module_vars=task_vars, additional_conditions=additional_conditions))
            else:
                raise Exception("unexpected task type")

        for x in results:
            if self.tags is not None:
                x.tags.extend(self.tags)

        return results
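Beispiel #37 tokenizes the `include:` line with `shlex.split`, so `include: tasks.yml a=1 b='x y'` yields the filename followed by `key=value` parameters. The parsing step in isolation, independent of the Ansible classes:

import shlex

# Parse an old-style include line into a filename plus parameter dict,
# mirroring the tokens[0] / tokens[1:] split used in the example above.
def parse_include(line):
    tokens = shlex.split(line)
    params = {}
    for t in tokens[1:]:
        k, v = t.split("=", 1)
        params[k] = v
    return tokens[0], params

print(parse_include("tasks.yml a=1 b='x y'"))
# ('tasks.yml', {'a': '1', 'b': 'x y'})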
Beispiel #38
0
    def _load_role_defaults(self, defaults_files):
        # process default variables
        default_vars = {}
        for filename in defaults_files:
            if os.path.exists(filename):
                new_default_vars = utils.parse_yaml_from_file(filename, vault_password=self.vault_password)
                if new_default_vars:
                    if type(new_default_vars) != dict:
                        raise errors.AnsibleError("%s must be stored as dictionary/hash: %s" % (filename, type(new_default_vars)))
                    default_vars = utils.combine_vars(default_vars, new_default_vars)

        return default_vars
Beispiel #40
0
    def _parse_playbook(self, playbook):
        ''' load YAML file, including handling for imported files '''
        
        dirname  = os.path.dirname(playbook)
        playbook = utils.parse_yaml_from_file(playbook)

        for play in playbook:
            tasks = play.get('tasks',[])
            handlers = play.get('handlers', [])

            # process tasks in this file as well as imported tasks
            new_tasks = []
            for task in tasks:
                if 'include' in task:
                    self._include_tasks(play, task, dirname, new_tasks)
                else:
                    new_tasks.append(task)

            # now new_tasks contains a list of tasks, but tasks may contain
            # lists of with_items to loop over.  Do that.
            # TODO: refactor into subfunction
            new_tasks2 = []
            for task in new_tasks:
                if 'with_items' in task:
                    for item in task['with_items']:
                        produced_task = {}
                        name    = task.get('name', task.get('action', 'unnamed task'))
                        action  = task.get('action', None)
                        only_if = task.get('only_if', None)
                        if action is None:
                            raise errors.AnsibleError('action is required')
                        produced_task = task.copy()
                        produced_task['action'] = utils.template(action, dict(item=item))
                        produced_task['name'] = utils.template(name, dict(item=item))
                        if only_if:
                            produced_task['only_if'] = utils.template(only_if, dict(item=item))
                        new_tasks2.append(produced_task)
                else:
                    new_tasks2.append(task)

            play['tasks'] = new_tasks2

            # process handlers as well as imported handlers
            new_handlers = [] 
            for handler in handlers:
                if 'include' in handler:
                    self._include_handlers(play, handler, dirname, new_handlers)
                else:
                    new_handlers.append(handler)
            play['handlers'] = new_handlers

        return playbook
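The with_items expansion in Beispiel #40 turns one task into N concrete tasks by templating the item into the action string (the name and only_if fields get the same treatment). A reduced sketch, using a trivial `$item` substitution as a stand-in for `utils.template`:

# Reduced sketch of with_items expansion; substitute() stands in for utils.template.
def substitute(text, variables):
    for k, v in variables.items():
        text = text.replace("$%s" % k, str(v))
    return text

def expand_with_items(task):
    if "with_items" not in task:
        return [task]
    expanded = []
    for item in task["with_items"]:
        produced = dict(task)
        produced.pop("with_items")
        produced["action"] = substitute(task["action"], {"item": item})
        expanded.append(produced)
    return expanded

print(expand_with_items({"action": "yum name=$item state=installed",
                         "with_items": ["httpd", "mysql-server"]}))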
Beispiel #41
0
    def _update_vars_files_for_host(self, host):

        if not host in self.playbook.SETUP_CACHE:
            # no need to process failed hosts or hosts not in this play
            return

        for filename in self.vars_files:

            if type(filename) == list:

                # loop over all filenames, loading the first one, and failing if none found
                found = False
                sequence = []
                for real_filename in filename:
                    filename2 = utils.template(real_filename, self.playbook.SETUP_CACHE[host])
                    filename2 = utils.template(filename2, self.vars)
                    filename2 = utils.path_dwim(self.playbook.basedir, filename2)
                    sequence.append(filename2)
                    if os.path.exists(filename2):
                        found = True
                        data = utils.parse_yaml_from_file(filename2)
                        self.playbook.SETUP_CACHE[host].update(data)
                        self.playbook.callbacks.on_import_for_host(host, filename2)
                        break
                    else:
                        self.playbook.callbacks.on_not_import_for_host(host, filename2)
                if not found:
                    raise errors.AnsibleError(
                        "%s: FATAL, no files matched for vars_files import sequence: %s" % (host, sequence)
                    )

            else:

                filename2 = utils.template(filename, self.playbook.SETUP_CACHE[host])
                filename2 = utils.template(filename2, self.vars)
                fpath = utils.path_dwim(self.playbook.basedir, filename2)
                new_vars = utils.parse_yaml_from_file(fpath)
                if new_vars:
                    self.playbook.SETUP_CACHE[host].update(new_vars)
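When a `vars_files` entry is itself a list, Beispiel #41 implements first-match-wins: every candidate is templated and recorded for the error message, but only the first existing file is parsed. The same control flow in isolation:

import os

# First-match-wins over a list of candidate vars files; raise if none exist.
def first_existing(candidates):
    tried = []
    for path in candidates:
        tried.append(path)
        if os.path.exists(path):
            return path
    raise IOError("no files matched for vars_files import sequence: %s" % tried)

print(first_existing(["vars/missing.yml", __file__]))  # falls through to this script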
Beispiel #42
0
    def _load_play_from_file(self, path, vars={}):
        '''
        run top level error checking on playbooks and allow them to include other playbooks.
        '''

        task_data = utils.parse_yaml_from_file(path)

        if not isinstance(task_data, dict):
            raise errors.AnsibleError("parse error: tasks must be formatted as a YAML dict")

        data, basedir = self._prepare_play(task_data, self.basedir, vars)

        return data, basedir
Beispiel #43
0
    def run(self, host):

        """ Main body of the plugin, does actual loading """

        results = {}

        # Load config
        config = self.get_config()
        if config is None:
            return results

        # Calculate profiles path (path to the 'profiles/' directory)
        profiles_path = self.get_profiles_path()
        if profiles_path is None:
            return results
        
        # Prepare absolute profile path (path to the actual profile folder
        # in 'profiles/' folder)
        profile_path = os.path.join(profiles_path, config['profile'])
        if not os.path.exists(profile_path) or not os.path.isdir(profile_path):
            raise errors.AnsibleError("There is no such profile: %s" % profile_path)            
        
        # Start from specified profile path
        current_path = os.path.abspath(profile_path)
        
        # Traverse directories up, until we reach 'profiles_path'
        while True:
            
            vars_path = os.path.join(current_path, "vars.yml")
            
            if (os.path.exists(vars_path) and 
                os.path.isfile(vars_path) and
                os.stat(vars_path).st_size != 0):            
            
                data = utils.parse_yaml_from_file(vars_path)
                if type(data) != dict:
                    raise errors.AnsibleError("%s must be stored as a dictionary/hash" % vars_path)            
                
                results = utils.combine_vars(data, results)
            
            # if we have reached the profiles folder, we have traversed
            # every directory up to it
            if current_path == profiles_path:
                break
            
            # select parent directory
            current_path = os.path.abspath(os.path.join(current_path, os.pardir))
            
        # all done, results is a dictionary of variables
        return results
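The profile plugin above walks from the selected profile directory up to the `profiles/` root, merging each `vars.yml` it finds so that deeper, more specific files take precedence. The traversal itself, stripped of the YAML handling (hypothetical paths in the demo):

import os

# Walk from start_dir up to stop_dir (inclusive), yielding each directory.
def walk_up(start_dir, stop_dir):
    current = os.path.abspath(start_dir)
    stop = os.path.abspath(stop_dir)
    while True:
        yield current
        if current == stop:
            break
        parent = os.path.abspath(os.path.join(current, os.pardir))
        if parent == current:  # hit the filesystem root without finding stop_dir
            break
        current = parent

for d in walk_up("/etc/profiles/eu/web", "/etc/profiles"):
    print(d)
# /etc/profiles/eu/web, /etc/profiles/eu, /etc/profiles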
Beispiel #44
0
 def _build_role_dependencies(self, roles, dep_stack, passed_vars={}, level=0):
     # this number is arbitrary, but it seems sane
     if level > 20:
         raise errors.AnsibleError("too many levels of recursion while resolving role dependencies")
     for role in roles:
         role_path,role_vars = self._get_role_path(role)
         role_vars = utils.combine_vars(role_vars, passed_vars)
         vars = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'vars')))
         vars_data = {}
         if os.path.isfile(vars):
             vars_data = utils.parse_yaml_from_file(vars)
             if vars_data:
                 role_vars = utils.combine_vars(vars_data, role_vars)
         defaults = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'defaults')))
         defaults_data = {}
         if os.path.isfile(defaults):
             defaults_data = utils.parse_yaml_from_file(defaults)
         # the meta directory contains the yaml that should
         # hold the list of dependencies (if any)
         meta = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'meta')))
         if os.path.isfile(meta):
             data = utils.parse_yaml_from_file(meta)
             if data:
                 dependencies = data.get('dependencies',[])
                 for dep in dependencies:
                     (dep_path,dep_vars) = self._get_role_path(dep)
                     meta = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'meta')))
                     if os.path.isfile(meta):
                         meta_data = utils.parse_yaml_from_file(meta)
                         if meta_data:
                             allow_dupes = utils.boolean(meta_data.get('allow_duplicates',''))
                             if not allow_dupes:
                                 if dep in self.included_roles:
                                     continue
                                 else:
                                     self.included_roles.append(dep)
                     dep_vars = utils.combine_vars(passed_vars, dep_vars)
                     dep_vars = utils.combine_vars(role_vars, dep_vars)
                     vars = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'vars')))
                     vars_data = {}
                     if os.path.isfile(vars):
                         vars_data = utils.parse_yaml_from_file(vars)
                         if vars_data:
                             dep_vars = utils.combine_vars(vars_data, dep_vars)
                     defaults = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'defaults')))
                     dep_defaults_data = {}
                     if os.path.isfile(defaults):
                         dep_defaults_data = utils.parse_yaml_from_file(defaults)
                     if 'role' in dep_vars:
                         del dep_vars['role']
                     self._build_role_dependencies([dep], dep_stack, passed_vars=dep_vars, level=level+1)
                     dep_stack.append([dep,dep_path,dep_vars,dep_defaults_data])
         # only add the current role when we're at the top level,
         # otherwise we'll end up in a recursive loop 
         if level == 0:
             dep_stack.append([role,role_path,role_vars,defaults_data])
     return dep_stack
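The `allow_duplicates` check in Beispiel #44 is what keeps a shared dependency from being instantiated twice: unless a role's meta file opts in, a dependency already present in `self.included_roles` is skipped. The gate in isolation (`seen` stands in for `self.included_roles`; plain `bool()` simplifies `utils.boolean`, which also parses 'yes'/'no' strings):

# Skip a dependency already instantiated unless it opts in via allow_duplicates.
def should_include(dep, meta_data, seen):
    allow_dupes = bool(meta_data.get("allow_duplicates", False))
    if not allow_dupes and dep in seen:
        return False
    seen.append(dep)
    return True

seen = []
print(should_include("common", {}, seen))                          # True
print(should_include("common", {}, seen))                          # False
print(should_include("common", {"allow_duplicates": True}, seen))  # True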
Beispiel #45
0
def boilerplate_module(modfile, args, interpreter, check):
    """ simulate what ansible does with new style modules """

    #module_fh = open(modfile)
    #module_data = module_fh.read()
    #module_fh.close()

    replacer = module_common.ModuleReplacer()

    #included_boilerplate = module_data.find(module_common.REPLACER) != -1 or module_data.find("import ansible.module_utils") != -1

    complex_args = {}
    if args.startswith("@"):
        # Argument is a YAML file (JSON is a subset of YAML)
        complex_args = utils.combine_vars(complex_args,
                                          utils.parse_yaml_from_file(args[1:]))
        args = ''
    elif args.startswith("{"):
        # Argument is a YAML document (not a file)
        complex_args = utils.combine_vars(complex_args, utils.parse_yaml(args))
        args = ''

    inject = {}
    if interpreter:
        if '=' not in interpreter:
            print 'interpreter must be in the form of ansible_python_interpreter=/usr/bin/python'
            sys.exit(1)
        interpreter_type, interpreter_path = interpreter.split('=')
        if not interpreter_type.startswith('ansible_'):
            interpreter_type = 'ansible_%s' % interpreter_type
        if not interpreter_type.endswith('_interpreter'):
            interpreter_type = '%s_interpreter' % interpreter_type
        inject[interpreter_type] = interpreter_path

    if check:
        complex_args['CHECKMODE'] = True

    (module_data, module_style,
     shebang) = replacer.modify_module(modfile, complex_args, args, inject)

    modfile2_path = os.path.expanduser("~/.ansible_module_generated")
    print "* including generated source, if any, saving to: %s" % modfile2_path
    print "* this may offset any line numbers in tracebacks/debuggers!"
    modfile2 = open(modfile2_path, 'w')
    modfile2.write(module_data)
    modfile2.close()
    modfile = modfile2_path

    return (modfile2_path, module_style)
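Beispiel #45 accepts interpreter shorthand such as `python=/usr/bin/python` and normalizes it to the full `ansible_python_interpreter` fact name. That normalization on its own:

# Normalize 'python=/usr/bin/python' to {'ansible_python_interpreter': '/usr/bin/python'}.
def normalize_interpreter(arg):
    if "=" not in arg:
        raise ValueError("interpreter must be in the form ansible_python_interpreter=/usr/bin/python")
    itype, ipath = arg.split("=")
    if not itype.startswith("ansible_"):
        itype = "ansible_%s" % itype
    if not itype.endswith("_interpreter"):
        itype = "%s_interpreter" % itype
    return {itype: ipath}

print(normalize_interpreter("python=/usr/bin/python"))
# {'ansible_python_interpreter': '/usr/bin/python'}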
Beispiel #46
0
    def get_group_vars(self, group, vault_password=None):
        result = {}
        
        filename = os.path.join(self._base_group_vars, "%s.yml" % group.name)
        if os.path.isfile( filename ):
            res = utils.parse_yaml_from_file(filename, vault_password=vault_password)
            if type(res) != dict:
                raise errors.AnsibleError("%s must be stored as a dictionary/hash" % filename)
            data = dict()
            for el in res:
                if len(self._group_allowed_facts) == 0 or el in self._group_allowed_facts:
                    data.update( { el: res[el] } )
            result.update(data)

        return result
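This `get_group_vars` variant additionally whitelists keys: when `self._group_allowed_facts` is non-empty, only listed variables survive. The filter reduces to a single comprehension:

# Keep only whitelisted keys; an empty whitelist means 'allow everything'.
def filter_facts(res, allowed):
    return {k: v for k, v in res.items() if not allowed or k in allowed}

print(filter_facts({"a": 1, "b": 2}, []))     # {'a': 1, 'b': 2}
print(filter_facts({"a": 1, "b": 2}, ["a"]))  # {'a': 1}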
Beispiel #47
0
    def _load_playbook_from_file(self, path, vars={}, vars_files=[]):
        '''
        run top level error checking on playbooks and allow them to include other playbooks.
        '''

        playbook_data = utils.parse_yaml_from_file(path, vault_password=self.vault_password)
        accumulated_plays = []
        play_basedirs = []

        if type(playbook_data) != list:
            raise errors.AnsibleError("parse error: playbooks must be formatted as a YAML list, got %s" % type(playbook_data))

        basedir = os.path.dirname(path) or '.'
        utils.plugins.push_basedir(basedir)
        for play in playbook_data:
            if type(play) != dict:
                raise errors.AnsibleError("parse error: each play in a playbook must be a YAML dictionary (hash), received: %s" % play)

            if 'include' in play:
                # a playbook (list of plays) decided to include some other list of plays
                # from another file.  The result is a flat list of plays in the end.

                play_vars = self._get_playbook_vars(play, vars)
                play_vars_files = self._get_playbook_vars_files(play, vars_files)
                inc_vars, inc_path = self._get_include_info(play, basedir, play_vars)
                play_vars.update(inc_vars)

                included_path = utils.path_dwim(basedir, template(basedir, inc_path, play_vars))
                (plays, basedirs) = self._load_playbook_from_file(included_path, vars=play_vars, vars_files=play_vars_files)
                for p in plays:
                    # support for parameterized play includes works by passing
                    # those variables along to the subservient play
                    p['vars'] = self._extend_play_vars(p, play_vars)
                    # now add in the vars_files
                    p['vars_files'] = utils.list_union(p.get('vars_files', []), play_vars_files)

                accumulated_plays.extend(plays)
                play_basedirs.extend(basedirs)

            else:

                # this is a normal (non-included play)
                accumulated_plays.append(play)
                play_basedirs.append(basedir)

        return (accumulated_plays, play_basedirs)
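Playbook includes in Beispiel #47 flatten recursively: each `include:` entry names another playbook whose plays are spliced into the parent's list. A toy version of the flattening, with an in-memory `LIBRARY` dict standing in for loading YAML files from disk:

# Toy recursive flattening of playbook includes; LIBRARY is a hypothetical
# stand-in for parse_yaml_from_file on each included path.
LIBRARY = {
    "web.yml": [{"hosts": "web", "tasks": []}],
    "site.yml": [{"include": "web.yml"}, {"hosts": "db", "tasks": []}],
}

def flatten(name):
    plays = []
    for play in LIBRARY[name]:
        if "include" in play:
            plays.extend(flatten(play["include"]))
        else:
            plays.append(play)
    return plays

print(flatten("site.yml"))
# [{'hosts': 'web', 'tasks': []}, {'hosts': 'db', 'tasks': []}]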
Beispiel #48
0
    def _load_tasks(self, ds, keyname):
        ''' handle task and handler include statements '''

        tasks = ds.get(keyname, [])
        results = []
        for x in tasks:
            if 'include' in x:
                task_vars = self.vars.copy()
                tokens = shlex.split(x['include'])
                items = ['']
                for k in x:
                    if not k.startswith("with_"):
                        continue
                    plugin_name = k[5:]
                    if plugin_name not in utils.plugins.lookup_loader:
                        raise errors.AnsibleError(
                            "cannot find lookup plugin named %s for usage in with_%s"
                            % (plugin_name, plugin_name))
                    terms = utils.template_ds(self.basedir, x[k], task_vars)
                    items = utils.plugins.lookup_loader.get(
                        plugin_name, basedir=self.basedir,
                        runner=None).run(terms, inject=task_vars)

                for item in items:
                    mv = task_vars.copy()
                    mv['item'] = item
                    for t in tokens[1:]:
                        (k, v) = t.split("=", 1)
                        mv[k] = utils.template_ds(self.basedir, v, mv)
                    include_file = utils.template(self.basedir, tokens[0], mv)
                    data = utils.parse_yaml_from_file(
                        utils.path_dwim(self.basedir, include_file))
                    for y in data:
                        results.append(Task(self, y, module_vars=mv.copy()))
            elif type(x) == dict:
                task_vars = self.vars.copy()
                results.append(Task(self, x, module_vars=task_vars))
            else:
                raise Exception("unexpected task type")

        for x in results:
            if self.tags is not None:
                x.tags.extend(self.tags)

        return results
Beispiel #49
0
def parse_extra_vars(extras, vault_pass=None):
    inject_ansible_paths()
    from ansible import utils
    extra_vars = {}
    for extra_vars_opt in extras:
        if extra_vars_opt.startswith("@"):
            # Argument is a YAML file (JSON is a subset of YAML)
            kw = {}
            if vault_pass:
                kw['vault_password'] = vault_pass
            # vault_password belongs to parse_yaml_from_file, not combine_vars
            extra_vars = utils.combine_vars(extra_vars, utils.parse_yaml_from_file(extra_vars_opt[1:], **kw))
        elif extra_vars_opt and extra_vars_opt[0] in '[{':
            # Arguments as YAML
            extra_vars = utils.combine_vars(extra_vars, utils.parse_yaml(extra_vars_opt))
        else:
            # Arguments as Key-value
            extra_vars = utils.combine_vars(extra_vars, utils.parse_kv(extra_vars_opt))
    return extra_vars
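Beispiel #49 dispatches on the first character of each extra-vars argument: `@` loads a YAML/JSON file, `[` or `{` parses an inline document, and anything else is treated as key=value pairs. A standalone sketch of the dispatch, with `json` standing in for the YAML parsing (JSON being a subset of YAML):

import json

# Standalone sketch of the three extra-vars forms; json and str.split
# stand in for utils.parse_yaml / utils.parse_kv (a simplification).
def parse_extra_vars_sketch(extras):
    merged = {}
    for opt in extras:
        if opt.startswith("@"):
            with open(opt[1:]) as f:        # file argument; JSON shown for brevity
                merged.update(json.load(f))
        elif opt and opt[0] in "[{":
            merged.update(json.loads(opt))  # inline document
        else:
            merged.update(dict(kv.split("=", 1) for kv in opt.split()))
    return merged

print(parse_extra_vars_sketch(['{"region": "eu-west-1"}', "env=staging debug=true"]))
# {'region': 'eu-west-1', 'env': 'staging', 'debug': 'true'}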
Beispiel #50
0
    def get_group_vars(self, group, vault_password=None):
        result = {}

        filename = os.path.join(self._base_group_vars, "%s.yml" % group.name)
        if os.path.isfile(filename):
            res = utils.parse_yaml_from_file(filename,
                                             vault_password=vault_password)
            if type(res) != dict:
                raise errors.AnsibleError(
                    "%s must be stored as a dictionary/hash" % filename)
            data = dict()
            for el in res:
                if len(self._group_allowed_facts) == 0 or el in self._group_allowed_facts:
                    data.update({el: res[el]})
            result.update(data)

        return result
Beispiel #51
0
    def _include_tasks(self, play, task, dirname, new_tasks):
        ''' load tasks included from external files. '''

        # include: some.yml a=2 b=3 c=4
        play_vars = self._get_vars(play, dirname)
        include_tokens = utils.template(task['include'], play_vars, SETUP_CACHE).split()
        path = utils.path_dwim(dirname, include_tokens[0])
        include_vars = {}
        for i,x in enumerate(include_tokens):
            if x.find("=") != -1:
                (k,v) = x.split("=")
                include_vars[k] = v
        inject_vars = play_vars.copy()
        inject_vars.update(include_vars)
        included = utils.parse_yaml_from_file(path)
        for x in included:
            if len(include_vars):
                x["vars"] = include_vars
            new_tasks.append(x)
Beispiel #52
0
    def _include_tasks(self, play, task, dirname, new_tasks):
        ''' load tasks included from external files. '''

        # include: some.yml a=2 b=3 c=4
        play_vars = self._get_vars(play, dirname)
        include_tokens = utils.template(task['include'], play_vars,
                                        SETUP_CACHE).split()
        path = utils.path_dwim(dirname, include_tokens[0])
        include_vars = {}
        for i, x in enumerate(include_tokens):
            if x.find("=") != -1:
                (k, v) = x.split("=")
                include_vars[k] = v
        inject_vars = play_vars.copy()
        inject_vars.update(include_vars)
        included = utils.parse_yaml_from_file(path)
        for x in included:
            if len(include_vars):
                x["vars"] = include_vars
            new_tasks.append(x)
Beispiel #53
0
    def run(self, terms, inject=None, **kwargs):

        terms = utils.listify_lookup_plugin_terms(terms, self.basedir, inject)

        ret = []

        for term in terms:

            dwimmed = utils.path_dwim(self.basedir, term)
            globbed = glob.glob(dwimmed)
            ret.extend(g for g in globbed if os.path.isfile(g))

        parsed_ret = []

        # go through filename list, turning each into parsed objects
        for _record in ret:

            _record_parsed = utils.parse_yaml_from_file(path=_record, vault_password=None)
            parsed_ret.append(_record_parsed)

        return parsed_ret
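The lookup plugin above dwims each term against the play's base directory, globs it, keeps only regular files, and parses every match as YAML. The glob-and-filter core, without the Ansible plumbing:

import glob
import os

# Expand patterns relative to basedir and keep only regular files.
def matching_files(basedir, patterns):
    matched = []
    for pattern in patterns:
        expanded = glob.glob(os.path.join(basedir, pattern))
        matched.extend(p for p in expanded if os.path.isfile(p))
    return matched

print(matching_files("/etc", ["host*", "*.conf"]))  # results depend on the machine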
Beispiel #54
0
def find_children(playbook):
    if not os.path.exists(playbook[0]):
        return []
    results = []
    basedir = os.path.dirname(playbook[0])
    pb_data = parse_yaml_from_file(playbook[0])
    items = _playbook_items(pb_data)
    for item in items:
        for child in play_children(basedir, item, playbook[1]):
            if "$" in child['path'] or "{{" in child['path']:
                continue
            valid_tokens = list()
            for token in split_args(child['path']):
                if '=' in token:
                    break
                valid_tokens.append(token)
            path = ' '.join(valid_tokens)
            results.append({
                'path': path_dwim(basedir, path),
                'type': child['type']
            })
    return results
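`find_children` keeps only the leading path tokens of a child entry, stopping at the first `key=value` parameter, so `some.yml a=1` resolves to just `some.yml`. The token filter in isolation, with `shlex.split` in place of the `split_args` helper the example imports:

import shlex

# Keep leading path tokens, stopping at the first key=value parameter.
def path_tokens(child_path):
    valid = []
    for token in shlex.split(child_path):
        if "=" in token:
            break
        valid.append(token)
    return " ".join(valid)

print(path_tokens("roles/common/tasks/main.yml a=1 b=2"))
# roles/common/tasks/main.yml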
Beispiel #55
0
        def process_files(filename, filename2, filename3, filename4, host=None):

            """ pseudo-algorithm for deciding where new vars should go """

            data = utils.parse_yaml_from_file(filename4, vault_password=self.vault_password)
            if data:
                if type(data) != dict:
                    raise errors.AnsibleError("%s must be stored as a dictionary/hash" % filename4)
                if host is not None:
                    target_filename = None
                    if self._has_vars_in(filename2):
                        if not self._has_vars_in(filename3):
                            target_filename = filename3
                        else:
                            target_filename = filename4
                    update_vars_cache(host, data, target_filename=target_filename)
                else:
                    self.vars = utils.combine_vars(self.vars, data)
                # we did process this file
                return True
            # we did not process this file
            return False
Beispiel #57
0
        def process_files(filename, filename2, filename3, filename4, host=None):

            """ pseudo-algorithm for deciding where new vars should go """

            data = utils.parse_yaml_from_file(filename4, vault_password=self.vault_password)
            if data:
                if type(data) != dict:
                    raise errors.AnsibleError("%s must be stored as a dictionary/hash" % filename4)
                if host is not None:
                    if self._has_vars_in(filename2) and not self._has_vars_in(filename3):
                        # running a host specific pass and has host specific variables
                        # load into setup cache
                        update_vars_cache(host, inject, data, filename4)
                    elif self._has_vars_in(filename3) and not self._has_vars_in(filename4):
                        # handle mixed scope variables in filepath
                        update_vars_cache(host, inject, data, filename4)

                elif not self._has_vars_in(filename4):
                    # found a non-host specific variable, load into vars and NOT
                    # the setup cache
                    if host is not None:
                        self.vars.update(data)
                    else:
                        self.vars = utils.combine_vars(self.vars, data)
Beispiel #58
0
    def _update_vars_files_for_host(self, host):

        if type(self.vars_files) != list:
            self.vars_files = [ self.vars_files ]

        if host is not None:
            inject = {}
            inject.update(self.playbook.inventory.get_variables(host))
            inject.update(self.playbook.SETUP_CACHE[host])

        for filename in self.vars_files:

            if type(filename) == list:

                # loop over all filenames, loading the first one, and failing if none found
                found = False
                sequence = []
                for real_filename in filename:
                    filename2 = template(self.basedir, real_filename, self.vars)
                    filename3 = filename2
                    if host is not None:
                        filename3 = template(self.basedir, filename2, inject)
                    filename4 = utils.path_dwim(self.basedir, filename3)
                    sequence.append(filename4)
                    if os.path.exists(filename4):
                        found = True
                        data = utils.parse_yaml_from_file(filename4)
                        if type(data) != dict:
                            raise errors.AnsibleError("%s must be stored as a dictionary/hash" % filename4)
                        if host is not None:
                            if self._has_vars_in(filename2) and not self._has_vars_in(filename3):
                                # this filename has variables in it that were fact specific
                                # so it needs to be loaded into the per host SETUP_CACHE
                                self.playbook.SETUP_CACHE[host].update(data)
                                self.playbook.callbacks.on_import_for_host(host, filename4)
                        elif not self._has_vars_in(filename4):
                            # found a non-host specific variable, load into vars and NOT
                            # the setup cache
                            self.vars.update(data)
                    elif host is not None:
                        self.playbook.callbacks.on_not_import_for_host(host, filename4)
                    if found:
                        break
                if not found and host is not None:
                    raise errors.AnsibleError(
                        "%s: FATAL, no files matched for vars_files import sequence: %s" % (host, sequence)
                    )

            else:
                # just one filename supplied, load it!

                filename2 = template(self.basedir, filename, self.vars)
                filename3 = filename2
                if host is not None:
                    filename3 = template(self.basedir, filename2, inject)
                filename4 = utils.path_dwim(self.basedir, filename3)
                if self._has_vars_in(filename4):
                    continue
                new_vars = utils.parse_yaml_from_file(filename4)
                if new_vars:
                    if type(new_vars) != dict:
                        raise errors.AnsibleError("%s must be stored as dictionary/hash: %s" % (filename4, type(new_vars)))
                    if host is not None and self._has_vars_in(filename2) and not self._has_vars_in(filename3):
                        # running a host specific pass and has host specific variables
                        # load into setup cache
                        self.playbook.SETUP_CACHE[host] = utils.combine_vars(
                            self.playbook.SETUP_CACHE[host], new_vars)
                        self.playbook.callbacks.on_import_for_host(host, filename4)
                    elif host is None:
                        # running a non-host specific pass and we can update the global vars instead
                        self.vars = utils.combine_vars(self.vars, new_vars)
Beispiel #59
0
    def _load_tasks(self, tasks, vars=None, default_vars=None, sudo_vars=None, additional_conditions=None, original_file=None, role_name=None):
        ''' handle task and handler include statements '''

        results = []
        if tasks is None:
            # support empty handler files, and the like.
            tasks = []
        if additional_conditions is None:
            additional_conditions = []
        if vars is None:
            vars = {}
        if default_vars is None:
            default_vars = {}
        if sudo_vars is None:
            sudo_vars = {}

        old_conditions = list(additional_conditions)

        for x in tasks:

            # prevent assigning the same conditions to each task on an include
            included_additional_conditions = list(old_conditions)

            if not isinstance(x, dict):
                raise errors.AnsibleError("expecting dict; got: %s" % x)

            # evaluate sudo vars for current and child tasks 
            included_sudo_vars = {}
            for k in ["sudo", "sudo_user"]:
                if k in x:
                    included_sudo_vars[k] = x[k]
                elif k in sudo_vars:
                    included_sudo_vars[k] = sudo_vars[k]
                    x[k] = sudo_vars[k]

            if 'meta' in x:
                if x['meta'] == 'flush_handlers':
                    results.append(Task(self,x))
                    continue

            task_vars = self.vars.copy()
            task_vars.update(vars)
            if original_file:
                task_vars['_original_file'] = original_file

            if 'include' in x:
                tokens = shlex.split(str(x['include']))
                items = ['']
                included_additional_conditions = list(additional_conditions)
                include_vars = {}
                for k in x:
                    if k.startswith("with_"):
                        utils.deprecated("include + with_items is an unsupported feature and has been undocumented for many releases because of this", "1.5")
                        plugin_name = k[5:]
                        if plugin_name not in utils.plugins.lookup_loader:
                            raise errors.AnsibleError("cannot find lookup plugin named %s for usage in with_%s" % (plugin_name, plugin_name))
                        terms = template(self.basedir, x[k], task_vars)
                        items = utils.plugins.lookup_loader.get(plugin_name, basedir=self.basedir, runner=None).run(terms, inject=task_vars)
                    elif k.startswith("when_"):
                        included_additional_conditions.insert(0, utils.compile_when_to_only_if("%s %s" % (k[5:], x[k])))
                    elif k == 'when':
                        if type(x[k]) is str:
                            included_additional_conditions.insert(0, utils.compile_when_to_only_if("jinja2_compare %s" % x[k]))
                        elif type(x[k]) is list:
                            for i in x[k]:
                                included_additional_conditions.insert(0, utils.compile_when_to_only_if("jinja2_compare %s" % i))
                    elif k in ("include", "vars", "default_vars", "only_if", "sudo", "sudo_user", "role_name"):
                        continue
                    else:
                        include_vars[k] = x[k]

                default_vars = x.get('default_vars', {})
                if not default_vars:
                    default_vars = self.default_vars
                else:
                    default_vars = utils.combine_vars(self.default_vars, default_vars)

                # append the vars defined with the include (from above) 
                # as well as the old-style 'vars' element. The old-style
                # vars are given higher precedence here (just in case)
                task_vars = utils.combine_vars(task_vars, include_vars)
                if 'vars' in x:
                    task_vars = utils.combine_vars(task_vars, x['vars'])

                if 'only_if' in x:
                    included_additional_conditions.append(x['only_if'])

                new_role = None
                if 'role_name' in x:
                    new_role = x['role_name']

                for item in items:
                    mv = task_vars.copy()
                    mv['item'] = item
                    for t in tokens[1:]:
                        (k,v) = t.split("=", 1)
                        mv[k] = template(self.basedir, v, mv)
                    dirname = self.basedir
                    if original_file:
                        dirname = os.path.dirname(original_file)
                    include_file = template(dirname, tokens[0], mv)
                    include_filename = utils.path_dwim(dirname, include_file)
                    data = utils.parse_yaml_from_file(include_filename)
                    if 'role_name' in x and data is not None:
                        # use a distinct loop variable so the outer task loop's 'x' is not clobbered
                        for y in data:
                            if 'include' in y:
                                y['role_name'] = new_role
                    loaded = self._load_tasks(data, mv, default_vars, included_sudo_vars, list(included_additional_conditions), original_file=include_filename, role_name=new_role)
                    results += loaded
            elif type(x) == dict:
                task = Task(self,x,module_vars=task_vars,default_vars=default_vars,additional_conditions=list(additional_conditions),role_name=role_name)
                results.append(task)
            else:
                raise Exception("unexpected task type")

        for x in results:
            if self.tags is not None:
                x.tags.extend(self.tags)

        return results
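Beispiel #59 normalizes a task-level `when` (a string or a list of strings) by prepending each condition to the accumulated list before handing it to `utils.compile_when_to_only_if`. A sketch of just the accumulation, with the compile step elided:

# Normalize a 'when' value (string or list) into a flat condition list,
# newest first, the way the include handling above prepends conditions.
def collect_conditions(when, existing):
    conditions = list(existing)
    items = when if isinstance(when, list) else [when]
    for cond in items:
        conditions.insert(0, "jinja2_compare %s" % cond)
    return conditions

print(collect_conditions("ansible_os_family == 'Debian'", []))
print(collect_conditions(["a == 1", "b == 2"], ["c == 3"]))
# ["jinja2_compare b == 2", "jinja2_compare a == 1", "c == 3"]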