Example #1
    def run(self, host, vault_password=None):
        # return the inventory variables for the host

        inventory = self.inventory
        #hostrec = inventory.get_host(host)

        groupz = sorted(inventory.groups_for_host(host.name),
                        key=lambda g: g.depth)
        groups = [g.name for g in groupz]
        basedir = inventory.basedir()

        if basedir is None:
            # could happen when inventory is passed in via the API
            return

        results = {}

        # load vars in inventory_dir/group_vars/name_of_group
        for group in groups:
            if group in self.group_cache:
                results = self.group_cache[group]
            else:
                group_vars_dir = os.path.join(basedir, "group_vars")
                group_vars_files = vars_files(group_vars_dir, group)
                #if len(group_vars_files) > 1:
                #    raise errors.AnsibleError("Found more than one file for group '%s': %s"
                #                      % (group, group_vars_files))
                for path in group_vars_files:
                    data = utils.parse_yaml_from_file(
                        path, vault_password=vault_password)
                    if type(data) != dict:
                        raise errors.AnsibleError(
                            "%s must be stored as a dictionary/hash" % path)
                    if C.DEFAULT_HASH_BEHAVIOUR == "merge":
                        # let data content override results if needed
                        results = utils.merge_hash(results, data)
                    else:
                        results.update(data)
                self.group_cache[group] = results

        # load vars in inventory_dir/host_vars/name_of_host
        host_vars_dir = os.path.join(basedir, "host_vars")
        host_vars_files = vars_files(host_vars_dir, host.name)
        if len(host_vars_files) > 1:
            raise errors.AnsibleError(
                "Found more than one file for host '%s': %s" %
                (host.name, host_vars_files))
        for path in host_vars_files:
            data = utils.parse_yaml_from_file(path,
                                              vault_password=vault_password)
            if type(data) != dict:
                raise errors.AnsibleError(
                    "%s must be stored as a dictionary/hash" % path)
            if C.DEFAULT_HASH_BEHAVIOUR == "merge":
                # let data content override results if needed
                results = utils.merge_hash(results, data)
            else:
                results.update(data)
        return results
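Every example on this page branches on C.DEFAULT_HASH_BEHAVIOUR between utils.merge_hash (a recursive deep merge) and dict.update (a shallow overwrite). A minimal, self-contained sketch of the difference, with deep_merge as a hypothetical stand-in for Ansible's helper:

def deep_merge(a, b):
    # recursively merge dict b into a copy of dict a; b wins on conflicts
    result = dict(a)
    for key, value in b.items():
        if key in result and isinstance(result[key], dict) and isinstance(value, dict):
            result[key] = deep_merge(result[key], value)
        else:
            result[key] = value
    return result

group_vars = {'app': {'port': 8080, 'debug': False}}
host_vars = {'app': {'debug': True}}

deep_merge(group_vars, host_vars)
# -> {'app': {'port': 8080, 'debug': True}}  (nested keys are combined)

shallow = dict(group_vars)
shallow.update(host_vars)
# -> {'app': {'debug': True}}  (the whole 'app' dict is replaced)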
Example #2
    def run(self, host, vault_password=None):
        # return the inventory variables for the host

        inventory = self.inventory
        #hostrec = inventory.get_host(host)

        groupz = sorted(inventory.groups_for_host(host.name), key=lambda g: g.depth)
        groups = [ g.name for g in groupz ]
        basedir = inventory.basedir()

        if basedir is None:
            # could happen when inventory is passed in via the API
            return

        results = {}

        # load vars in inventory_dir/group_vars/name_of_group
        for group in groups:
            if group in self.group_cache:
                results = self.group_cache[group]
            else:
                group_vars_dir = os.path.join(basedir, "group_vars")
                group_vars_files = vars_files(group_vars_dir, group)
                #if len(group_vars_files) > 1:
                #    raise errors.AnsibleError("Found more than one file for group '%s': %s"
                #                      % (group, group_vars_files))
                for path in group_vars_files:
                    data = utils.parse_yaml_from_file(path, vault_password=vault_password)
                    if type(data) != dict:
                        raise errors.AnsibleError("%s must be stored as a dictionary/hash" % path)
                    if C.DEFAULT_HASH_BEHAVIOUR == "merge":
                        # let data content override results if needed
                        results = utils.merge_hash(results, data)
                    else:
                        results.update(data)
                self.group_cache[group] = results

        # load vars in inventory_dir/host_vars/name_of_host
        host_vars_dir = os.path.join(basedir, "host_vars")
        host_vars_files = vars_files(host_vars_dir, host.name)
        if len(host_vars_files) > 1:
            raise errors.AnsibleError("Found more than one file for host '%s': %s"
                                  % (host.name, host_vars_files))
        for path in host_vars_files:
            data = utils.parse_yaml_from_file(path, vault_password=vault_password)
            if type(data) != dict:
                raise errors.AnsibleError("%s must be stored as a dictionary/hash" % path)
            if C.DEFAULT_HASH_BEHAVIOUR == "merge":
                # let data content override results if needed
                results = utils.merge_hash(results, data)
            else:
                results.update(data)
        print("In group_vars_dirs run for %s" % host.name)
        print results
        return results
Example #3
    def run(self, host):
        # return the inventory variables for the host

        inventory = self.inventory
        #hostrec = inventory.get_host(host)

        groupz = sorted(inventory.groups_for_host(host.name),
                        key=lambda g: g.depth)
        groups = [g.name for g in groupz]
        basedir = inventory.basedir()

        if basedir is None:
            # could happen when inventory is passed in via the API
            return

        results = {}

        # load vars in inventory_dir/group_vars/name_of_group
        for x in groups:
            p = os.path.join(basedir, "group_vars/%s" % x)
            paths = [p, '.'.join([p, 'yml']), '.'.join([p, 'yaml'])]
            for path in paths:
                if os.path.exists(path):
                    data = utils.parse_yaml_from_file(path)
                    if type(data) != dict:
                        raise errors.AnsibleError(
                            "%s must be stored as a dictionary/hash" % path)
                    if C.DEFAULT_HASH_BEHAVIOUR == "merge":
                        # let data content override results if needed
                        results = utils.merge_hash(results, data)
                    else:
                        results.update(data)
                    break

        # load vars in inventory_dir/host_vars/name_of_host
        p = os.path.join(basedir, "host_vars/%s" % host.name)
        paths = [p, '.'.join([p, 'yml']), '.'.join([p, 'yaml'])]
        for path in paths:
            if os.path.exists(path):
                data = utils.parse_yaml_from_file(path)
                if type(data) != dict:
                    raise errors.AnsibleError(
                        "%s must be stored as a dictionary/hash" % path)
                if C.DEFAULT_HASH_BEHAVIOUR == "merge":
                    # let data content override results if needed
                    results = utils.merge_hash(results, data)
                else:
                    results.update(data)
                break
        return results
Example #4
  def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):

    if not module_args:
      result = dict(failed=True, msg="No source file given")
      return ReturnData(conn=conn, comm_ok=True, result=result)

    source = template.template(self.runner.basedir, module_args, inject)

    if '_original_file' in inject:
      source = utils.path_dwim_relative(inject['_original_file'], 'vars', source, self.runner.basedir, False)
    else:
      source = utils.path_dwim(self.runner.basedir, source)

    data = {}

    if os.path.exists(source):
      data = utils.parse_yaml_from_file(source, vault_password=self.runner.vault_pass)

      if data and type(data) != dict:
        raise errors.AnsibleError("%s must be stored as a dictionary/hash" % source)

    if not hasattr(conn.runner, 'mergeBuffer'):
      conn.runner.mergeBuffer = {}

    if conn.host in conn.runner.mergeBuffer:
      data = utils.merge_hash(conn.runner.mergeBuffer[conn.host], data)

    conn.runner.mergeBuffer[conn.host] = data

    result = dict(ansible_facts=data)
    return ReturnData(conn=conn, comm_ok=True, result=result)
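Example #4 keeps a per-host buffer on the runner so that repeated calls deep-merge each newly parsed vars file into whatever was already gathered for that host. A rough, self-contained sketch of that accumulation pattern (merge_buffer and the shallow merge default are illustrative stand-ins, not Ansible's API):

merge_buffer = {}

def accumulate(host, data, merge=lambda a, b: dict(a, **b)):
    # merge the new data over anything previously buffered for this host
    if host in merge_buffer:
        data = merge(merge_buffer[host], data)
    merge_buffer[host] = data
    return data

accumulate('web1', {'a': 1})
accumulate('web1', {'b': 2})  # -> {'a': 1, 'b': 2}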
Example #5
def defaults(env, runtime, vars_file=None):

    if vars_file:
        extra_vars = yaml.load(open(vars_file))
        runtime.update(extra_vars)

    defaults = {
        'django': {},
        'groups': {},
        'ansible_eth0': {
            'ipv4': {
                'address': '127.0.0.1'
            }
        }
    }

    for dep in get_deps([]):
        meta_file = os.path.normpath(os.path.join(os.sys.argv[1], '..', dep, 'defaults/main.yml'))
        if os.path.isfile(meta_file):
            defaults = add_vars(defaults, Template(read_file(meta_file)), runtime)

    defaults = add_vars(defaults, env.get_template('defaults/main.yml'), runtime)
    if os.path.isfile(os.path.join(os.sys.argv[1], 'vars/main.yml')):
        defaults = add_vars(defaults, env.get_template('vars/main.yml'), runtime)

    return merge_hash(defaults, runtime)
Example #6
    def run(self, host):
        # return the inventory variables for the host

        inventory = self.inventory
        #hostrec = inventory.get_host(host)

        groupz = sorted(inventory.groups_for_host(host.name), key=lambda g: g.depth)
        groups = [ g.name for g in groupz ]
        basedir = inventory.basedir()

        if basedir is None:
            # could happen when inventory is passed in via the API
            return

        results = {}

        # load vars in playbook_dir/group_vars/name_of_group
        for x in groups:
            path = os.path.join(basedir, "group_vars/%s" % x)
            if os.path.exists(path):
                data = utils.parse_yaml_from_file(path)
                if type(data) != dict:
                    raise errors.AnsibleError("%s must be stored as a dictionary/hash" % path)
                if C.DEFAULT_HASH_BEHAVIOUR == "merge":
                    # let data content override results if needed
                    results = utils.merge_hash(results, data)
                else:
                    results.update(data)

        # load vars in playbook_dir/host_vars/name_of_host
        path = os.path.join(basedir, "host_vars/%s" % host.name)
        if os.path.exists(path):
            data = utils.parse_yaml_from_file(path)
            if type(data) != dict:
                raise errors.AnsibleError("%s must be stored as a dictionary/hash" % path)
            if C.DEFAULT_HASH_BEHAVIOUR == "merge":
                # let data content override results if needed
                results = utils.merge_hash(results, data)
            else:
                results.update(data)
        return results
Example #7
    def playbook_on_play_start(self, name):
        play_vars = merge_hash(self.play.vars,
                               getattr(self.play, 'vars_file_vars', {}))
        play_vars = merge_hash(play_vars,
                               getattr(self.playbook, 'extra_vars', {}))
        pem = play_vars.get('creds_ssh_private_key', None)
        if pem is None: return
        key = RSAKey.from_private_key(StringIO.StringIO(pem))

        hexdigest = unpack('16B', key.get_fingerprint())
        hexdigest = ':'.join(['%02x' % x for x in hexdigest])
        display('Loading SSH private key %s' % hexdigest)

        pub = '%s %s %s' % (key.get_name(), key.get_base64(), self.KEY_COMMENT)
        for x in self.play.tasks() + self.play.handlers():
            y = getattr(x, 'module_vars', None)
            if y: y['creds_ssh_public_key'] = pub

        ssh_agent = play_vars.get('creds_ssh_agent', True)
        if not ssh_agent: return

        msg = Message()
        msg.add_byte(chr(self.SSH2_AGENTC_ADD_IDENTITY))
        msg.add_string(key.get_name())
        msg.add_mpint(key.n)
        msg.add_mpint(key.e)
        msg.add_mpint(key.d)
        msg.add_mpint(0)
        msg.add_mpint(key.p)
        msg.add_mpint(key.q)
        msg.add_string(self.KEY_COMMENT)

        agent = Agent()
        if agent._conn:
            agent._send_message(msg)
        else:
            warning('Failed to connect to ssh-agent')
        agent.close()
Example #8
    def run(self, host, vault_password=None):
        """ For backwards compatibility, when only vars per host were retrieved
            This method should return both host specific vars as well as vars
            calculated from groups it is a member of """
        result = {}
        result.update(self.get_host_vars(host, vault_password))

        for g in host.groups:
            data = self.get_group_vars(g, vault_password)
            if C.DEFAULT_HASH_BEHAVIOUR == "merge":
                # let group data override the accumulated result if needed
                result = utils.merge_hash(result, data)
            else:
                result.update(data)
        return result
Example #9
    def run(self, host, vault_password=None):
        """ For backwards compatibility (when only vars per host were
            retrieved), this method returns both host-specific vars and
            vars calculated from the groups the host is a member of. """
        result = {}
        result.update(self.get_host_vars(host, vault_password))

        for g in host.groups:
            data = self.get_group_vars(g, vault_password)
            if C.DEFAULT_HASH_BEHAVIOUR == "merge":
                # let group data override the accumulated result if needed
                result = utils.merge_hash(result, data)
            else:
                result.update(data)
        return result
Example #10
    def _load_tasks(self, tasks, vars=None, role_params=None, default_vars=None, sudo_vars=None,
                    additional_conditions=None, original_file=None, role_name=None):
        ''' handle task and handler include statements '''

        results = []
        if tasks is None:
            # support empty handler files, and the like.
            tasks = []
        if additional_conditions is None:
            additional_conditions = []
        if vars is None:
            vars = {}
        if role_params is None:
            role_params = {}
        if default_vars is None:
            default_vars = {}
        if sudo_vars is None:
            sudo_vars = {}

        old_conditions = list(additional_conditions)

        for x in tasks:

            # prevent assigning the same conditions to each task on an include
            included_additional_conditions = list(old_conditions)

            if not isinstance(x, dict):
                raise errors.AnsibleError("expecting dict; got: %s, error in %s" % (x, original_file))

            # evaluate sudo vars for current and child tasks
            included_sudo_vars = {}
            for k in ["sudo", "sudo_user"]:
                if k in x:
                    included_sudo_vars[k] = x[k]
                elif k in sudo_vars:
                    included_sudo_vars[k] = sudo_vars[k]
                    x[k] = sudo_vars[k]

            if 'meta' in x:
                if x['meta'] == 'flush_handlers':
                    results.append(Task(self, x))
                    continue

            task_vars = vars.copy()
            if original_file:
                task_vars['_original_file'] = original_file

            if 'include' in x:
                tokens = split_args(str(x['include']))
                included_additional_conditions = list(additional_conditions)
                include_vars = {}
                for k in x:
                    if k.startswith("with_"):
                        if original_file:
                            offender = " (in %s)" % original_file
                        else:
                            offender = ""
                        utils.deprecated("include + with_items is a removed deprecated feature" + offender, "1.5", removed=True)
                    elif k.startswith("when_"):
                        utils.deprecated("\"when_<criteria>:\" is a removed deprecated feature, use the simplified 'when:' conditional directly", None, removed=True)
                    elif k == 'when':
                        if isinstance(x[k], (basestring, bool)):
                            included_additional_conditions.append(x[k])
                        elif type(x[k]) is list:
                            included_additional_conditions.extend(x[k])
                    elif k in ("include", "vars", "role_params", "default_vars", "sudo", "sudo_user", "role_name", "no_log"):
                        continue
                    else:
                        include_vars[k] = x[k]

                # get any role parameters specified
                role_params = x.get('role_params', {})

                # get any role default variables specified
                default_vars = x.get('default_vars', {})
                if not default_vars:
                    default_vars = self.default_vars
                else:
                    default_vars = utils.combine_vars(self.default_vars, default_vars)

                # append the vars defined with the include (from above)
                # as well as the old-style 'vars' element. The old-style
                # vars are given higher precedence here (just in case)
                task_vars = utils.combine_vars(task_vars, include_vars)
                if 'vars' in x:
                    task_vars = utils.combine_vars(task_vars, x['vars'])

                new_role = None
                if 'role_name' in x:
                    new_role = x['role_name']

                mv = task_vars.copy()
                for t in tokens[1:]:
                    (k,v) = t.split("=", 1)
                    v = unquote(v)
                    mv[k] = template(self.basedir, v, mv)
                dirname = self.basedir
                if original_file:
                    dirname = os.path.dirname(original_file)

                # temp vars are used here to avoid trampling on the existing vars structures
                temp_vars = utils.merge_hash(self.vars, self.vars_file_vars)
                temp_vars = utils.merge_hash(temp_vars, mv)
                temp_vars = utils.merge_hash(temp_vars, self.playbook.extra_vars)
                include_file = template(dirname, tokens[0], temp_vars)
                include_filename = utils.path_dwim(dirname, include_file)

                data = utils.parse_yaml_from_file(include_filename, vault_password=self.vault_password)
                if 'role_name' in x and data is not None:
                    for y in data:
                        if isinstance(y, dict) and 'include' in y:
                            y['role_name'] = new_role
                loaded = self._load_tasks(data, mv, role_params, default_vars, included_sudo_vars, list(included_additional_conditions), original_file=include_filename, role_name=new_role)
                results += loaded
            elif type(x) == dict:
                task = Task(
                    self, x,
                    module_vars=task_vars,
                    play_vars=self.vars,
                    play_file_vars=self.vars_file_vars,
                    role_vars=self.role_vars,
                    role_params=role_params,
                    default_vars=default_vars,
                    additional_conditions=list(additional_conditions),
                    role_name=role_name
                )
                results.append(task)
            else:
                raise Exception("unexpected task type")

        for x in results:
            if self.tags is not None:
                x.tags.extend(self.tags)

        return results
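The include handling above splits an include line into a filename plus inline key=value variables (tokens[1:]), unquoting each value before templating it. A simplified sketch of that tokenizing step, with shlex.split standing in for Ansible's split_args and the templating pass omitted:

import shlex

def parse_include(spec):
    # 'tasks/deploy.yml app=frontend port=8080' -> filename + inline vars
    tokens = shlex.split(spec)
    inline_vars = {}
    for token in tokens[1:]:
        key, _, value = token.partition('=')
        inline_vars[key] = value
    return tokens[0], inline_vars

parse_include('tasks/deploy.yml app=frontend port=8080')
# -> ('tasks/deploy.yml', {'app': 'frontend', 'port': '8080'})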
Example #11
    def _load_tasks(self,
                    tasks,
                    vars=None,
                    role_params=None,
                    default_vars=None,
                    sudo_vars=None,
                    additional_conditions=None,
                    original_file=None,
                    role_name=None):
        ''' handle task and handler include statements '''

        results = []
        if tasks is None:
            # support empty handler files, and the like.
            tasks = []
        if additional_conditions is None:
            additional_conditions = []
        if vars is None:
            vars = {}
        if role_params is None:
            role_params = {}
        if default_vars is None:
            default_vars = {}
        if sudo_vars is None:
            sudo_vars = {}

        old_conditions = list(additional_conditions)

        for x in tasks:

            # prevent assigning the same conditions to each task on an include
            included_additional_conditions = list(old_conditions)

            if not isinstance(x, dict):
                raise errors.AnsibleError(
                    "expecting dict; got: %s, error in %s" %
                    (x, original_file))

            # evaluate sudo vars for current and child tasks
            included_sudo_vars = {}
            for k in ["sudo", "sudo_user"]:
                if k in x:
                    included_sudo_vars[k] = x[k]
                elif k in sudo_vars:
                    included_sudo_vars[k] = sudo_vars[k]
                    x[k] = sudo_vars[k]

            if 'meta' in x:
                if x['meta'] == 'flush_handlers':
                    results.append(Task(self, x))
                    continue

            task_vars = vars.copy()
            if original_file:
                task_vars['_original_file'] = original_file

            if 'include' in x:
                tokens = split_args(str(x['include']))
                included_additional_conditions = list(additional_conditions)
                include_vars = {}
                for k in x:
                    if k.startswith("with_"):
                        if original_file:
                            offender = " (in %s)" % original_file
                        else:
                            offender = ""
                        utils.deprecated(
                            "include + with_items is a removed deprecated feature"
                            + offender,
                            "1.5",
                            removed=True)
                    elif k.startswith("when_"):
                        utils.deprecated(
                            "\"when_<criteria>:\" is a removed deprecated feature, use the simplified 'when:' conditional directly",
                            None,
                            removed=True)
                    elif k == 'when':
                        if isinstance(x[k], (basestring, bool)):
                            included_additional_conditions.append(x[k])
                        elif type(x[k]) is list:
                            included_additional_conditions.extend(x[k])
                    elif k in ("include", "vars", "role_params",
                               "default_vars", "sudo", "sudo_user",
                               "role_name", "no_log"):
                        continue
                    else:
                        include_vars[k] = x[k]

                # get any role parameters specified
                role_params = x.get('role_params', {})

                # get any role default variables specified
                default_vars = x.get('default_vars', {})
                if not default_vars:
                    default_vars = self.default_vars
                else:
                    default_vars = utils.combine_vars(self.default_vars,
                                                      default_vars)

                # append the vars defined with the include (from above)
                # as well as the old-style 'vars' element. The old-style
                # vars are given higher precedence here (just in case)
                task_vars = utils.combine_vars(task_vars, include_vars)
                if 'vars' in x:
                    task_vars = utils.combine_vars(task_vars, x['vars'])

                new_role = None
                if 'role_name' in x:
                    new_role = x['role_name']

                mv = task_vars.copy()
                for t in tokens[1:]:
                    (k, v) = t.split("=", 1)
                    v = unquote(v)
                    mv[k] = template(self.basedir, v, mv)
                dirname = self.basedir
                if original_file:
                    dirname = os.path.dirname(original_file)

                # temp vars are used here to avoid trampling on the existing vars structures
                temp_vars = utils.merge_hash(self.vars, self.vars_file_vars)
                temp_vars = utils.merge_hash(temp_vars, mv)
                temp_vars = utils.merge_hash(temp_vars,
                                             self.playbook.extra_vars)
                include_file = template(dirname, tokens[0], temp_vars)
                include_filename = utils.path_dwim(dirname, include_file)

                data = utils.parse_yaml_from_file(
                    include_filename, vault_password=self.vault_password)
                if 'role_name' in x and data is not None:
                    for y in data:
                        if isinstance(y, dict) and 'include' in y:
                            y['role_name'] = new_role
                loaded = self._load_tasks(data,
                                          mv,
                                          role_params,
                                          default_vars,
                                          included_sudo_vars,
                                          list(included_additional_conditions),
                                          original_file=include_filename,
                                          role_name=new_role)
                results += loaded
            elif type(x) == dict:
                task = Task(self,
                            x,
                            module_vars=task_vars,
                            play_vars=self.vars,
                            play_file_vars=self.vars_file_vars,
                            role_vars=self.role_vars,
                            role_params=role_params,
                            default_vars=default_vars,
                            additional_conditions=list(additional_conditions),
                            role_name=role_name)
                results.append(task)
            else:
                raise Exception("unexpected task type")

        for x in results:
            if self.tags is not None:
                x.tags.extend(self.tags)

        return results
Example #12
    def __init__(self, playbook, ds, basedir, vault_password=None):
        ''' constructor loads from a play datastructure '''

        for x in ds.keys():
            if not x in Play.VALID_KEYS:
                raise errors.AnsibleError(
                    "%s is not a legal parameter at this level in an Ansible Playbook"
                    % x)

        # allow all playbook keys to be set by --extra-vars
        self.vars = ds.get('vars', {})
        self.vars_prompt = ds.get('vars_prompt', {})
        self.playbook = playbook
        self.vars = self._get_vars()
        self.vars_file_vars = dict()  # these are vars read in from vars_files:
        self.role_vars = dict()  # these are vars read in from vars/main.yml files in roles
        self.basedir = basedir
        self.roles = ds.get('roles', None)
        self.tags = ds.get('tags', None)
        self.vault_password = vault_password
        self.environment = ds.get('environment', {})

        if self.tags is None:
            self.tags = []
        elif type(self.tags) in [str, unicode]:
            self.tags = self.tags.split(",")
        elif type(self.tags) != list:
            self.tags = []

        # make sure we have some special internal variables set, which
        # we use later when loading tasks and handlers
        load_vars = dict()
        load_vars['playbook_dir'] = os.path.abspath(self.basedir)
        if self.playbook.inventory.basedir() is not None:
            load_vars['inventory_dir'] = self.playbook.inventory.basedir()
        if self.playbook.inventory.src() is not None:
            load_vars['inventory_file'] = self.playbook.inventory.src()

        # We first load the vars files from the datastructure
        # so we have the default variables to pass into the roles
        self.vars_files = ds.get('vars_files', [])
        if not isinstance(self.vars_files, list):
            raise errors.AnsibleError('vars_files must be a list')
        processed_vars_files = self._update_vars_files_for_host(None)

        # now we load the roles into the datastructure
        self.included_roles = []
        ds = self._load_roles(self.roles, ds)

        # and finally re-process the vars files as they may have been updated
        # by the included roles, but exclude any which have been processed
        self.vars_files = utils.list_difference(ds.get('vars_files', []),
                                                processed_vars_files)
        if not isinstance(self.vars_files, list):
            raise errors.AnsibleError('vars_files must be a list')

        self._update_vars_files_for_host(None)

        # template everything to be efficient, but do not prematurely template
        # tasks/handlers as they may have inventory scope overrides. We also
        # create a set of temporary variables for templating, so we don't
        # trample on the existing vars structures
        _tasks = ds.pop('tasks', [])
        _handlers = ds.pop('handlers', [])

        temp_vars = utils.merge_hash(self.vars, self.vars_file_vars)
        temp_vars = utils.merge_hash(temp_vars, self.playbook.extra_vars)

        ds = template(basedir, ds, temp_vars)
        ds['tasks'] = _tasks
        ds['handlers'] = _handlers

        self._ds = ds

        hosts = ds.get('hosts')
        if hosts is None:
            raise errors.AnsibleError('hosts declaration is required')
        elif isinstance(hosts, list):
            try:
                hosts = ';'.join(hosts)
            except TypeError, e:
                raise errors.AnsibleError('improper host declaration: %s' %
                                          str(e))
Example #13
def add_vars(invars, template, runtime):
    return merge_hash(invars, yaml.load(template.render(runtime)))
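add_vars treats a vars file as a Jinja2 template: render it against the runtime vars, parse the output as YAML, and merge the result over the incoming vars. A self-contained sketch of that render-then-merge step (the shallow merge lambda stands in for merge_hash; requires jinja2 and PyYAML):

import yaml
from jinja2 import Template

def add_vars_sketch(invars, template_text, runtime, merge):
    rendered = Template(template_text).render(runtime)
    data = yaml.safe_load(rendered) or {}
    return merge(invars, data)

defaults_yml = "listen_port: {{ port | default(80) }}\n"
add_vars_sketch({}, defaults_yml, {'port': 8080}, lambda a, b: dict(a, **b))
# -> {'listen_port': 8080}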
Example #14
    def __init__(self, playbook, ds, basedir, vault_password=None):
        ''' constructor loads from a play datastructure '''

        for x in ds.keys():
            if not x in Play.VALID_KEYS:
                raise errors.AnsibleError("%s is not a legal parameter at this level in an Ansible Playbook" % x)

        # allow all playbook keys to be set by --extra-vars
        self.vars             = ds.get('vars', {})
        self.vars_prompt      = ds.get('vars_prompt', {})
        self.playbook         = playbook
        self.vars             = self._get_vars()
        self.vars_file_vars   = dict() # these are vars read in from vars_files:
        self.role_vars        = dict() # these are vars read in from vars/main.yml files in roles
        self.basedir          = basedir
        self.roles            = ds.get('roles', None)
        self.tags             = ds.get('tags', None)
        self.vault_password   = vault_password
        self.environment      = ds.get('environment', {})

        if self.tags is None:
            self.tags = []
        elif type(self.tags) in [ str, unicode ]:
            self.tags = self.tags.split(",")
        elif type(self.tags) != list:
            self.tags = []

        # make sure we have some special internal variables set, which
        # we use later when loading tasks and handlers
        load_vars = dict()
        load_vars['playbook_dir'] = os.path.abspath(self.basedir)
        if self.playbook.inventory.basedir() is not None:
            load_vars['inventory_dir'] = self.playbook.inventory.basedir()
        if self.playbook.inventory.src() is not None:
            load_vars['inventory_file'] = self.playbook.inventory.src()

        # We first load the vars files from the datastructure
        # so we have the default variables to pass into the roles
        self.vars_files = ds.get('vars_files', [])
        if not isinstance(self.vars_files, list):
            raise errors.AnsibleError('vars_files must be a list')
        processed_vars_files = self._update_vars_files_for_host(None)

        # now we load the roles into the datastructure
        self.included_roles = []
        ds = self._load_roles(self.roles, ds)

        # and finally re-process the vars files as they may have been updated
        # by the included roles, but exclude any which have been processed
        self.vars_files = utils.list_difference(ds.get('vars_files', []), processed_vars_files)
        if not isinstance(self.vars_files, list):
            raise errors.AnsibleError('vars_files must be a list')

        self._update_vars_files_for_host(None)

        # template everything to be efficient, but do not prematurely template
        # tasks/handlers as they may have inventory scope overrides. We also
        # create a set of temporary variables for templating, so we don't
        # trample on the existing vars structures
        _tasks    = ds.pop('tasks', [])
        _handlers = ds.pop('handlers', [])

        temp_vars = utils.merge_hash(self.vars, self.vars_file_vars)
        temp_vars = utils.merge_hash(temp_vars, self.playbook.extra_vars)

        ds = template(basedir, ds, temp_vars)
        ds['tasks'] = _tasks
        ds['handlers'] = _handlers

        self._ds = ds

        hosts = ds.get('hosts')
        if hosts is None:
            raise errors.AnsibleError('hosts declaration is required')
        elif isinstance(hosts, list):
            hosts = ';'.join(hosts)
        self.serial           = str(ds.get('serial', 0))
        self.hosts            = hosts
        self.name             = ds.get('name', self.hosts)
        self._tasks           = ds.get('tasks', [])
        self._handlers        = ds.get('handlers', [])
        self.remote_user      = ds.get('remote_user', ds.get('user', self.playbook.remote_user))
        self.remote_port      = ds.get('port', self.playbook.remote_port)
        self.sudo             = ds.get('sudo', self.playbook.sudo)
        self.sudo_user        = ds.get('sudo_user', self.playbook.sudo_user)
        self.transport        = ds.get('connection', self.playbook.transport)
        self.remote_port      = self.remote_port
        self.any_errors_fatal = utils.boolean(ds.get('any_errors_fatal', 'false'))
        self.accelerate       = utils.boolean(ds.get('accelerate', 'false'))
        self.accelerate_port  = ds.get('accelerate_port', None)
        self.accelerate_ipv6  = ds.get('accelerate_ipv6', False)
        self.max_fail_pct     = int(ds.get('max_fail_percentage', 100))
        self.su               = ds.get('su', self.playbook.su)
        self.su_user          = ds.get('su_user', self.playbook.su_user)
        self.no_log           = utils.boolean(ds.get('no_log', 'false'))

        # gather_facts is not a simple boolean, as None means that a 'smart'
        # fact gathering mode will be used, so we need to be careful here as
        # calling utils.boolean(None) returns False
        self.gather_facts = ds.get('gather_facts', None)
        if self.gather_facts:
            self.gather_facts = utils.boolean(self.gather_facts)

        # Fail out if user specifies a sudo param with a su param in a given play
        if (ds.get('sudo') or ds.get('sudo_user')) and (ds.get('su') or ds.get('su_user')):
            raise errors.AnsibleError('sudo params ("sudo", "sudo_user") and su params '
                                      '("su", "su_user") cannot be used together')

        load_vars['role_names'] = ds.get('role_names', [])

        self._tasks      = self._load_tasks(self._ds.get('tasks', []), load_vars)
        self._handlers   = self._load_tasks(self._ds.get('handlers', []), load_vars)

        # apply any missing tags to role tasks
        self._late_merge_role_tags()

        if self.sudo_user != 'root':
            self.sudo = True

        # placeholder for the discovered hosts to be used in this play
        self._play_hosts = None
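The gather_facts handling near the end of the constructor preserves a tri-state: None selects 'smart' fact gathering and must not be run through utils.boolean, which would collapse it to False. A standalone sketch of that guard (this boolean is an assumed stand-in, not Ansible's actual helper):

def boolean(value):
    # assumed stand-in for utils.boolean's accepted truthy spellings
    return str(value).lower() in ('yes', 'on', '1', 'true')

def normalize_gather_facts(raw):
    # None means 'smart' fact gathering and must survive untouched;
    # boolean(None) would collapse it to False
    if raw is None:
        return None
    return boolean(raw)

normalize_gather_facts(None)   # -> None (smart mode)
normalize_gather_facts('yes')  # -> True
normalize_gather_facts(False)  # -> False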
Example #15
    def __init__(self, playbook, ds, basedir, vault_password=None):
        ''' constructor loads from a play datastructure '''

        for x in ds.keys():
            if not x in Play.VALID_KEYS:
                raise errors.AnsibleError("%s is not a legal parameter at this level in an Ansible Playbook" % x)

        # allow all playbook keys to be set by --extra-vars
        self.vars             = ds.get('vars', {})
        self.vars_prompt      = ds.get('vars_prompt', {})
        self.playbook         = playbook
        self.vars             = self._get_vars()
        self.vars_file_vars   = dict() # these are vars read in from vars_files:
        self.role_vars        = dict() # these are vars read in from vars/main.yml files in roles
        self.basedir          = basedir
        self.roles            = ds.get('roles', None)
        self.tags             = ds.get('tags', None)
        self.vault_password   = vault_password

        if self.tags is None:
            self.tags = []
        elif type(self.tags) in [ str, unicode ]:
            self.tags = self.tags.split(",")
        elif type(self.tags) != list:
            self.tags = []

        # make sure we have some special internal variables set, which
        # we use later when loading tasks and handlers
        load_vars = dict()
        load_vars['playbook_dir'] = os.path.abspath(self.basedir)
        if self.playbook.inventory.basedir() is not None:
            load_vars['inventory_dir'] = self.playbook.inventory.basedir()
        if self.playbook.inventory.src() is not None:
            load_vars['inventory_file'] = self.playbook.inventory.src()

        # We first load the vars files from the datastructure
        # so we have the default variables to pass into the roles
        self.vars_files = ds.get('vars_files', [])
        if not isinstance(self.vars_files, list):
            raise errors.AnsibleError('vars_files must be a list')
        processed_vars_files = self._update_vars_files_for_host(None)

        # now we load the roles into the datastructure
        self.included_roles = []
        ds = self._load_roles(self.roles, ds)

        # and finally re-process the vars files as they may have been updated
        # by the included roles, but exclude any which have been processed
        self.vars_files = utils.list_difference(ds.get('vars_files', []), processed_vars_files)
        if not isinstance(self.vars_files, list):
            raise errors.AnsibleError('vars_files must be a list')

        self._update_vars_files_for_host(None)

        # template everything to be efficient, but do not prematurely template
        # tasks/handlers as they may have inventory scope overrides. We also
        # create a set of temporary variables for templating, so we don't
        # trample on the existing vars structures
        _tasks    = ds.pop('tasks', [])
        _handlers = ds.pop('handlers', [])

        temp_vars = utils.merge_hash(self.vars, self.vars_file_vars)
        temp_vars = utils.merge_hash(temp_vars, self.playbook.extra_vars)

        ds = template(basedir, ds, temp_vars)
        ds['tasks'] = _tasks
        ds['handlers'] = _handlers

        self._ds = ds

        hosts = ds.get('hosts')
        if hosts is None:
            raise errors.AnsibleError('hosts declaration is required')
        elif isinstance(hosts, list):
            hosts = ';'.join(hosts)
        self.serial           = str(ds.get('serial', 0))
        self.hosts            = hosts
        self.name             = ds.get('name', self.hosts)
        self._tasks           = ds.get('tasks', [])
        self._handlers        = ds.get('handlers', [])
        self.remote_user      = ds.get('remote_user', ds.get('user', self.playbook.remote_user))
        self.remote_port      = ds.get('port', self.playbook.remote_port)
        self.sudo             = ds.get('sudo', self.playbook.sudo)
        self.sudo_user        = ds.get('sudo_user', self.playbook.sudo_user)
        self.transport        = ds.get('connection', self.playbook.transport)
        self.remote_port      = self.remote_port
        self.any_errors_fatal = utils.boolean(ds.get('any_errors_fatal', 'false'))
        self.accelerate       = utils.boolean(ds.get('accelerate', 'false'))
        self.accelerate_port  = ds.get('accelerate_port', None)
        self.accelerate_ipv6  = ds.get('accelerate_ipv6', False)
        self.max_fail_pct     = int(ds.get('max_fail_percentage', 100))
        self.su               = ds.get('su', self.playbook.su)
        self.su_user          = ds.get('su_user', self.playbook.su_user)
        self.no_log           = utils.boolean(ds.get('no_log', 'false'))

        # gather_facts is not a simple boolean, as None means that a 'smart'
        # fact gathering mode will be used, so we need to be careful here as
        # calling utils.boolean(None) returns False
        self.gather_facts = ds.get('gather_facts', None)
        if self.gather_facts:
            self.gather_facts = utils.boolean(self.gather_facts)

        # Fail out if user specifies a sudo param with a su param in a given play
        if (ds.get('sudo') or ds.get('sudo_user')) and (ds.get('su') or ds.get('su_user')):
            raise errors.AnsibleError('sudo params ("sudo", "sudo_user") and su params '
                                      '("su", "su_user") cannot be used together')

        load_vars['role_names'] = ds.get('role_names', [])

        self._tasks      = self._load_tasks(self._ds.get('tasks', []), load_vars)
        self._handlers   = self._load_tasks(self._ds.get('handlers', []), load_vars)

        # apply any missing tags to role tasks
        self._late_merge_role_tags()

        if self.sudo_user != 'root':
            self.sudo = True

        # placeholder for the discovered hosts to be used in this play
        self._play_hosts = None
Example #16
    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
        def not_omit_token(value):
            return value != self.runner.omit_token

        (
        sources_complex_args_list,
        passthru_complex_args_map
        ) = self._partition_options(complex_args)

        sources_complex_args_map = self._make_sources_map(sources_complex_args_list)
        sources_complex_args_map = self._filter_recursive(not_omit_token, sources_complex_args_map)
        passthru_complex_args_map = self._filter_recursive(not_omit_token, passthru_complex_args_map)

        (
        sources_module_args_hash_list,
        passthru_module_args_hash
        ) = self._partition_options(utils.parse_kv(module_args))

        sources_module_args_map = self._make_sources_map(sources_module_args_hash_list)
        sources_module_args_map = self._filter_recursive(not_omit_token, sources_module_args_map)
        passthru_module_args_hash = self._filter_recursive(not_omit_token, passthru_module_args_hash)

        sources_options_map = utils.merge_hash(sources_complex_args_map, sources_module_args_map)
        passthru_options_map = utils.merge_hash(passthru_complex_args_map, passthru_module_args_hash)

        skip_action_plugin = utils.boolean(passthru_options_map.get('skip_action_plugin', False))
        try:
            del(passthru_options_map['skip_action_plugin'])
        except KeyError:
            pass

        passthru_options_keys = passthru_options_map.keys()
        if len(passthru_options_keys) > 1:
            raise errors.AnsibleError("Only one module can be run at a time; saw modules: %s"
                                      % ', '.join(passthru_options_keys))

        # Iterate over 'copy' files
        for src, options in sources_options_map.iteritems():
            # Construct remote filesystem path
            dest = options.get('dest', None)

            if dest is None:
                if tmp is None:
                    tmp = self.runner._make_tmp_path(conn)
                dest = tmp
            # Interpret relative paths as starting with the remote tmp
            # directory
            elif not dest.startswith('/'):
                if tmp is None:
                    tmp = self.runner._make_tmp_path(conn)
                dest = os.path.join(tmp, dest)

            copy_module_args_hash = sources_module_args_map.get(src, {})
            copy_module_args_hash.update(dict(dest=dest))
            copy_module_args = utils.serialize_args(copy_module_args_hash)
            copy_complex_args = sources_complex_args_map.get(src, None)

            # Copy source to destination.
            #
            # XXX because the 'copy' action_plugin doesn't pass through
            # persist_files or delete_remote_tmp, we need to make a temporary
            # adjustment to C.DEFAULT_KEEP_REMOTE_FILES.  The 'as' clause is
            # necessary in order to affect C.DEFAULT_KEEP_REMOTE_FILES in the
            # scope of ansible.runner.
            return_data = None
            with tmp_keep_remote_files(True) as C.DEFAULT_KEEP_REMOTE_FILES:
                return_data = self._copy(conn, tmp, 'copy', copy_module_args, inject,
                                 complex_args=copy_complex_args)

            # Fail here if files weren't copied over correctly
            if not return_data.is_successful():
                return return_data, 'copy', copy_module_args, copy_complex_args

        for passthru_module_name, passthru_options in passthru_options_map.iteritems():
            passthru_complex_args = passthru_options_map.get(passthru_module_name, None)
            passthru_module_args = utils.serialize_args(passthru_module_args_hash)

            # Handle things like 'command: do_something'
            if not isinstance(passthru_complex_args, dict):
                if isinstance(passthru_complex_args, basestring):
                    passthru_module_args = passthru_complex_args
                passthru_complex_args = None

            # Instantiate the action_plugin for the wanted module
            return_data = None
            if not skip_action_plugin:
                passthru_handler = utils.plugins.action_loader.get(passthru_module_name, self.runner)
                if passthru_handler:
                    try:
                        return_data = passthru_handler.run(conn, tmp, passthru_module_name,
                                                    passthru_module_args, inject,
                                                    complex_args=passthru_complex_args,
                                                    **kwargs)
                    except Exception as err:
                        return_data = ReturnData(conn=conn, result=dict(failed=True, msg="Encountered error in %s module: %s" %
                                                             (passthru_module_name, str(err))))

            else:
                try:
                    return_data = self.runner._execute_module(conn, tmp, passthru_module_name, passthru_module_args,
                                                              inject=inject, complex_args=passthru_complex_args, **kwargs)
                except Exception as err:
                    return_data = ReturnData(conn=conn, result=dict(failed=True, msg="Encountered error in %s module: %s" %
                                                            (passthru_module_name, str(err))))

            return return_data, passthru_module_name, passthru_module_args, passthru_complex_args
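The tmp_keep_remote_files context manager used with the 'as' clause above temporarily rebinds a module-level constant and restores it on exit. A self-contained sketch of that pattern, with a SimpleNamespace standing in for the ansible.constants module (C):

from contextlib import contextmanager
import types

C = types.SimpleNamespace(DEFAULT_KEEP_REMOTE_FILES=False)  # stand-in

@contextmanager
def tmp_keep_remote_files(value):
    saved = C.DEFAULT_KEEP_REMOTE_FILES
    try:
        yield value
    finally:
        C.DEFAULT_KEEP_REMOTE_FILES = saved

# the 'as' clause rebinds the constant to the yielded value for the
# duration of the block; the finally clause restores the original
with tmp_keep_remote_files(True) as C.DEFAULT_KEEP_REMOTE_FILES:
    assert C.DEFAULT_KEEP_REMOTE_FILES is True
assert C.DEFAULT_KEEP_REMOTE_FILES is False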
Example #17
                    else:
                        results.update(data)
                    break

        # load vars in inventory_dir/host_vars/name_of_host
        p = os.path.join(basedir, "host_vars/%s" % host.name)
        paths = [p, '.'.join([p, 'yml']), '.'.join([p, 'yaml'])]
        for path in paths:
            if os.path.exists(path):
                data = utils.parse_yaml_from_file(path)
                if type(data) != dict:
                    raise errors.AnsibleError("%s must be stored as a dictionary/hash" % path)
                if C.DEFAULT_HASH_BEHAVIOUR == "merge":
                    # let data content override results if needed
                    results = utils.merge_hash(results, data)
                else:
                    results.update(data)
                break
        return results