def parse_kv(args):
    ''' convert a string of key/value items to a dict '''
    options = {}
    if args is not None:
        try:
            vargs = split_args(args)
        except ValueError as ve:
            if 'no closing quotation' in str(ve).lower():
                raise errors.AnsibleError("error parsing argument string, try quoting the entire line.")
            else:
                raise
        for x in vargs:
            if "=" in x:
                k, v = x.split("=", 1)
                options[k.strip()] = unquote(v.strip())
    return options
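# A minimal usage sketch (not part of the original source), assuming split_args
# and unquote behave as in the surrounding module: tokens are split on
# whitespace with quoting respected, each key=value token is split on the
# first '=', and surrounding quotes are stripped from values.
#
#   parse_kv('src=/tmp/src dest="/tmp/target dir" mode=0644')
#   # -> {'src': '/tmp/src', 'dest': '/tmp/target dir', 'mode': '0644'}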
def _load_tasks(self, tasks, vars=None, default_vars=None, sudo_vars=None,
                additional_conditions=None, original_file=None, role_name=None):
    ''' handle task and handler include statements '''

    results = []
    if tasks is None:
        # support empty handler files, and the like.
        tasks = []
    if additional_conditions is None:
        additional_conditions = []
    if vars is None:
        vars = {}
    if default_vars is None:
        default_vars = {}
    if sudo_vars is None:
        sudo_vars = {}

    old_conditions = list(additional_conditions)

    for x in tasks:

        # prevent assigning the same conditions to each task on an include
        included_additional_conditions = list(old_conditions)

        if not isinstance(x, dict):
            raise errors.AnsibleError("expecting dict; got: %s, error in %s" % (x, original_file))

        # evaluate sudo vars for current and child tasks
        included_sudo_vars = {}
        for k in ["sudo", "sudo_user"]:
            if k in x:
                included_sudo_vars[k] = x[k]
            elif k in sudo_vars:
                included_sudo_vars[k] = sudo_vars[k]
                x[k] = sudo_vars[k]

        if 'meta' in x:
            if x['meta'] == 'flush_handlers':
                results.append(Task(self, x))
                continue

        task_vars = self.vars.copy()
        task_vars.update(vars)
        if original_file:
            task_vars['_original_file'] = original_file

        if 'include' in x:
            tokens = split_args(str(x['include']))
            included_additional_conditions = list(additional_conditions)
            include_vars = {}
            for k in x:
                if k.startswith("with_"):
                    if original_file:
                        offender = " (in %s)" % original_file
                    else:
                        offender = ""
                    utils.deprecated("include + with_items is a removed deprecated feature" + offender, "1.5", removed=True)
                elif k.startswith("when_"):
                    utils.deprecated("\"when_<criteria>:\" is a removed deprecated feature, use the simplified 'when:' conditional directly", None, removed=True)
                elif k == 'when':
                    if isinstance(x[k], (basestring, bool)):
                        included_additional_conditions.append(x[k])
                    elif type(x[k]) is list:
                        included_additional_conditions.extend(x[k])
                elif k in ("include", "vars", "default_vars", "sudo", "sudo_user", "role_name", "no_log"):
                    continue
                else:
                    include_vars[k] = x[k]

            default_vars = x.get('default_vars', {})
            if not default_vars:
                default_vars = self.default_vars
            else:
                default_vars = utils.combine_vars(self.default_vars, default_vars)

            # append the vars defined with the include (from above)
            # as well as the old-style 'vars' element. The old-style
            # vars are given higher precedence here (just in case)
            task_vars = utils.combine_vars(task_vars, include_vars)
            if 'vars' in x:
                task_vars = utils.combine_vars(task_vars, x['vars'])

            new_role = None
            if 'role_name' in x:
                new_role = x['role_name']

            mv = task_vars.copy()
            for t in tokens[1:]:
                (k, v) = t.split("=", 1)
                v = unquote(v)
                mv[k] = template(self.basedir, v, mv)

            dirname = self.basedir
            if original_file:
                dirname = os.path.dirname(original_file)

            include_file = template(dirname, tokens[0], mv)
            include_filename = utils.path_dwim(dirname, include_file)

            data = utils.parse_yaml_from_file(include_filename, vault_password=self.vault_password)
            if 'role_name' in x and data is not None:
                for y in data:
                    if isinstance(y, dict) and 'include' in y:
                        y['role_name'] = new_role
            loaded = self._load_tasks(data, mv, default_vars, included_sudo_vars,
                                      list(included_additional_conditions),
                                      original_file=include_filename, role_name=new_role)
            results += loaded
        elif type(x) == dict:
            task = Task(
                self, x,
                module_vars=task_vars,
                default_vars=default_vars,
                additional_conditions=list(additional_conditions),
                role_name=role_name
            )
            results.append(task)
        else:
            raise Exception("unexpected task type")

    for x in results:
        if self.tags is not None:
            x.tags.extend(self.tags)

    return results
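# Illustrative only (not from the original source): a hypothetical parsed task
# list of the shape _load_tasks consumes, i.e. what utils.parse_yaml_from_file
# would return for a tasks file. The key=value token on the include line feeds
# mv via template(), the inline 'vars' are merged into task_vars, and the
# 'when' clause lands in included_additional_conditions. File names, module
# names, and variable names here are made up.
#
#   example_tasks = [
#       {'name': 'install apache', 'yum': 'name=httpd state=present'},
#       {'include': 'webserver.yml http_port=8080',
#        'vars': {'app_user': 'web'},
#        'when': 'ansible_os_family == "RedHat"'},
#   ]
#   # tasks = play._load_tasks(example_tasks, vars={}, original_file='tasks/main.yml')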
class PlayBook(object):
    '''
    runs an ansible playbook, given as a datastructure or YAML filename.
    A playbook is a deployment, config management, or automation based
    set of commands to run in series.

    multiple plays/tasks do not execute simultaneously, but tasks in each
    pattern do execute in parallel (according to the number of forks
    requested) among the hosts they address
    '''

    # *****************************************************

    def __init__(self,
        playbook=None,
        host_list=C.DEFAULT_HOST_LIST,
        module_path=None,
        forks=C.DEFAULT_FORKS,
        timeout=C.DEFAULT_TIMEOUT,
        remote_user=C.DEFAULT_REMOTE_USER,
        remote_pass=C.DEFAULT_REMOTE_PASS,
        remote_port=None,
        transport=C.DEFAULT_TRANSPORT,
        private_key_file=C.DEFAULT_PRIVATE_KEY_FILE,
        callbacks=None,
        runner_callbacks=None,
        stats=None,
        extra_vars=None,
        only_tags=None,
        skip_tags=None,
        subset=C.DEFAULT_SUBSET,
        inventory=None,
        check=False,
        diff=False,
        any_errors_fatal=False,
        vault_password=False,
        force_handlers=False,
        # privilege escalation
        become=C.DEFAULT_BECOME,
        become_method=C.DEFAULT_BECOME_METHOD,
        become_user=C.DEFAULT_BECOME_USER,
        become_pass=None,
    ):

        """
        playbook:         path to a playbook file
        host_list:        path to a file like /etc/ansible/hosts
        module_path:      path to ansible modules, like /usr/share/ansible/
        forks:            desired level of parallelism
        timeout:          connection timeout
        remote_user:      run as this user if not specified in a particular play
        remote_pass:      use this remote password (for all plays) vs using SSH keys
        remote_port:      default remote port to use if not specified with the host or play
        transport:        how to connect to hosts that don't specify a transport (local, paramiko, etc)
        callbacks:        output callbacks for the playbook
        runner_callbacks: more callbacks, this time for the runner API
        stats:            holds aggregate data about events occurring to each host
        inventory:        can be specified instead of host_list to use a pre-existing inventory object
        check:            don't change anything, just try to detect some potential changes
        any_errors_fatal: terminate the entire execution immediately when one of the hosts has failed
        force_handlers:   continue to notify and run handlers even if a task fails
        """

        self.SETUP_CACHE = SETUP_CACHE
        self.VARS_CACHE = VARS_CACHE

        arguments = []
        if playbook is None:
            arguments.append('playbook')
        if callbacks is None:
            arguments.append('callbacks')
        if runner_callbacks is None:
            arguments.append('runner_callbacks')
        if stats is None:
            arguments.append('stats')
        if arguments:
            raise Exception('PlayBook missing required arguments: %s' % ', '.join(arguments))

        if extra_vars is None:
            extra_vars = {}
        if only_tags is None:
            only_tags = ['all']
        if skip_tags is None:
            skip_tags = []

        self.check = check
        self.diff = diff
        self.module_path = module_path
        self.forks = forks
        self.timeout = timeout
        self.remote_user = remote_user
        self.remote_pass = remote_pass
        self.remote_port = remote_port
        self.transport = transport
        self.callbacks = callbacks
        self.runner_callbacks = runner_callbacks
        self.stats = stats
        self.extra_vars = extra_vars
        self.global_vars = {}
        self.private_key_file = private_key_file
        self.only_tags = only_tags
        self.skip_tags = skip_tags
        self.any_errors_fatal = any_errors_fatal
        self.vault_password = vault_password
        self.force_handlers = force_handlers

        self.become = become
        self.become_method = become_method
        self.become_user = become_user
        self.become_pass = become_pass

        self.callbacks.playbook = self
        self.runner_callbacks.playbook = self

        if inventory is None:
            self.inventory = ansible.inventory.Inventory(host_list)
            self.inventory.subset(subset)
        else:
            self.inventory = inventory

        if self.module_path is not None:
            utils.plugins.module_finder.add_directory(self.module_path)

        self.basedir = os.path.dirname(playbook) or '.'
        utils.plugins.push_basedir(self.basedir)

        # let inventory know the playbook basedir so it can load more vars
        self.inventory.set_playbook_basedir(self.basedir)

        vars = extra_vars.copy()
        vars['playbook_dir'] = os.path.abspath(self.basedir)
        if self.inventory.basedir() is not None:
            vars['inventory_dir'] = self.inventory.basedir()

        if self.inventory.src() is not None:
            vars['inventory_file'] = self.inventory.src()

        self.filename = playbook
        (self.playbook, self.play_basedirs) = self._load_playbook_from_file(playbook, vars)
        ansible.callbacks.load_callback_plugins()
        ansible.callbacks.set_playbook(self.callbacks, self)

        self._ansible_version = utils.version_info(gitinfo=True)

    # *****************************************************

    def _get_playbook_vars(self, play_ds, existing_vars):
        '''
        Gets the vars specified with the play and blends them
        with any existing vars that have already been read in
        '''
        new_vars = existing_vars.copy()
        if 'vars' in play_ds:
            if isinstance(play_ds['vars'], dict):
                new_vars.update(play_ds['vars'])
            elif isinstance(play_ds['vars'], list):
                for v in play_ds['vars']:
                    new_vars.update(v)
        return new_vars

    # *****************************************************

    def _get_include_info(self, play_ds, basedir, existing_vars={}):
        '''
        Gets any key=value pairs specified with the included file name
        and returns the merged vars along with the path
        '''
        new_vars = existing_vars.copy()
        tokens = split_args(play_ds.get('include', ''))
        for t in tokens[1:]:
            try:
                (k, v) = unquote(t).split("=", 1)
                new_vars[k] = template(basedir, v, new_vars)
            except ValueError as e:
                raise errors.AnsibleError('included playbook variables must be in the form k=v, got: %s' % t)

        return (new_vars, unquote(tokens[0]))
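# A minimal construction sketch, not part of the original module: PlayBook
# requires playbook, callbacks, runner_callbacks, and stats, as enforced by the
# argument check in __init__ above. The callback classes and pb.run() come from
# the ansible 1.x API as used by the ansible-playbook CLI; 'site.yml' and
# 'hosts' are placeholder paths.
#
#   import ansible.callbacks
#   stats = ansible.callbacks.AggregateStats()
#   playbook_cb = ansible.callbacks.PlaybookCallbacks(verbose=1)
#   runner_cb = ansible.callbacks.PlaybookRunnerCallbacks(stats, verbose=1)
#   pb = PlayBook(playbook='site.yml', host_list='hosts',
#                 callbacks=playbook_cb, runner_callbacks=runner_cb,
#                 stats=stats, check=True)
#   results = pb.run()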