def _get_role_path(self, role):
    """
    Return (path, role_vars) for a role reference.

    ``role`` may be a plain name or a dict with a 'role' key plus
    parameterized variables; the dict form yields those extra entries
    as role_vars.  Raises AnsibleError when the role directory cannot
    be found under ./roles or relative to the playbook basedir.
    """
    orig_path = template(self.basedir, role, self.vars)

    role_vars = {}
    # use isinstance instead of the ``type(x) == dict`` anti-pattern
    if isinstance(orig_path, dict):
        # a parameterized role: {'role': name, var1: ..., ...}
        role_name = orig_path.get('role', None)
        if role_name is None:
            raise errors.AnsibleError("expected a role name in dictionary: %s" % orig_path)
        role_vars = orig_path
        orig_path = role_name

    path = utils.path_dwim(self.basedir, os.path.join('roles', orig_path))
    if not os.path.isdir(path) and not orig_path.startswith(".") and not orig_path.startswith("/"):
        # not under roles/ -- try the basedir directly before giving up
        path2 = utils.path_dwim(self.basedir, orig_path)
        if not os.path.isdir(path2):
            raise errors.AnsibleError("cannot find role in %s or %s" % (path, path2))
        path = path2
    elif not os.path.isdir(path):
        raise errors.AnsibleError("cannot find role in %s" % (path))

    return (path, role_vars)
def _rolepath(basedir, role):
    """Locate the on-disk directory for *role*; return it, or None."""
    candidates = [
        # if included from a playbook
        path_dwim(basedir, os.path.join('roles', role)),
        path_dwim(basedir, role),
        # if included from roles/[role]/meta/main.yml
        path_dwim(basedir, os.path.join('..', '..', '..', 'roles', role)),
        path_dwim(basedir, os.path.join('..', '..', role)),
    ]

    # configured role search path, split on the platform path separator
    if C.DEFAULT_ROLES_PATH:
        for location in C.DEFAULT_ROLES_PATH.split(os.pathsep):
            candidates.append(path_dwim(os.path.expanduser(location), role))

    # first existing directory wins
    for candidate in candidates:
        if os.path.isdir(candidate):
            return candidate
    return None
def generate_filenames(host, inject, filename): """ Render the raw filename into 3 forms """ # filename2 is the templated version of the filename, which will # be fully rendered if any variables contained within it are # non-inventory related filename2 = template(self.basedir, filename, self.vars) # filename3 is the same as filename2, but when the host object is # available, inventory variables will be expanded as well since the # name is templated with the injected variables filename3 = filename2 if host is not None: filename3 = template(self.basedir, filename2, inject) # filename4 is the dwim'd path, but may also be mixed-scope, so we use # both play scoped vars and host scoped vars to template the filepath if self._has_vars_in(filename3) and host is not None: inject.update(self.vars) filename4 = template(self.basedir, filename3, inject) filename4 = utils.path_dwim(self.basedir, filename4) else: filename4 = utils.path_dwim(self.basedir, filename3) return filename2, filename3, filename4
def _rolepath(basedir, role):
    """Find the directory for *role*; also register its library/ dir if found.

    Returns the first existing candidate path, or None.
    """
    candidates = [
        # if included from a playbook
        path_dwim(basedir, os.path.join('roles', role)),
        path_dwim(basedir, role),
        # if included from roles/[role]/meta/main.yml
        path_dwim(basedir, os.path.join('..', '..', '..', 'roles', role)),
        path_dwim(basedir, os.path.join('..', '..', role)),
    ]

    if C.DEFAULT_ROLES_PATH:
        locations = C.DEFAULT_ROLES_PATH
        # the setting may arrive pre-split or as a single pathsep string
        if isinstance(locations, basestring):
            locations = locations.split(os.pathsep)
        for location in locations:
            candidates.append(path_dwim(os.path.expanduser(location), role))

    found = None
    for candidate in candidates:
        if os.path.isdir(candidate):
            found = candidate
            break

    if found:
        # make any role-local modules available
        _load_library_if_exists(os.path.join(found, 'library'))
    return found
def get_paths(self, inject):
    """
    Build the ordered, de-duplicated list of directories to search.

    Combines configured lookup_file_paths, the including role's
    templates/ and vars/ sibling directories, the playbook directory,
    and the current basedir, in that order.
    """
    paths = []

    for path in C.get_config(C.p, C.DEFAULTS, 'lookup_file_paths', None, [], islist=True):
        path = utils.unfrackpath(path)
        if os.path.exists(path):
            paths.append(path)

    if '_original_file' in inject:
        # check the templates and vars directories too, if they exist
        for roledir in ('templates', 'vars'):
            path = utils.path_dwim(self.basedir, os.path.join(self.basedir, '..', roledir))
            if os.path.exists(path):
                paths.append(path)

    if 'playbook_dir' in inject:
        paths.append(inject['playbook_dir'])

    paths.append(utils.path_dwim(self.basedir, ''))

    # de-duplicate while preserving order; the original used a
    # side-effecting list comprehension with O(n^2) ``list.count`` calls
    seen = set()
    unique = []
    for p in paths:
        if p not in seen:
            seen.add(p)
            unique.append(p)
    return unique
def _build_role_dependencies(self, roles, dep_stack, passed_vars=None, level=0):
    """
    Recursively resolve role dependencies declared in meta/main.yml.

    Appends [role, role_path, role_vars] entries to dep_stack in
    dependency-first order and returns it.  Raises AnsibleError when
    recursion exceeds 20 levels (guards against dependency cycles).
    """
    # avoid the shared-mutable-default pitfall of ``passed_vars={}``
    if passed_vars is None:
        passed_vars = {}
    # this number is arbitrary, but it seems sane
    if level > 20:
        raise errors.AnsibleError("too many levels of recursion while resolving role dependencies")
    for role in roles:
        role_path, role_vars = self._get_role_path(role)
        # the meta directory contains the yaml that should
        # hold the list of dependencies (if any)
        meta = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'meta')))
        if os.path.isfile(meta):
            data = utils.parse_yaml_from_file(meta)
            if data:
                dependencies = data.get('dependencies', [])
                for dep in dependencies:
                    (dep_path, dep_vars) = self._get_role_path(dep)
                    # renamed from ``vars`` to avoid shadowing the builtin
                    vars_file = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'vars')))
                    vars_data = {}
                    if os.path.isfile(vars_file):
                        vars_data = utils.parse_yaml_from_file(vars_file)
                    # caller-supplied vars never override what the
                    # dependency sets for itself
                    dep_vars.update(role_vars)
                    for k in passed_vars.keys():
                        if k not in dep_vars:
                            dep_vars[k] = passed_vars[k]
                    for k in vars_data.keys():
                        if k not in dep_vars:
                            dep_vars[k] = vars_data[k]
                    if 'role' in dep_vars:
                        del dep_vars['role']
                    self._build_role_dependencies([dep], dep_stack, passed_vars=dep_vars, level=level + 1)
                    dep_stack.append([dep, dep_path, dep_vars])
        # only add the current role when we're at the top level,
        # otherwise we'll end up in a recursive loop
        if level == 0:
            dep_stack.append([role, role_path, role_vars])
    return dep_stack
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    ''' handler for template operations '''

    # note: since this module just calls the copy module, the --check mode support
    # can be implemented entirely over there

    if not self.runner.is_playbook:
        raise errors.AnsibleError("in current versions of ansible, templates are only usable in playbooks")

    # load up options; complex_args first, then k=v args override
    options = {}
    if complex_args:
        options.update(complex_args)
    options.update(utils.parse_kv(module_args))

    source = options.get('src', None)
    dest = options.get('dest', None)

    if (source is None and 'first_available_file' not in inject) or dest is None:
        result = dict(failed=True, msg="src and dest are required")
        return ReturnData(conn=conn, comm_ok=False, result=result)

    # if we have first_available_file in our vars
    # look up the files and use the first one we find as src
    if 'first_available_file' in inject:
        found = False
        for fn in self.runner.module_vars.get('first_available_file'):
            fn_orig = fn
            fnt = template.template(self.runner.basedir, fn, inject)
            fnd = utils.path_dwim(self.runner.basedir, fnt)
            if not os.path.exists(fnd) and '_original_file' in inject:
                # fall back to the including role's templates/ directory
                fnd = utils.path_dwim_relative(inject['_original_file'], 'templates', fnt, self.runner.basedir, check=False)
            if os.path.exists(fnd):
                source = fnd
                found = True
                break
        if not found:
            result = dict(failed=True, msg="could not find src in first_available_file list")
            return ReturnData(conn=conn, comm_ok=False, result=result)
    else:
        # template the source path string itself before resolving it
        source = template.template(self.runner.basedir, source, inject)
        if '_original_file' in inject:
            source = utils.path_dwim_relative(inject['_original_file'], 'templates', source, self.runner.basedir)
        else:
            source = utils.path_dwim(self.runner.basedir, source)

    if dest.endswith("/"):
        # CCTODO: Fix path for Windows hosts.
        base = os.path.basename(source)
        dest = os.path.join(dest, base)

    # template the source data locally & get ready to transfer
    try:
        resultant = template.template_from_file(self.runner.basedir, source, inject, vault_password=self.runner.vault_pass)
    except Exception, e:
        result = dict(failed=True, msg=type(e).__name__ + ": " + str(e))
        return ReturnData(conn=conn, comm_ok=False, result=result)
    # NOTE(review): this chunk appears truncated here -- the transfer of
    # ``resultant`` and the final return are not visible in this view.
def _include_children(basedir, k, v, parent_type):
    """Resolve an ``include: filename.yml tags=blah`` line to a child entry."""
    (command, args, kwargs) = tokenize("{0}: {1}".format(k, v))

    candidate = path_dwim(basedir, args[0])
    if not os.path.exists(candidate) and not basedir.endswith('tasks'):
        # includes outside a tasks/ dir resolve relative to a sibling tasks/
        candidate = path_dwim(os.path.join(basedir, '..', 'tasks'), v)
    return [{'path': candidate, 'type': parent_type}]
def _load_tasks(self, ds, keyname):
    ''' handle task and handler include statements '''
    tasks = ds.get(keyname, [])
    results = []
    for x in tasks:
        if 'include' in x:
            task_vars = self.vars.copy()
            # include line looks like: "file.yml key=val key2=val2"
            tokens = shlex.split(x['include'])
            if 'with_items' in x:
                items = utils.varReplaceWithItems(self.basedir, x['with_items'], task_vars)
            else:
                # single pass with an empty item
                items = ['']
            for item in items:
                mv = task_vars.copy()
                mv['item'] = item
                # k=v pairs on the include line become extra vars
                for t in tokens[1:]:
                    (k, v) = t.split("=", 1)
                    mv[k] = utils.varReplaceWithItems(self.basedir, v, mv)
                # the filename itself may be templated per item
                include_file = utils.template(self.basedir, tokens[0], mv)
                data = utils.parse_yaml_from_file(utils.path_dwim(self.basedir, include_file))
                for y in data:
                    results.append(Task(self, y, module_vars=mv.copy()))
        elif type(x) == dict:
            task_vars = self.vars.copy()
            results.append(Task(self, x, module_vars=task_vars))
        else:
            raise Exception("unexpected task type")
    # propagate play-level tags onto every produced task
    for x in results:
        if self.tags is not None:
            x.tags.extend(self.tags)
    return results
def _execute_fetch(self, conn, host, tmp):
    ''' handler for fetch operations '''

    # load up options
    options = utils.parse_kv(self.module_args)
    source = options.get('src', None)
    dest = options.get('dest', None)
    if source is None or dest is None:
        return (host, True, dict(failed=True, msg="src and dest are required"), '')

    # files are saved in dest dir, with a subdir for each host, then the filename
    filename = os.path.basename(source)
    dest = "%s/%s/%s" % (utils.path_dwim(self.basedir, dest), host, filename)

    # compare old and new md5 for support of change hooks
    local_md5 = None
    if os.path.exists(dest):
        local_md5 = os.popen("md5sum %s" % dest).read().split()[0]
    remote_md5 = self._exec_command(conn, "md5sum %s" % source, tmp, True)[0].split()[0]

    if remote_md5 != local_md5:
        # create the containing directories, if needed.  Guarded: the
        # original called os.makedirs unconditionally, which raises
        # OSError whenever a stale local copy already exists (the dir
        # is present but the md5 differs).
        destdir = os.path.dirname(dest)
        if not os.path.isdir(destdir):
            os.makedirs(destdir)

        # fetch the file and check for changes
        conn.fetch_file(source, dest)
        new_md5 = os.popen("md5sum %s" % dest).read().split()[0]
        if new_md5 != remote_md5:
            return (host, True, dict(failed=True, msg="md5 mismatch", md5sum=new_md5), '')
        return (host, True, dict(changed=True, md5sum=new_md5), '')
    else:
        return (host, True, dict(changed=False, md5sum=local_md5), '')
def run(self, terms, inject=None, **kwargs):
    # password lookup: generate (and persist) a random password per path.
    # NOTE(review): this chunk appears truncated -- the branch handling an
    # already-existing password file and the final return are not visible.
    terms = utils.listify_lookup_plugin_terms(terms, self.basedir, inject)
    ret = []
    for term in terms:
        # you can't have escaped spaces in your pathname
        params = term.split()
        relpath = params[0]
        paramvals = {
            'length': LookupModule.LENGTH,
            'encrypt': None,
            'chars': ['ascii_letters', 'digits', ".,:-_"],
        }
        # get non-default parameters if specified
        try:
            for param in params[1:]:
                name, value = param.split('=')
                assert(name in paramvals)
                if name == 'length':
                    paramvals[name] = int(value)
                elif name == 'chars':
                    use_chars = []
                    # ",," escapes a literal comma inside the chars list
                    if ",," in value:
                        use_chars.append(',')
                    use_chars.extend(value.replace(',,', ',').split(','))
                    paramvals['chars'] = use_chars
                else:
                    paramvals[name] = value
        except (ValueError, AssertionError), e:
            raise errors.AnsibleError(e)
        length = paramvals['length']
        encrypt = paramvals['encrypt']
        use_chars = paramvals['chars']
        # get password or create it if file doesn't exist
        path = utils.path_dwim(self.basedir, relpath)
        if not os.path.exists(path):
            pathdir = os.path.dirname(path)
            if not os.path.isdir(pathdir):
                try:
                    os.makedirs(pathdir, mode=0700)
                except OSError, e:
                    raise errors.AnsibleError("cannot create the path for the password lookup: %s (error was %s)" % (pathdir, str(e)))
            # expand named character classes via the string module
            # (ascii_letters, digits, ...); quotes are stripped out
            chars = "".join([getattr(string, c, c) for c in use_chars]).replace('"', '').replace("'", '')
            password = ''.join(random.choice(chars) for _ in range(length))
            if encrypt is not None:
                salt = self.random_salt()
                content = '%s salt=%s' % (password, salt)
            else:
                content = password
            with open(path, 'w') as f:
                os.chmod(path, 0600)
                f.write(content + '\n')
def _execute_copy(self, conn, host, tmp):
    ''' handler for file transfer operations '''

    # load up options
    options = utils.parse_kv(self.module_args)
    source = options.get('src', None)
    dest = options.get('dest', None)
    if source is None or dest is None:
        return (host, True, dict(failed=True, msg="src and dest are required"), '')

    # stage the file at a remote tmp location
    remote_src = tmp + source.split('/')[-1]
    conn.put_file(utils.path_dwim(self.basedir, source), remote_src)

    # install the copy module
    self.module_name = 'copy'
    copy_module = self._transfer_module(conn, tmp, 'copy')

    # run the copy module against the staged file
    copy_args = "src=%s dest=%s" % (remote_src, dest)
    (raw_result, err, executed) = self._execute_module(conn, tmp, copy_module, copy_args)
    (host, ok, data, err) = self._return_from_module(conn, host, raw_result, err, executed)

    if not ok:
        return (host, ok, data, err)
    return self._chain_file_module(conn, tmp, data, err, options, executed)
def _execute_template(self, conn, host, tmp):
    """ handler for template operations """

    # load up options
    options = utils.parse_kv(self.module_args)
    source = options["src"]
    dest = options["dest"]
    metadata = options.get("metadata", None)
    if metadata is None:
        # default metadata location depends on the remote user
        metadata = "/etc/ansible/setup" if self.remote_user == "root" else "~/.ansible/setup"

    # first copy the source template over
    temppath = tmp + os.path.split(source)[-1]
    self._transfer_file(conn, utils.path_dwim(self.basedir, source), temppath)

    # install the template module
    template_module = self._transfer_module(conn, tmp, "template")

    # run the template module
    module_args = ["src=%s" % temppath, "dest=%s" % dest, "metadata=%s" % metadata]
    (raw_result, executed) = self._execute_module(conn, tmp, template_module, module_args)
    (host, ok, data) = self._return_from_module(conn, host, raw_result, executed)

    if not ok:
        return (host, ok, data)
    return self._chain_file_module(conn, tmp, data, options, executed)
def find_children(playbook, playbook_dir):
    """Return the child path/type dicts referenced by *playbook* (a (path, kind) pair)."""
    if not os.path.exists(playbook[0]):
        return []

    if playbook[1] == 'role':
        # synthesize a minimal datastructure wrapping the role
        playbook_ds = {'roles': [{'role': playbook[0]}]}
    else:
        try:
            playbook_ds = parse_yaml_from_file(playbook[0])
        except AnsibleError as e:
            raise SystemExit(str(e))

    results = []
    basedir = os.path.dirname(playbook[0])
    for item in _playbook_items(playbook_ds):
        for child in play_children(basedir, item, playbook[1], playbook_dir):
            # skip paths still containing unresolved template expressions
            if "$" in child['path'] or "{{" in child['path']:
                continue
            # keep only the tokens before the first key=value argument
            valid_tokens = []
            for token in split_args(child['path']):
                if '=' in token:
                    break
                valid_tokens.append(token)
            results.append({
                'path': path_dwim(basedir, ' '.join(valid_tokens)),
                'type': child['type'],
            })
    return results
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    """
    Action handler that loads variables from a YAML file as facts.

    ``module_args`` is the (templatable) source filename; resolves it
    relative to the including file's vars/ dir when available.
    """
    if not module_args:
        result = dict(failed=True, msg="No source file given")
        return ReturnData(conn=conn, comm_ok=True, result=result)

    source = module_args
    source = template.template(self.runner.basedir, source, inject)

    if '_original_file' in inject:
        source = utils.path_dwim_relative(inject['_original_file'], 'vars', source, self.runner.basedir)
    else:
        source = utils.path_dwim(self.runner.basedir, source)

    if os.path.exists(source):
        data = utils.parse_yaml_from_file(source, vault_password=self.runner.vault_pass)
        if data is None:
            # empty file -> no facts
            data = {}
        elif not isinstance(data, dict):
            # the original check (``if data and type(data) != dict``) let
            # falsy non-dicts such as an empty list slip through; reject
            # all non-dict content uniformly
            raise errors.AnsibleError("%s must be stored as a dictionary/hash" % source)
        result = dict(ansible_facts=data)
        return ReturnData(conn=conn, comm_ok=True, result=result)
    else:
        result = dict(failed=True, msg="Source file not found.", file=source)
        return ReturnData(conn=conn, comm_ok=True, result=result)
def _execute_copy(self, conn, host, tmp):
    """ handler for file transfer operations """

    # load up options
    options = utils.parse_kv(self.module_args)
    source = options["src"]
    dest = options["dest"]

    # stage the file at a remote tmp location
    remote_src = tmp + source.split("/")[-1]
    self._transfer_file(conn, utils.path_dwim(self.basedir, source), remote_src)

    # install the copy module
    self.module_name = "copy"
    copy_module = self._transfer_module(conn, tmp, "copy")

    # run the copy module against the staged file
    module_args = ["src=%s" % remote_src, "dest=%s" % dest]
    (raw_result, executed) = self._execute_module(conn, tmp, copy_module, module_args)
    (host, ok, data) = self._return_from_module(conn, host, raw_result, executed)

    if not ok:
        return (host, ok, data)
    return self._chain_file_module(conn, tmp, data, options, executed)
def run(self, terms, inject=None, **kwargs):
    # lookup plugin: locate a (possibly vault-encrypted) file and return
    # its decrypted contents decoded as utf-8
    terms = utils.listify_lookup_plugin_terms(terms, self.basedir, inject)
    ret = []
    # a scalar variable may arrive here; lookups expect a list
    if not isinstance(terms, list):
        terms = [terms]
    for term in terms:
        # candidate locations, in priority order:
        #  1. relative to the current basedir
        #  2. the including role's files/ directory
        #  3. the playbook directory
        basedir_path = utils.path_dwim(self.basedir, term)
        relative_path = None
        playbook_path = None
        if '_original_file' in inject:
            relative_path = utils.path_dwim_relative(inject['_original_file'], 'files', term, self.basedir, check=False)
        if 'playbook_dir' in inject:
            playbook_path = os.path.join(inject['playbook_dir'], term)
        for path in (basedir_path, relative_path, playbook_path):
            if path and os.path.exists(path):
                # transparently decrypt when the content is vault-encrypted
                vaultlib = inject['_ploy_instance'].get_vault_lib()
                with open(path) as f:
                    data = f.read()
                if vaultlib.is_encrypted(data):
                    data = vaultlib.decrypt(data)
                try:
                    data = data.decode('utf8')
                except UnicodeDecodeError as e:
                    raise errors.AnsibleError("UnicodeDecodeError encrypted file lookup, only ascii and utf8 supported: %s\n%s" % (term, e))
                ret.append(data)
                break
        else:
            # no candidate existed for this term
            raise errors.AnsibleError("could not locate encrypted file in lookup: %s" % term)
    return ret
def _load_tasks(self, ds, keyname):
    ''' handle task and handler include statements '''
    tasks = ds.get(keyname, [])
    results = []
    for x in tasks:
        task_vars = self.vars.copy()
        if 'include' in x:
            # include line looks like: "file.yml key=val key2=val2";
            # k=v pairs become extra task vars
            tokens = shlex.split(x['include'])
            for t in tokens[1:]:
                (k, v) = t.split("=", 1)
                task_vars[k] = v
            include_file = tokens[0]
            data = utils.parse_yaml_from_file(utils.path_dwim(self.playbook.basedir, include_file))
        elif type(x) == dict:
            # an inline task entry
            data = [x]
        else:
            raise Exception("unexpected task type")
        for y in data:
            items = y.get('with_items', None)
            if items is None:
                # single pass with an empty item
                items = ['']
            elif isinstance(items, basestring):
                # a variable reference naming the items list
                items = utils.varLookup(items, task_vars)
            for item in items:
                mv = task_vars.copy()
                mv['item'] = item
                results.append(Task(self, y, module_vars=mv))
    return results
def _load_playbook_from_file(self, path):
    '''
    do some top level error checking on playbooks and allow them to
    include other playbooks.
    '''
    playbook_data = utils.parse_yaml_from_file(path)
    accumulated_plays = []

    if not isinstance(playbook_data, list):
        raise errors.AnsibleError("parse error: playbooks must be formatted as a YAML list")

    for play in playbook_data:
        if not isinstance(play, dict):
            # fixed message defects: "must a" -> "must be a", "recieved" -> "received"
            raise errors.AnsibleError(
                "parse error: each play in a playbook must be a YAML dictionary (hash), received: %s" % play
            )
        if 'include' in play:
            # a top-level include must be the only key in its entry
            if len(play.keys()) == 1:
                included_path = utils.path_dwim(self.basedir, play['include'])
                accumulated_plays.extend(self._load_playbook_from_file(included_path))
            else:
                raise errors.AnsibleError(
                    "parse error: top level includes cannot be used with other directives: %s" % play
                )
        else:
            accumulated_plays.append(play)

    return accumulated_plays
def generate_filenames(host, inject, filename):
    """ Render the raw filename into 3 forms """
    # NOTE(review): reads self.* but takes no ``self`` parameter --
    # presumably a nested function capturing self from an enclosing
    # method; confirm against the surrounding code.
    # play-scoped template pass
    filename2 = template(self.basedir, filename, self.vars)
    filename3 = filename2
    if host is not None:
        # host-scoped pass: inventory variables are available via inject
        filename3 = template(self.basedir, filename2, inject)
    if self._has_vars_in(filename3) and host is not None:
        # allow play scoped vars and host scoped vars to template the filepath
        inject.update(self.vars)
        filename4 = template(self.basedir, filename3, inject)
        filename4 = utils.path_dwim(self.basedir, filename4)
    else:
        filename4 = utils.path_dwim(self.basedir, filename3)
    return filename2, filename3, filename4
def _execute_template(self, conn, host, tmp):
    ''' handler for template operations '''

    # load up options
    options = utils.parse_kv(self.module_args)
    source = options['src']
    dest = options['dest']
    metadata = options.get('metadata', None)
    if metadata is None:
        # default metadata location depends on the remote user
        metadata = '/etc/ansible/setup' if self.remote_user == 'root' else '~/.ansible/setup'

    # first copy the source template over
    temppath = tmp + os.path.split(source)[-1]
    conn.put_file(utils.path_dwim(self.basedir, source), temppath)

    # install the template module
    template_module = self._transfer_module(conn, tmp, 'template')

    # run the template module
    module_args = "src=%s dest=%s metadata=%s" % (temppath, dest, metadata)
    (raw_result, err, executed) = self._execute_module(conn, tmp, template_module, module_args)
    (host, ok, data, err) = self._return_from_module(conn, host, raw_result, err, executed)

    if not ok:
        return (host, ok, data, err)
    return self._chain_file_module(conn, tmp, data, err, options, executed)
def run(self, conn, tmp, module_name, module_args, inject):
    ''' handler for file transfer operations '''

    tokens = shlex.split(module_args)
    source = tokens[0]
    # FIXME: error handling
    args = " ".join(tokens[1:])
    source = utils.template(self.runner.basedir, source, inject)
    source = utils.path_dwim(self.runner.basedir, source)

    # NUL bytes have been observed to sneak into these strings; strip them
    # defensively (original comment: "why does this happen here?")
    source = source.replace('\x00', '')
    args = args.replace('\x00', '')

    # transfer the file to a remote tmp location
    tmp_src = os.path.join(tmp, os.path.basename(source)).replace('\x00', '')
    conn.put_file(source, tmp_src)

    # fix file permissions when the copy is done as a different user
    if self.runner.sudo and self.runner.sudo_user != 'root':
        prepcmd = 'chmod a+rx %s' % tmp_src
    else:
        prepcmd = 'chmod +x %s' % tmp_src

    # add preparation steps to one ssh roundtrip executing the script
    module_args = prepcmd + '; ' + tmp_src + ' ' + args

    handler = utils.plugins.action_loader.get('raw', self.runner)
    result = handler.run(conn, tmp, 'raw', module_args, inject)

    # clean up after ourselves (idiom: substring test instead of .find())
    if 'tmp' in tmp and C.DEFAULT_KEEP_REMOTE_FILES != '1':
        self.runner._low_level_exec_command(conn, 'rm -rf %s >/dev/null 2>&1' % tmp, tmp)

    return result
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    # include_vars-style action that additionally merges loaded facts
    # across successive calls for the same host
    if not module_args:
        result = dict(failed=True, msg="No source file given")
        return ReturnData(conn=conn, comm_ok=True, result=result)

    source = template.template(self.runner.basedir, module_args, inject)

    # resolve relative to the including file's vars/ dir when available
    if '_original_file' in inject:
        source = utils.path_dwim_relative(inject['_original_file'], 'vars', source, self.runner.basedir, False)
    else:
        source = utils.path_dwim(self.runner.basedir, source)

    # NOTE(review): a missing source yields empty facts rather than a
    # failure -- presumably intentional for this merge variant; confirm.
    data = {}
    if os.path.exists(source):
        data = utils.parse_yaml_from_file(source, vault_password=self.runner.vault_pass)
        if data and type(data) != dict:
            raise errors.AnsibleError("%s must be stored as a dictionary/hash" % source)

    # accumulate parsed vars per host across invocations on the runner
    if not hasattr(conn.runner, 'mergeBuffer'):
        conn.runner.mergeBuffer = {}
    if conn.host in conn.runner.mergeBuffer:
        data = utils.merge_hash(conn.runner.mergeBuffer[conn.host], data)
    conn.runner.mergeBuffer[conn.host] = data

    result = dict(ansible_facts=data)
    return ReturnData(conn=conn, comm_ok=True, result=result)
def run_v1(self, terms, inject=None, **kwargs):
    """ Implements LookupModule run method for ansible v1.9. """
    # flatten the terms if it's passed from with_yaml_file syntax
    terms = utils.listify_lookup_plugin_terms(terms, self.basedir, inject)

    # ansible 1.9 handles unicode poorly, so a bare string may arrive here
    if isinstance(terms, basestring):
        terms = [terms]

    results = []
    for term in terms:
        # "<file> key=val ..." -- first token is the yaml file
        params = term.split()
        yaml_file = params[0]
        paramvals = self._build_params(params[1:])

        # resolve the (possibly relative) path against the basedir
        path = utils.path_dwim(self.basedir, yaml_file)
        data = self.read_yaml(path, **paramvals)
        if data is None:
            continue
        if isinstance(data, list):
            results.extend(data)
        else:
            results.append(data)
    return results
def run(self, terms, inject=None, **kwargs):
    """Resolve each term to the first existing file path among the candidates."""
    terms = utils.listify_lookup_plugin_terms(terms, self.basedir, inject)

    # a plain string variable may arrive here even though lookup plugins
    # strictly expect a list; wrap it so user intent still works
    if not isinstance(terms, list):
        terms = [terms]

    ret = []
    for term in terms:
        # candidate locations, in priority order:
        #  1. relative to the current basedir
        #  2. the role's files/ directory (when invoked from a role)
        #  3. the playbook directory (relative to the current working dir)
        basedir_path = utils.path_dwim(self.basedir, term)
        relative_path = None
        playbook_path = None
        if '_original_file' in inject:
            relative_path = utils.path_dwim_relative(inject['_original_file'], 'files', term, self.basedir, check=False)
        if 'playbook_dir' in inject:
            playbook_path = os.path.join(inject['playbook_dir'], term)

        for candidate in (basedir_path, relative_path, playbook_path):
            if candidate and os.path.exists(candidate):
                ret.append(candidate)
                break
        else:
            raise errors.AnsibleError("could not locate file in lookup: %s" % term)
    return ret
def template_from_file(basedir, path, vars):
    ''' run a file through the templating engine '''

    from ansible import utils
    realpath = utils.path_dwim(basedir, path)
    loader = jinja2.FileSystemLoader([basedir, os.path.dirname(realpath)])
    environment = jinja2.Environment(loader=loader, trim_blocks=True)

    # register every filter plugin's filters with the environment
    for filter_plugin in utils.plugins.filter_loader.all():
        filters = filter_plugin.filters()
        if not isinstance(filters, dict):
            raise errors.AnsibleError("FilterModule.filters should return a dict.")
        environment.filters.update(filters)

    try:
        data = codecs.open(realpath, encoding="utf8").read()
    except UnicodeDecodeError:
        raise errors.AnsibleError("unable to process as utf-8: %s" % realpath)
    except:
        raise errors.AnsibleError("unable to read %s" % realpath)

    # Get jinja env overrides from template: a JINJA2_OVERRIDE header line
    # of comma-separated key:value pairs is applied to the environment
    if data.startswith(JINJA2_OVERRIDE):
        eol = data.find('\n')
        line = data[len(JINJA2_OVERRIDE):eol]
        data = data[eol + 1:]
        for pair in line.split(','):
            (key, val) = pair.split(':')
            setattr(environment, key.strip(), val.strip())

    environment.template_class = J2Template
    t = environment.from_string(data)
    # copy so the template_* additions below don't leak to the caller
    vars = vars.copy()
    try:
        template_uid = pwd.getpwuid(os.stat(realpath).st_uid).pw_name
    except:
        # fall back to the numeric uid when no passwd entry exists
        template_uid = os.stat(realpath).st_uid
    vars['template_host'] = os.uname()[1]
    vars['template_path'] = realpath
    vars['template_mtime'] = datetime.datetime.fromtimestamp(os.path.getmtime(realpath))
    vars['template_uid'] = template_uid
    vars['template_fullpath'] = os.path.abspath(realpath)
    vars['template_run_date'] = datetime.datetime.now()

    managed_default = C.DEFAULT_MANAGED_STR
    managed_str = managed_default.format(
        host=vars['template_host'],
        uid=vars['template_uid'],
        file=vars['template_path']
    )
    # the managed string is additionally strftime-expanded against the
    # template file's mtime
    vars['ansible_managed'] = time.strftime(managed_str, time.localtime(os.path.getmtime(realpath)))

    # This line performs deep Jinja2 magic that uses the _jinja2_vars object for vars
    # Ideally, this could use some API where setting shared=True and the object won't get
    # passed through dict(o), but I have not found that yet.
    res = jinja2.utils.concat(t.root_render_func(t.new_context(_jinja2_vars(basedir, vars, t.globals), shared=True)))

    # preserve the trailing newline the source file had
    if data.endswith('\n') and not res.endswith('\n'):
        res = res + '\n'
    # second pass through the string templater
    return template(basedir, res, vars)
def _do_conditional_imports(self, vars_files, pattern=None): ''' handle the vars_files section, which can contain variables ''' # FIXME: save parsed variable results in memory to avoid excessive re-reading/parsing # FIXME: currently parses imports for hosts not in the pattern, that is not wrong, but it's # not super optimized yet either, because we wouldn't have hit them, ergo # it will raise false errors if there is no defaults variable file without any $vars # in it, which could happen on uncontacted hosts. if type(vars_files) != list: raise errors.AnsibleError("vars_files must be a list") host_list = [ h for h in self.inventory.list_hosts(pattern) if not (h in self.stats.failures or h in self.stats.dark) ] for host in host_list: cache_vars = SETUP_CACHE.get(host,{}) SETUP_CACHE[host] = cache_vars for filename in vars_files: if type(filename) == list: # loop over all filenames, loading the first one, and failing if # none found found = False sequence = [] for real_filename in filename: filename2 = utils.path_dwim(self.basedir, utils.template(real_filename, cache_vars, SETUP_CACHE)) sequence.append(filename2) if os.path.exists(filename2): found = True data = utils.parse_yaml_from_file(filename2) SETUP_CACHE[host].update(data) self.callbacks.on_import_for_host(host, filename2) break else: self.callbacks.on_not_import_for_host(host, filename2) if not found: raise errors.AnsibleError( "%s: FATAL, no files matched for vars_files import sequence: %s" % (host, sequence) ) else: filename2 = utils.path_dwim(self.basedir, utils.template(filename, cache_vars, SETUP_CACHE)) if not os.path.exists(filename2): raise errors.AnsibleError("no file matched for vars_file import: %s" % filename2) data = utils.parse_yaml_from_file(filename2) SETUP_CACHE[host].update(data) self.callbacks.on_import_for_host(host, filename2)
def run(self, terms, inject=None, **kwargs):
    """
    first_found-style lookup: return the first existing path.

    Terms may be plain filenames, or dicts with 'files', 'paths' and
    'skip' keys; with skip=True a total miss returns [] instead of
    [None].
    """
    terms = utils.listify_lookup_plugin_terms(terms, self.basedir, inject)

    anydict = False
    skip = False

    for term in terms:
        if isinstance(term, dict):
            anydict = True

    total_search = []
    if anydict:
        for term in terms:
            if isinstance(term, dict):
                files = term.get('files', [])
                paths = term.get('paths', [])
                skip = utils.boolean(term.get('skip', False))

                # 'files' may be a comma/semicolon separated string
                filelist = files
                if isinstance(files, basestring):
                    files = files.replace(',', ' ')
                    files = files.replace(';', ' ')
                    filelist = files.split(' ')

                # 'paths' may be a comma/colon/semicolon separated string
                pathlist = paths
                if paths:
                    if isinstance(paths, basestring):
                        paths = paths.replace(',', ' ')
                        paths = paths.replace(':', ' ')
                        paths = paths.replace(';', ' ')
                        pathlist = paths.split(' ')

                if not pathlist:
                    total_search = filelist
                else:
                    # cartesian product of paths x files
                    for path in pathlist:
                        for fn in filelist:
                            total_search.append(os.path.join(path, fn))
            else:
                total_search.append(term)
    else:
        total_search = terms

    # (removed two dead ``result = None`` assignments; the variable was
    # never meaningfully read -- it was always None at the final check)
    for fn in total_search:
        path = utils.path_dwim(self.basedir, fn)
        if os.path.exists(path):
            return [path]

    # nothing matched
    if skip:
        return []
    return [None]
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs): ''' handler for file transfer operations ''' # load up options options = {} if complex_args: options.update(complex_args) options.update(utils.parse_kv(module_args)) source = options.get('src', None) dest = options.get('dest', None) copy = utils.boolean(options.get('copy', 'yes')) if source is None or dest is None: result = dict(failed=True, msg="src (or content) and dest are required") return ReturnData(conn=conn, result=result) dest = os.path.expanduser(dest) # CCTODO: Fix path for Windows hosts. source = template.template(self.runner.basedir, os.path.expanduser(source), inject) if copy: if '_original_file' in inject: source = utils.path_dwim_relative(inject['_original_file'], 'files', source, self.runner.basedir) else: source = utils.path_dwim(self.runner.basedir, source) remote_md5 = self.runner._remote_md5(conn, tmp, dest) if remote_md5 != '3': result = dict(failed=True, msg="dest '%s' must be an existing dir" % dest) return ReturnData(conn=conn, result=result) if copy: # transfer the file to a remote tmp location tmp_src = tmp + 'source' conn.put_file(source, tmp_src) # handle diff mode client side # handle check mode client side # fix file permissions when the copy is done as a different user if copy: if self.runner.sudo and self.runner.sudo_user != 'root' or self.runner.su and self.runner.su_user != 'root': if not self.runner.noop_on_check(inject): self.runner._remote_chmod(conn, 'a+r', tmp_src, tmp) # Build temporary module_args. 
new_module_args = dict( src=tmp_src, original_basename=os.path.basename(source), ) # make sure checkmod is passed on correctly if self.runner.noop_on_check(inject): new_module_args['CHECKMODE'] = True module_args = utils.merge_module_args(module_args, new_module_args) else: module_args = "%s original_basename=%s" % (module_args, pipes.quote(os.path.basename(source))) # make sure checkmod is passed on correctly if self.runner.noop_on_check(inject): module_args += " CHECKMODE=True" return self.runner._execute_module(conn, tmp, 'unarchive', module_args, inject=inject, complex_args=complex_args)
def _include_handlers(self, play, handler, dirname, new_handlers):
    ''' load handlers from external files '''
    # resolve the include path, render it with play vars, parse, and
    # append every resulting handler entry
    path = utils.path_dwim(dirname, handler['include'])
    inject_vars = self._get_vars(play, dirname)
    rendered = utils.template_from_file(path, inject_vars)
    new_handlers.extend(utils.parse_yaml(rendered))
def _load_roles(self, roles, ds):
    # Expand each role into include-task/handler entries and register its
    # vars/defaults/library directories, rewriting ds in place.
    # a role is a name that auto-includes the following if they exist
    # <rolename>/tasks/main.yml
    # <rolename>/handlers/main.yml
    # <rolename>/vars/main.yml
    # <rolename>/library
    # and it auto-extends tasks/handlers/vars_files/module paths as appropriate if found
    if roles is None:
        roles = []
    if type(roles) != list:
        raise errors.AnsibleError("value of 'roles:' must be a list")

    new_tasks = []
    new_handlers = []
    role_vars_files = []
    defaults_files = []

    pre_tasks = ds.get('pre_tasks', None)
    if type(pre_tasks) != list:
        pre_tasks = []
    for x in pre_tasks:
        new_tasks.append(x)

    # flush handlers after pre_tasks
    new_tasks.append(dict(meta='flush_handlers'))

    # expand role dependency graph into a flat, ordered list of
    # (role, role_path, role_vars, role_params, default_vars) entries
    roles = self._build_role_dependencies(roles, [], {})

    # give each role an uuid and
    # make role_path available as variable to the task
    for idx, val in enumerate(roles):
        this_uuid = str(uuid.uuid4())
        # index -3 is the role_vars dict of the 5-tuple above
        roles[idx][-3]['role_uuid'] = this_uuid
        roles[idx][-3]['role_path'] = roles[idx][1]

    role_names = []

    for (role, role_path, role_vars, role_params, default_vars) in roles:
        # special vars must be extracted from the dict to the included tasks
        special_keys = [ "sudo", "sudo_user", "when", "with_items", "su", "su_user", "become", "become_user" ]
        special_vars = {}
        for k in special_keys:
            if k in role_vars:
                special_vars[k] = role_vars[k]

        task_basepath = utils.path_dwim(self.basedir, os.path.join(role_path, 'tasks'))
        handler_basepath = utils.path_dwim(self.basedir, os.path.join(role_path, 'handlers'))
        vars_basepath = utils.path_dwim(self.basedir, os.path.join(role_path, 'vars'))
        meta_basepath = utils.path_dwim(self.basedir, os.path.join(role_path, 'meta'))
        defaults_basepath = utils.path_dwim(self.basedir, os.path.join(role_path, 'defaults'))

        # _resolve_main picks the main.yml (or equivalent) inside each dir
        task = self._resolve_main(task_basepath)
        handler = self._resolve_main(handler_basepath)
        vars_file = self._resolve_main(vars_basepath)
        meta_file = self._resolve_main(meta_basepath)
        defaults_file = self._resolve_main(defaults_basepath)

        library = utils.path_dwim(self.basedir, os.path.join(role_path, 'library'))

        # a role must provide at least one of these pieces to be valid
        missing = lambda f: not os.path.isfile(f)
        if missing(task) and missing(handler) and missing(vars_file) and missing(defaults_file) and missing(meta_file) and not os.path.isdir(library):
            raise errors.AnsibleError("found role at %s, but cannot find %s or %s or %s or %s or %s or %s" % (role_path, task, handler, vars_file, defaults_file, meta_file, library))

        if isinstance(role, dict):
            role_name = role['role']
        else:
            role_name = utils.role_spec_parse(role)["name"]

        role_names.append(role_name)
        if os.path.isfile(task):
            # role tasks become an include entry carrying the role's scopes
            nt = dict(include=pipes.quote(task), vars=role_vars, role_params=role_params, default_vars=default_vars, role_name=role_name)
            for k in special_keys:
                if k in special_vars:
                    nt[k] = special_vars[k]
            new_tasks.append(nt)
        if os.path.isfile(handler):
            nt = dict(include=pipes.quote(handler), vars=role_vars, role_params=role_params, role_name=role_name)
            for k in special_keys:
                if k in special_vars:
                    nt[k] = special_vars[k]
            new_handlers.append(nt)
        if os.path.isfile(vars_file):
            role_vars_files.append(vars_file)
        if os.path.isfile(defaults_file):
            defaults_files.append(defaults_file)
        if os.path.isdir(library):
            # make the role's bundled modules discoverable
            utils.plugins.module_finder.add_directory(library)

    tasks = ds.get('tasks', None)
    post_tasks = ds.get('post_tasks', None)
    handlers = ds.get('handlers', None)
    vars_files = ds.get('vars_files', None)

    if type(tasks) != list:
        tasks = []
    if type(handlers) != list:
        handlers = []
    if type(vars_files) != list:
        vars_files = []
    if type(post_tasks) != list:
        post_tasks = []

    new_tasks.extend(tasks)
    # flush handlers after tasks + role tasks
    new_tasks.append(dict(meta='flush_handlers'))
    new_tasks.extend(post_tasks)
    # flush handlers after post tasks
    new_tasks.append(dict(meta='flush_handlers'))

    new_handlers.extend(handlers)

    ds['tasks'] = new_tasks
    ds['handlers'] = new_handlers
    ds['role_names'] = role_names

    self.role_vars = self._load_role_vars_files(role_vars_files)
    self.default_vars = self._load_role_defaults(defaults_files)

    return ds
def _build_role_dependencies(self, roles, dep_stack, passed_vars={}, level=0):
    # Recursively resolve each role's meta/main.yml dependencies into a
    # flat, depth-first ordered dep_stack of [role, path, vars, defaults].
    # NOTE(review): passed_vars={} is a mutable default argument; it appears
    # to be read-only here (only fed to combine_vars) — confirm before relying on it.
    # this number is arbitrary, but it seems sane
    if level > 20:
        raise errors.AnsibleError(
            "too many levels of recursion while resolving role dependencies"
        )
    for role in roles:
        role_path, role_vars = self._get_role_path(role)
        # caller-supplied vars win over the role's own inline vars
        role_vars = utils.combine_vars(role_vars, passed_vars)
        vars = self._resolve_main(
            utils.path_dwim(self.basedir, os.path.join(role_path, 'vars')))
        vars_data = {}
        if os.path.isfile(vars):
            vars_data = utils.parse_yaml_from_file(vars)
            if vars_data:
                role_vars = utils.combine_vars(vars_data, role_vars)
        defaults = self._resolve_main(
            utils.path_dwim(self.basedir, os.path.join(role_path, 'defaults')))
        defaults_data = {}
        if os.path.isfile(defaults):
            defaults_data = utils.parse_yaml_from_file(defaults)
        # the meta directory contains the yaml that should
        # hold the list of dependencies (if any)
        meta = self._resolve_main(
            utils.path_dwim(self.basedir, os.path.join(role_path, 'meta')))
        if os.path.isfile(meta):
            data = utils.parse_yaml_from_file(meta)
            if data:
                dependencies = data.get('dependencies', [])
                for dep in dependencies:
                    allow_dupes = False
                    (dep_path, dep_vars) = self._get_role_path(dep)
                    # a dependency's own meta may opt into being listed twice
                    meta = self._resolve_main(
                        utils.path_dwim(self.basedir, os.path.join(dep_path, 'meta')))
                    if os.path.isfile(meta):
                        meta_data = utils.parse_yaml_from_file(meta)
                        if meta_data:
                            allow_dupes = utils.boolean(
                                meta_data.get('allow_duplicates', ''))
                    if not allow_dupes:
                        # skip deps already pulled in elsewhere in the graph
                        if dep in self.included_roles:
                            continue
                        else:
                            self.included_roles.append(dep)
                    # precedence: dep's own vars > parent role vars > passed vars
                    dep_vars = utils.combine_vars(passed_vars, dep_vars)
                    dep_vars = utils.combine_vars(role_vars, dep_vars)
                    vars = self._resolve_main(
                        utils.path_dwim(self.basedir, os.path.join(dep_path, 'vars')))
                    vars_data = {}
                    if os.path.isfile(vars):
                        vars_data = utils.parse_yaml_from_file(vars)
                        if vars_data:
                            dep_vars = utils.combine_vars(
                                vars_data, dep_vars)
                    defaults = self._resolve_main(
                        utils.path_dwim(self.basedir, os.path.join(dep_path, 'defaults')))
                    dep_defaults_data = {}
                    if os.path.isfile(defaults):
                        dep_defaults_data = utils.parse_yaml_from_file(
                            defaults)
                    # 'role' is the spec key, not a real variable
                    if 'role' in dep_vars:
                        del dep_vars['role']
                    # depth-first: a dep's own deps land on the stack first
                    self._build_role_dependencies([dep], dep_stack, passed_vars=dep_vars, level=level + 1)
                    dep_stack.append(
                        [dep, dep_path, dep_vars, dep_defaults_data])
        # only add the current role when we're at the top level,
        # otherwise we'll end up in a recursive loop
        if level == 0:
            self.included_roles.append(role)
            dep_stack.append([role, role_path, role_vars, defaults_data])
    return dep_stack
def _update_vars_files_for_host(self, host):
    # Load this play's vars_files, deciding per file whether the data is
    # host-specific (goes to SETUP_CACHE[host]) or global (goes to self.vars).
    # Called once with host=None for the play pass and once per host.

    if type(self.vars_files) != list:
        self.vars_files = [ self.vars_files ]

    if host is not None:
        # variables available for templating host-specific filenames
        inject = {}
        inject.update(self.playbook.inventory.get_variables(host))
        inject.update(self.playbook.SETUP_CACHE[host])

    for filename in self.vars_files:

        if type(filename) == list:

            # loop over all filenames, loading the first one, and failing if
            # none found
            found = False
            sequence = []
            for real_filename in filename:
                # filename2: templated with play vars only;
                # filename3: additionally templated with host/inventory vars
                filename2 = template(self.basedir, real_filename, self.vars)
                filename3 = filename2
                if host is not None:
                    filename3 = template(self.basedir, filename2, inject)
                filename4 = utils.path_dwim(self.basedir, filename3)
                sequence.append(filename4)
                if os.path.exists(filename4):
                    found = True
                    data = utils.parse_yaml_from_file(filename4)
                    if type(data) != dict:
                        raise errors.AnsibleError("%s must be stored as a dictionary/hash" % filename4)
                    if host is not None:
                        if self._has_vars_in(filename2) and not self._has_vars_in(filename3):
                            # this filename has variables in it that were fact specific
                            # so it needs to be loaded into the per host SETUP_CACHE
                            self.playbook.SETUP_CACHE[host].update(data)
                            self.playbook.callbacks.on_import_for_host(host, filename4)
                    elif not self._has_vars_in(filename4):
                        # found a non-host specific variable, load into vars and NOT
                        # the setup cache
                        self.vars.update(data)
                elif host is not None:
                    self.playbook.callbacks.on_not_import_for_host(host, filename4)
                if found:
                    break
            if not found and host is not None:
                raise errors.AnsibleError(
                    "%s: FATAL, no files matched for vars_files import sequence: %s" % (host, sequence)
                )

        else:
            # just one filename supplied, load it!
            filename2 = template(self.basedir, filename, self.vars)
            filename3 = filename2
            if host is not None:
                filename3 = template(self.basedir, filename2, inject)
            filename4 = utils.path_dwim(self.basedir, filename3)
            # unresolved variables remain in the path: defer to a later pass
            if self._has_vars_in(filename4):
                continue
            new_vars = utils.parse_yaml_from_file(filename4)
            if new_vars:
                if type(new_vars) != dict:
                    raise errors.AnsibleError("%s must be stored as dictionary/hash: %s" % (filename4, type(new_vars)))
                if host is not None and self._has_vars_in(filename2) and not self._has_vars_in(filename3):
                    # running a host specific pass and has host specific variables
                    # load into setup cache
                    self.playbook.SETUP_CACHE[host] = utils.combine_vars(
                        self.playbook.SETUP_CACHE[host], new_vars)
                    self.playbook.callbacks.on_import_for_host(host, filename4)
                elif host is None:
                    # running a non-host specific pass and we can update the global vars instead
                    self.vars = utils.combine_vars(self.vars, new_vars)
def _load_roles(self, roles, ds):
    # Older variant of role expansion (no meta/ or role_params support):
    # expands each role into include entries and rewrites ds in place.
    # a role is a name that auto-includes the following if they exist
    # <rolename>/tasks/main.yml
    # <rolename>/handlers/main.yml
    # <rolename>/vars/main.yml
    # <rolename>/library
    # and it auto-extends tasks/handlers/vars_files/module paths as appropriate if found
    if roles is None:
        roles = []
    if type(roles) != list:
        raise errors.AnsibleError("value of 'roles:' must be a list")

    new_tasks = []
    new_handlers = []
    new_vars_files = []
    defaults_files = []

    pre_tasks = ds.get('pre_tasks', None)
    if type(pre_tasks) != list:
        pre_tasks = []
    for x in pre_tasks:
        new_tasks.append(x)

    # flush handlers after pre_tasks
    new_tasks.append(dict(meta='flush_handlers'))

    # flat, dependency-ordered list of (role, path, vars, defaults) tuples
    roles = self._build_role_dependencies(roles, [], self.vars)

    for (role,role_path,role_vars,default_vars) in roles:
        # special vars must be extracted from the dict to the included tasks
        special_keys = [ "sudo", "sudo_user", "when", "with_items" ]
        special_vars = {}
        for k in special_keys:
            if k in role_vars:
                special_vars[k] = role_vars[k]

        task_basepath = utils.path_dwim(self.basedir, os.path.join(role_path, 'tasks'))
        handler_basepath = utils.path_dwim(self.basedir, os.path.join(role_path, 'handlers'))
        vars_basepath = utils.path_dwim(self.basedir, os.path.join(role_path, 'vars'))
        defaults_basepath = utils.path_dwim(self.basedir, os.path.join(role_path, 'defaults'))

        # _resolve_main picks the main.yml (or equivalent) inside each dir
        task = self._resolve_main(task_basepath)
        handler = self._resolve_main(handler_basepath)
        vars_file = self._resolve_main(vars_basepath)
        defaults_file = self._resolve_main(defaults_basepath)

        library = utils.path_dwim(self.basedir, os.path.join(role_path, 'library'))

        # a role must provide at least one of these pieces to be valid
        # (defaults_file alone is not checked in this older variant)
        if not os.path.isfile(task) and not os.path.isfile(handler) and not os.path.isfile(vars_file) and not os.path.isdir(library):
            raise errors.AnsibleError("found role at %s, but cannot find %s or %s or %s or %s" % (role_path, task, handler, vars_file, library))
        if os.path.isfile(task):
            nt = dict(include=pipes.quote(task), vars=role_vars, default_vars=default_vars)
            for k in special_keys:
                if k in special_vars:
                    nt[k] = special_vars[k]
            new_tasks.append(nt)
        if os.path.isfile(handler):
            nt = dict(include=pipes.quote(handler), vars=role_vars)
            for k in special_keys:
                if k in special_vars:
                    nt[k] = special_vars[k]
            new_handlers.append(nt)
        if os.path.isfile(vars_file):
            new_vars_files.append(vars_file)
        if os.path.isfile(defaults_file):
            defaults_files.append(defaults_file)
        if os.path.isdir(library):
            # make the role's bundled modules discoverable
            utils.plugins.module_finder.add_directory(library)

    tasks = ds.get('tasks', None)
    post_tasks = ds.get('post_tasks', None)
    handlers = ds.get('handlers', None)
    vars_files = ds.get('vars_files', None)

    if type(tasks) != list:
        tasks = []
    if type(handlers) != list:
        handlers = []
    if type(vars_files) != list:
        vars_files = []
    if type(post_tasks) != list:
        post_tasks = []

    new_tasks.extend(tasks)
    # flush handlers after tasks + role tasks
    new_tasks.append(dict(meta='flush_handlers'))
    new_tasks.extend(post_tasks)
    # flush handlers after post tasks
    new_tasks.append(dict(meta='flush_handlers'))

    new_handlers.extend(handlers)
    new_vars_files.extend(vars_files)

    ds['tasks'] = new_tasks
    ds['handlers'] = new_handlers
    ds['vars_files'] = new_vars_files

    self.default_vars = self._load_role_defaults(defaults_files)

    return ds
class ActionModule(object):
    # Action plugin for the 'copy' module (Ansible 1.x, Python 2).
    # NOTE(review): the run() method below appears truncated in this chunk —
    # it ends right after the first_available_file resolution; the remainder
    # of the copy logic is not visible here. Code kept byte-identical.

    def __init__(self, runner):
        # runner: the Runner instance driving this action
        self.runner = runner

    def run(self, conn, tmp_path, module_name, module_args, inject, complex_args=None, **kwargs):
        ''' handler for file transfer operations '''

        # load up options: complex_args (YAML) first, then k=v pairs override
        options = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))
        source = options.get('src', None)
        content = options.get('content', None)
        dest = options.get('dest', None)
        raw = utils.boolean(options.get('raw', 'no'))
        force = utils.boolean(options.get('force', 'yes'))

        # content with newlines is going to be escaped to safely load in yaml
        # now we need to unescape it so that the newlines are evaluated properly
        # when writing the file to disk
        if content:
            if isinstance(content, unicode):
                try:
                    content = content.decode('unicode-escape')
                except UnicodeDecodeError:
                    pass

        # must have a destination plus one source of data
        if (source is None and content is None and not 'first_available_file' in inject) or dest is None:
            result = dict(failed=True, msg="src (or content) and dest are required")
            return ReturnData(conn=conn, result=result)
        elif (source is not None or 'first_available_file' in inject) and content is not None:
            result = dict(failed=True, msg="src and content are mutually exclusive")
            return ReturnData(conn=conn, result=result)

        # Check if the source ends with a "/"
        source_trailing_slash = False
        if source:
            source_trailing_slash = source.endswith("/")

        # Define content_tempfile in case we set it after finding content populated.
        content_tempfile = None

        # If content is defined make a temp file and write the content into it.
        if content is not None:
            try:
                # If content comes to us as a dict it should be decoded json.
                # We need to encode it back into a string to write it out.
                if type(content) is dict:
                    content_tempfile = self._create_content_tempfile(
                        json.dumps(content))
                else:
                    content_tempfile = self._create_content_tempfile(content)
                # from here on, the temp file acts as the source
                source = content_tempfile
            except Exception, err:
                result = dict(failed=True, msg="could not write content temp file: %s" % err)
                return ReturnData(conn=conn, result=result)
        # if we have first_available_file in our vars
        # look up the files and use the first one we find as src
        elif 'first_available_file' in inject:
            found = False
            for fn in inject.get('first_available_file'):
                fn_orig = fn
                fnt = template.template(self.runner.basedir, fn, inject)
                fnd = utils.path_dwim(self.runner.basedir, fnt)
                # fall back to the including file's files/ directory
                if not os.path.exists(fnd) and '_original_file' in inject:
                    fnd = utils.path_dwim_relative(inject['_original_file'], 'files', fnt, self.runner.basedir, check=False)
                if os.path.exists(fnd):
                    source = fnd
                    found = True
                    break
            if not found:
                results = dict(
                    failed=True,
                    msg="could not find src in first_available_file list")
                return ReturnData(conn=conn, result=results)
def run(self, conn, tmp, module_name, module_args, inject):
    ''' handler for file transfer operations '''
    # Old-style copy action: transfers the local file only when its md5
    # differs from the remote one, then daisychains to the file module.

    # load up options
    options = utils.parse_kv(module_args)
    source = options.get('src', None)
    dest = options.get('dest', None)

    if (source is None and not 'first_available_file' in inject) or dest is None:
        result = dict(failed=True, msg="src and dest are required")
        return ReturnData(conn=conn, result=result)

    # if we have first_available_file in our vars
    # look up the files and use the first one we find as src
    if 'first_available_file' in inject:
        found = False
        for fn in inject.get('first_available_file'):
            fn = utils.template(self.runner.basedir, fn, inject)
            if os.path.exists(fn):
                source = fn
                found = True
                break
        if not found:
            results = dict(
                failed=True,
                msg="could not find src in first_available_file list")
            # BUGFIX: ReturnData takes 'result=', not 'results=' — the
            # original keyword raised TypeError on this error path
            # (cf. every other ReturnData call in this file).
            return ReturnData(conn=conn, result=results)

    # template the source path with host vars, then resolve it locally
    source = utils.template(self.runner.basedir, source, inject)
    source = utils.path_dwim(self.runner.basedir, source)

    local_md5 = utils.md5(source)
    if local_md5 is None:
        result = dict(failed=True, msg="could not find src=%s" % source)
        return ReturnData(conn=conn, result=result)

    remote_md5 = self.runner._remote_md5(conn, tmp, dest)

    exec_rc = None
    if local_md5 != remote_md5:
        # transfer the file to a remote tmp location
        tmp_src = tmp + os.path.basename(source)
        conn.put_file(source, tmp_src)
        # fix file permissions when the copy is done as a different user
        if self.runner.sudo and self.runner.sudo_user != 'root':
            self.runner._low_level_exec_command(conn, "chmod a+r %s" % tmp_src, tmp)

        # run the copy module
        module_args = "%s src=%s" % (module_args, tmp_src)
        return self.runner._execute_module(conn, tmp, 'copy', module_args, inject=inject).daisychain(
            'file', module_args)

    else:
        # no need to transfer the file, already correct md5
        result = dict(changed=False, md5sum=remote_md5, transferred=False)
        return ReturnData(conn=conn, result=result).daisychain('file', module_args)
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    """Run the method"""
    # config_template action: renders a template, merges structured
    # overrides into it (ini/json/yaml), and ships it via the copy module.

    if not self.runner.is_playbook:
        raise errors.AnsibleError(
            'FAILED: `config_templates` are only available in playbooks'
        )

    options = self.grab_options(complex_args, module_args)
    try:
        source = options['src']
        dest = options['dest']
        config_overrides = options.get('config_overrides', dict())
        config_type = options['config_type']
        # NOTE: assert is stripped under -O; kept for interface compatibility
        assert config_type.lower() in ['ini', 'json', 'yaml']
    except KeyError as exp:
        result = dict(failed=True, msg=exp)
        return ReturnData(conn=conn, comm_ok=False, result=result)

    source_template = template.template(
        self.runner.basedir,
        source,
        inject
    )

    if '_original_file' in inject:
        # resolve relative to the including file's templates/ directory
        source_file = utils.path_dwim_relative(
            inject['_original_file'],
            'templates',
            source_template,
            self.runner.basedir
        )
    else:
        source_file = utils.path_dwim(self.runner.basedir, source_template)

    # Open the template file and return the data as a string. This is
    # being done here so that the file can be a vault encrypted file.
    resultant = template.template_from_file(
        self.runner.basedir,
        source_file,
        inject,
        vault_password=self.runner.vault_pass
    )

    if config_overrides:
        # dispatch to the merger matching the declared config type
        type_merger = getattr(self, CONFIG_TYPES.get(config_type))
        resultant = type_merger(
            config_overrides=config_overrides,
            resultant=resultant
        )

    # Retemplate the resultant object as it may have new data within it
    # as provided by an override variable.
    # BUGFIX: the original discarded this call's return value, so the
    # re-templated content was never used.
    resultant = template.template_from_string(
        basedir=self.runner.basedir,
        data=resultant,
        vars=inject,
        fail_on_undefined=True
    )

    # Access to protected method is unavoidable in Ansible 1.x.
    new_module_args = dict(
        src=self.runner._transfer_str(conn, tmp, 'source', resultant),
        dest=dest,
        original_basename=os.path.basename(source),
        follow=True,
    )

    module_args_tmp = utils.merge_module_args(
        module_args,
        new_module_args
    )

    # Remove data types that are not available to the copy module.
    # BUGFIX: pop with a default — these keys are absent when the options
    # arrived via module_args, and bare pop() raised KeyError.
    if complex_args:
        complex_args.pop('config_overrides', None)
        complex_args.pop('config_type', None)

    # Return the copy module status. Access to protected method is
    # unavoidable in Ansible 1.x.
    # (The original had this comment garbled into bare, non-comment text.)
    return self.runner._execute_module(
        conn,
        tmp,
        'copy',
        module_args_tmp,
        inject=inject,
        complex_args=complex_args
    )
def main():
    """Upload or delete a Proxmox VE template/ISO via the proxmoxer API."""
    module = AnsibleModule(
        argument_spec=dict(
            api_host=dict(required=True),
            api_user=dict(required=True),
            api_password=dict(no_log=True),
            validate_certs=dict(type='bool', default='no'),
            node=dict(),
            src=dict(),
            template=dict(),
            content_type=dict(default='vztmpl', choices=['vztmpl', 'iso']),
            storage=dict(default='local'),
            timeout=dict(type='int', default=30),
            force=dict(type='bool', default='no'),
            state=dict(default='present', choices=['present', 'absent']),
        )
    )

    if not HAS_PROXMOXER:
        module.fail_json(msg='proxmoxer required for this module')

    state = module.params['state']
    api_user = module.params['api_user']
    api_host = module.params['api_host']
    api_password = module.params['api_password']
    validate_certs = module.params['validate_certs']
    node = module.params['node']
    storage = module.params['storage']
    timeout = module.params['timeout']

    # If password not set get it from PROXMOX_PASSWORD env
    if not api_password:
        try:
            api_password = os.environ['PROXMOX_PASSWORD']
        except KeyError as e:
            module.fail_json(msg='You should set api_password param or use PROXMOX_PASSWORD environment variable')

    try:
        proxmox = ProxmoxAPI(api_host, user=api_user, password=api_password, verify_ssl=validate_certs)
    except Exception as e:
        module.fail_json(msg='authorization on proxmox cluster failed with exception: %s' % e)

    if state == 'present':
        try:
            content_type = module.params['content_type']
            src = module.params['src']

            # BUGFIX: validate src BEFORE deriving paths from it — the
            # original called path_dwim/basename on src first, so a missing
            # src crashed with a TypeError instead of this message.
            if not src:
                module.fail_json(msg='src param to uploading template file is mandatory')

            from ansible import utils
            realpath = utils.path_dwim(None, src)
            template = os.path.basename(realpath)
            if get_template(proxmox, node, storage, content_type, template) and not module.params['force']:
                module.exit_json(changed=False, msg='template with volid=%s:%s/%s is already exists' % (storage, content_type, template))
            elif not (os.path.exists(realpath) and os.path.isfile(realpath)):
                module.fail_json(msg='template file on path %s not exists' % realpath)

            if upload_template(module, proxmox, api_host, node, storage, content_type, realpath, timeout):
                module.exit_json(changed=True, msg='template with volid=%s:%s/%s uploaded' % (storage, content_type, template))
        except Exception as e:
            module.fail_json(msg="uploading of template %s failed with exception: %s" % (template, e))

    elif state == 'absent':
        try:
            content_type = module.params['content_type']
            template = module.params['template']

            if not template:
                module.fail_json(msg='template param is mandatory')
            elif not get_template(proxmox, node, storage, content_type, template):
                module.exit_json(changed=False, msg='template with volid=%s:%s/%s is already deleted' % (storage, content_type, template))

            if delete_template(module, proxmox, node, storage, content_type, template, timeout):
                module.exit_json(changed=True, msg='template with volid=%s:%s/%s deleted' % (storage, content_type, template))
        except Exception as e:
            module.fail_json(msg="deleting of template %s failed with exception: %s" % (template, e))
def _execute_template(self, conn, tmp):
    ''' handler for template operations '''
    # Very early runner-era template handler: resolves src (possibly from
    # first_available_file), fetches remote facts when not in a playbook,
    # and renders the template locally before copying it over.
    # NOTE(review): this block appears truncated — it ends inside the
    # local-templating try/except; the transfer/return logic is not visible.

    # load up options
    options = utils.parse_kv(self.module_args)
    source = options.get('src', None)
    dest = options.get('dest', None)
    metadata = options.get('metadata', None)

    if (source is None and 'first_available_file' not in self.module_vars) or dest is None:
        result = dict(failed=True, msg="src and dest are required")
        return ReturnData(host=conn.host, comm_ok=False, result=result)

    # apply templating to source argument so vars can be used in the path
    inject = self.setup_cache.get(conn.host,{})

    # if we have first_available_file in our vars
    # look up the files and use the first one we find as src
    if 'first_available_file' in self.module_vars:
        found = False
        for fn in self.module_vars.get('first_available_file'):
            fn = utils.template(fn, inject, self.setup_cache)
            if os.path.exists(fn):
                source = fn
                found = True
                break
        if not found:
            result = dict(failed=True, msg="could not find src in first_available_file list")
            return ReturnData(host=conn.host, comm_ok=False, result=result)

    if self.module_vars is not None:
        inject.update(self.module_vars)

    source = utils.template(source, inject, self.setup_cache)

    #(host, ok, data, err) = (None, None, None, None)

    if not self.is_playbook:

        # not running from a playbook so we have to fetch the remote
        # setup file contents before proceeding...
        if metadata is None:
            if self.remote_user == 'root':
                metadata = '/etc/ansible/setup'
            else:
                # path is expanded on remote side
                metadata = "~/.ansible/setup"

        # install the template module
        slurp_module = self._transfer_module(conn, tmp, 'slurp')

        # run the slurp module to get the metadata file
        args = "src=%s" % metadata
        result1 = self._execute_module(conn, tmp, slurp_module, args)
        if not 'content' in result1.result or result1.result.get('encoding','base64') != 'base64':
            result1.result['failed'] = True
            return result1
        content = base64.b64decode(result1.result['content'])
        # remote facts become the templating namespace
        inject = utils.json_loads(content)

    # install the template module
    copy_module = self._transfer_module(conn, tmp, 'copy')

    # template the source data locally
    try:
        resultant = utils.template_from_file(utils.path_dwim(self.basedir, source), inject, self.setup_cache, no_engine=False)
    except Exception, e:
        result = dict(failed=True, msg=str(e))
        return ReturnData(host=conn.host, comm_ok=False, result=result)
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    ''' handler for file transfer operations '''
    # Old-style unarchive action: optionally pushes the local archive to the
    # target, then delegates to the remote unarchive module.

    # load up options: complex_args (YAML) first, then k=v pairs override
    options = {}
    if complex_args:
        options.update(complex_args)
    options.update(utils.parse_kv(module_args))
    source = options.get('src', None)
    dest = options.get('dest', None)
    copy = utils.boolean(options.get('copy', 'yes'))

    if source is None or dest is None:
        result = dict(failed=True, msg="src (or content) and dest are required")
        return ReturnData(conn=conn, result=result)

    # BUGFIX: expand ~ only after the None check above. The original called
    # os.path.expanduser(options.get('src', None)) directly, which raised
    # TypeError on a missing src/dest and made the check unreachable.
    source = os.path.expanduser(source)
    dest = os.path.expanduser(dest)

    source = template.template(self.runner.basedir, source, inject)
    if copy:
        # resolve the local archive relative to the including file's files/
        # directory when running from a role/include, else the play basedir
        if '_original_file' in inject:
            source = utils.path_dwim_relative(inject['_original_file'], 'files', source, self.runner.basedir)
        else:
            source = utils.path_dwim(self.runner.basedir, source)

    # _remote_md5 returns the sentinel '3' when the remote path is a directory
    remote_md5 = self.runner._remote_md5(conn, tmp, dest)
    if remote_md5 != '3':
        result = dict(failed=True, msg="dest '%s' must be an existing dir" % dest)
        return ReturnData(conn=conn, result=result)

    if copy:
        # transfer the file to a remote tmp location
        tmp_src = tmp + 'source'
        conn.put_file(source, tmp_src)

    # handle diff mode client side
    # handle check mode client side
    # fix file permissions when the copy is done as a different user
    if copy:
        if self.runner.sudo and self.runner.sudo_user != 'root':
            self.runner._low_level_exec_command(conn, "chmod a+r %s" % tmp_src, tmp)
        module_args = "%s src=%s original_basename=%s" % (
            module_args, pipes.quote(tmp_src), pipes.quote(os.path.basename(source)))
    else:
        module_args = "%s original_basename=%s" % (
            module_args, pipes.quote(os.path.basename(source)))

    return self.runner._execute_module(conn, tmp, 'unarchive', module_args, inject=inject, complex_args=complex_args)
    # NOTE(review): the lines above `def template` are the tail of a function
    # whose header is outside this chunk (it collects the resolved child
    # paths of a playbook); kept byte-identical.
        raise SystemExit(str(e))
    results = []
    basedir = os.path.dirname(playbook[0])
    items = _playbook_items(playbook_ds)
    for item in items:
        for child in play_children(basedir, item, playbook[1], playbook_dir):
            # skip paths that still contain unresolved template expressions
            if "$" in child['path'] or "{{" in child['path']:
                continue
            valid_tokens = list()
            for token in split_args(child['path']):
                # stop at the first k=v argument; everything before is the path
                if '=' in token:
                    break
                valid_tokens.append(token)
            path = ' '.join(valid_tokens)
            results.append({
                'path': path_dwim(basedir, path),
                'type': child['type']
            })
    return results


def template(basedir, value, vars, fail_on_undefined=False, **kwargs):
    # Best-effort templating: render value with Ansible's templar, returning
    # it unchanged when templating fails.
    try:
        value = ansible_template(
            os.path.abspath(basedir), value, vars,
            **dict(kwargs, fail_on_undefined=fail_on_undefined))
    # Hack to skip the following exception when using to_json filter on a variable.
    # I guess the filter doesn't like empty vars...
    except (AnsibleError, ValueError):
        # templating failed, so just keep value as is.
        pass
    # NOTE(review): no `return value` is visible here — the function as shown
    # returns None; confirm this chunk is not truncated (upstream versions
    # return the possibly-templated value).
def _load_playbook_from_file(self, path, vars=None):
    '''
    run top level error checking on playbooks and allow them to include other
    playbooks.

    Returns a (accumulated_plays, play_basedirs) pair, recursing into
    `include:` plays (optionally looped via a with_* lookup plugin).
    '''
    # BUGFIX: avoid the mutable default argument `vars={}`
    if vars is None:
        vars = {}

    playbook_data = utils.parse_yaml_from_file(path)
    accumulated_plays = []
    play_basedirs = []

    if type(playbook_data) != list:
        raise errors.AnsibleError(
            "parse error: playbooks must be formatted as a YAML list")

    basedir = os.path.dirname(path)
    utils.plugins.push_basedir(basedir)
    for play in playbook_data:
        if type(play) != dict:
            raise errors.AnsibleError(
                "parse error: each play in a playbook must a YAML dictionary (hash), recieved: %s" % play)

        if 'include' in play:
            # an include may only carry one other directive (a with_* loop)
            if len(play.keys()) <= 2:
                tokens = shlex.split(play['include'])

                # default: a single pass with an empty item
                items = ['']
                for k in play.keys():
                    if not k.startswith("with_"):
                        continue
                    plugin_name = k[5:]
                    if plugin_name not in utils.plugins.lookup_loader:
                        raise errors.AnsibleError(
                            "cannot find lookup plugin named %s for usage in with_%s" % (plugin_name, plugin_name))
                    terms = utils.template_ds(basedir, play[k], vars)
                    items = utils.plugins.lookup_loader.get(
                        plugin_name, basedir=basedir,
                        runner=None).run(terms, inject=vars)
                    break

                for item in items:
                    incvars = vars.copy()
                    incvars['item'] = item
                    # k=v pairs on the include line become extra variables
                    for t in tokens[1:]:
                        (k, v) = t.split("=", 1)
                        incvars[k] = utils.template_ds(basedir, v, incvars)
                    included_path = utils.path_dwim(basedir, tokens[0])
                    (plays, basedirs) = self._load_playbook_from_file(
                        included_path, incvars)
                    for p in plays:
                        if 'vars' not in p:
                            p['vars'] = {}
                        if isinstance(p['vars'], dict):
                            p['vars'].update(incvars)
                        elif isinstance(p['vars'], list):
                            # BUGFIX: dict(k=v) created {'k': v} with the
                            # literal key 'k' for every variable; build the
                            # real {name: value} mapping instead.
                            p['vars'].extend([
                                {k: v} for k, v in incvars.iteritems()
                            ])
                    accumulated_plays.extend(plays)
                    play_basedirs.extend(basedirs)
            else:
                raise errors.AnsibleError(
                    "parse error: playbook includes cannot be used with other directives: %s" % play)
        else:
            accumulated_plays.append(play)
            play_basedirs.append(basedir)

    return (accumulated_plays, play_basedirs)
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    ''' handler for file transfer operations '''
    # Script action plugin: pushes a local script to the remote tmp dir,
    # makes it executable, and runs it through the 'raw' action.

    if self.runner.noop_on_check(inject):
        # in check mode, always skip this module
        return ReturnData(
            conn=conn,
            comm_ok=True,
            result=dict(skipped=True, msg='check mode not supported for this module'))

    # Decode the result of shlex.split() to UTF8 to get around a bug in that's been fixed in Python 2.7 but not Python 2.6.
    # See: http://bugs.python.org/issue6988
    argv = [tok.decode('utf8') for tok in shlex.split(module_args.encode('utf8'))]
    script_src = argv[0]
    # FIXME: error handling
    script_args = " ".join(argv[1:])

    # template the script path, then resolve it against files/ (when
    # included from a role) or the play basedir
    script_src = template.template(self.runner.basedir, script_src, inject)
    if '_original_file' in inject:
        script_src = utils.path_dwim_relative(
            inject['_original_file'], 'files', script_src, self.runner.basedir)
    else:
        script_src = utils.path_dwim(self.runner.basedir, script_src)

    # transfer the file to a remote tmp location
    script_src = script_src.replace('\x00', '')  # why does this happen here?
    script_args = script_args.replace('\x00', '')  # why does this happen here?
    remote_path = os.path.join(tmp, os.path.basename(script_src)).replace('\x00', '')

    conn.put_file(script_src, remote_path)

    # set file permissions, more permisive when the copy is done as a different user
    running_sudo_nonroot = self.runner.sudo and self.runner.sudo_user != 'root'
    if running_sudo_nonroot:
        chmod_cmd = "chmod a+rx %s" % remote_path
    else:
        chmod_cmd = "chmod +rx %s" % remote_path
    self.runner._low_level_exec_command(
        conn, chmod_cmd, tmp, sudoable=not running_sudo_nonroot)

    # add preparation steps to one ssh roundtrip executing the script
    env_string = self.runner._compute_environment_string(inject)
    raw_args = env_string + remote_path + ' ' + script_args

    raw_action = utils.plugins.action_loader.get('raw', self.runner)
    outcome = raw_action.run(conn, tmp, 'raw', raw_args, inject)

    # clean up after
    if tmp.find("tmp") != -1 and not C.DEFAULT_KEEP_REMOTE_FILES:
        self.runner._low_level_exec_command(
            conn, 'rm -rf %s >/dev/null 2>&1' % tmp, tmp)

    # running a script is always considered a change
    outcome.result['changed'] = True

    return outcome
def template_from_file(basedir, path, vars):
    '''
    Run a file through the Jinja2 templating engine.

    Reads the file at ``path`` (dwim'd against ``basedir``) as UTF-8,
    applies any ``#jinja2:`` override header, injects the ``template_*``
    and ``ansible_managed`` magic variables, and renders with a lookup()
    global bound to ``basedir`` and ``vars``.
    '''
    fail_on_undefined = C.DEFAULT_UNDEFINED_VAR_BEHAVIOR

    from ansible import utils
    realpath = utils.path_dwim(basedir, path)
    loader = jinja2.FileSystemLoader([basedir, os.path.dirname(realpath)])

    def my_lookup(*args, **kwargs):
        # closure: force the caller's vars/basedir into every lookup() call
        kwargs['vars'] = vars
        return lookup(*args, basedir=basedir, **kwargs)

    environment = jinja2.Environment(loader=loader, trim_blocks=True, extensions=_get_extensions())
    environment.filters.update(_get_filters())
    environment.globals['lookup'] = my_lookup
    if fail_on_undefined:
        environment.undefined = StrictUndefined

    try:
        data = codecs.open(realpath, encoding="utf8").read()
    except UnicodeDecodeError:
        raise errors.AnsibleError("unable to process as utf-8: %s" % realpath)
    except:
        raise errors.AnsibleError("unable to read %s" % realpath)

    # Get jinja env overrides from template
    if data.startswith(JINJA2_OVERRIDE):
        eol = data.find('\n')
        line = data[len(JINJA2_OVERRIDE):eol]
        data = data[eol + 1:]
        for pair in line.split(','):
            (key, val) = pair.split(':')
            # override attributes like trim_blocks on the environment
            setattr(environment, key.strip(), ast.literal_eval(val.strip()))

    environment.template_class = J2Template
    t = environment.from_string(data)
    # copy so the template_* magic vars below don't leak to the caller
    vars = vars.copy()
    try:
        template_uid = pwd.getpwuid(os.stat(realpath).st_uid).pw_name
    except:
        # fall back to the numeric uid when there is no passwd entry
        template_uid = os.stat(realpath).st_uid
    vars['template_host'] = os.uname()[1]
    vars['template_path'] = realpath
    vars['template_mtime'] = datetime.datetime.fromtimestamp(os.path.getmtime(realpath))
    vars['template_uid'] = template_uid
    vars['template_fullpath'] = os.path.abspath(realpath)
    vars['template_run_date'] = datetime.datetime.now()

    managed_default = C.DEFAULT_MANAGED_STR
    managed_str = managed_default.format(host=vars['template_host'],
                                         uid=vars['template_uid'],
                                         file=vars['template_path'])
    # the managed string itself is strftime-expanded with the file's mtime
    vars['ansible_managed'] = time.strftime(managed_str, time.localtime(os.path.getmtime(realpath)))

    # This line performs deep Jinja2 magic that uses the _jinja2_vars object for vars
    # Ideally, this could use some API where setting shared=True and the object won't get
    # passed through dict(o), but I have not found that yet.
    try:
        res = jinja2.utils.concat(
            t.root_render_func(
                t.new_context(_jinja2_vars(basedir, vars, t.globals), shared=True)))
    except jinja2.exceptions.UndefinedError, e:
        raise errors.AnsibleUndefinedVariable("One or more undefined variables: %s" % str(e))
    # NOTE(review): the function appears truncated here in this chunk;
    # presumably the rendered 'res' is post-processed/returned just below —
    # confirm against the full file.
source = fnd found = True break if not found: results = dict( failed=True, msg="could not find src in first_available_file list") return ReturnData(conn=conn, result=results) else: source = template.template(self.runner.basedir, source, inject) if '_original_file' in inject: source = utils.path_dwim_relative(inject['_original_file'], 'files', source, self.runner.basedir) else: source = utils.path_dwim(self.runner.basedir, source) # A list of source file tuples (full_path, relative_path) which will try to copy to the destination source_files = [] # If source is a directory populate our list else source is a file and translate it to a tuple. if os.path.isdir(source): # Get the amount of spaces to remove to get the relative path. if source_trailing_slash: sz = len(source) + 1 else: sz = len(source.rsplit('/', 1)[0]) + 1 # Walk the directory and append the file tuples to source_files. for base_path, sub_folders, files in os.walk(source): for file in files:
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    '''
    Script action (newer form): honor creates=/removes= idempotence
    guards, then copy a local script to the remote host, chmod it, and
    execute it via the 'raw' action plugin in one round-trip.
    '''
    if self.runner.noop_on_check(inject):
        # in check mode, always skip this module
        return ReturnData(conn=conn, comm_ok=True,
                          result=dict(skipped=True, msg='check mode not supported for this module'))

    # extract ansible reserved parameters
    # From library/command keep in sync
    creates = None
    removes = None
    # matches creates=... / removes=..., with optional (escapable) quoting
    r = re.compile(r'(^|\s)(creates|removes)=(?P<quote>[\'"])?(.*?)(?(quote)(?<!\\)(?P=quote))((?<!\\)(?=\s)|$)')
    for m in r.finditer(module_args):
        v = m.group(4).replace("\\", "")
        if m.group(2) == "creates":
            creates = v
        elif m.group(2) == "removes":
            removes = v
    # strip the reserved parameters before passing args to the script
    module_args = r.sub("", module_args)

    if creates:
        # do not run the command if the line contains creates=filename
        # and the filename already exists. This allows idempotence
        # of command executions.
        module_args_tmp = "path=%s" % creates
        module_return = self.runner._execute_module(conn, tmp, 'stat', module_args_tmp,
                                                    inject=inject, complex_args=complex_args,
                                                    persist_files=True)
        stat = module_return.result.get('stat', None)
        if stat and stat.get('exists', False):
            return ReturnData(conn=conn, comm_ok=True,
                              result=dict(skipped=True, msg=("skipped, since %s exists" % creates)))
    if removes:
        # do not run the command if the line contains removes=filename
        # and the filename does not exist. This allows idempotence
        # of command executions.
        module_args_tmp = "path=%s" % removes
        module_return = self.runner._execute_module(conn, tmp, 'stat', module_args_tmp,
                                                    inject=inject, complex_args=complex_args,
                                                    persist_files=True)
        stat = module_return.result.get('stat', None)
        if stat and not stat.get('exists', False):
            return ReturnData(conn=conn, comm_ok=True,
                              result=dict(skipped=True, msg=("skipped, since %s does not exist" % removes)))

    # Decode the result of shlex.split() to UTF8 to get around a bug in that's been fixed in Python 2.7 but not Python 2.6.
    # See: http://bugs.python.org/issue6988
    tokens = shlex.split(module_args.encode('utf8'))
    tokens = [s.decode('utf8') for s in tokens]
    # extract source script
    source = tokens[0]
    # FIXME: error handling
    args = " ".join(tokens[1:])
    source = template.template(self.runner.basedir, source, inject)
    if '_original_file' in inject:
        # resolve relative to the including file's files/ directory
        source = utils.path_dwim_relative(inject['_original_file'], 'files', source, self.runner.basedir)
    else:
        source = utils.path_dwim(self.runner.basedir, source)

    # transfer the file to a remote tmp location
    source = source.replace('\x00', '')  # why does this happen here?
    args = args.replace('\x00', '')  # why does this happen here?
    tmp_src = conn.shell.join_path(tmp, os.path.basename(source))
    tmp_src = tmp_src.replace('\x00', '')
    conn.put_file(source, tmp_src)
    sudoable = True
    # set file permissions, more permissive when the copy is done as a different user
    if ((self.runner.sudo and self.runner.sudo_user != 'root') or
            (self.runner.su and self.runner.su_user != 'root')):
        chmod_mode = 'a+rx'
        sudoable = False
    else:
        chmod_mode = '+rx'
    self.runner._remote_chmod(conn, chmod_mode, tmp_src, tmp, sudoable=sudoable, su=self.runner.su)

    # add preparation steps to one ssh roundtrip executing the script
    env_string = self.runner._compute_environment_string(conn, inject)
    module_args = ' '.join([env_string, tmp_src, args])
    handler = utils.plugins.action_loader.get('raw', self.runner)
    result = handler.run(conn, tmp, 'raw', module_args, inject)

    # clean up after
    if "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES:
        self.runner._remove_tmp_path(conn, tmp)
    result.result['changed'] = True
    return result
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    '''
    Template action: resolve the src template (supporting the legacy
    first_available_file list), render it locally, and prepare the
    result for transfer to the remote host.
    '''
    if not self.runner.is_playbook:
        raise errors.AnsibleError("in current versions of ansible, templates are only usable in playbooks")

    # load up options
    options = {}
    if complex_args:
        options.update(complex_args)
    options.update(utils.parse_kv(module_args))
    source = options.get('src', None)
    dest = options.get('dest', None)

    if (source is None and 'first_available_file' not in inject) or dest is None:
        result = dict(failed=True, msg="src and dest are required")
        return ReturnData(conn=conn, comm_ok=False, result=result)

    # if we have first_available_file in our vars
    # look up the files and use the first one we find as src
    if 'first_available_file' in inject:
        found = False
        for fn in self.runner.module_vars.get('first_available_file'):
            fn_orig = fn
            fnt = template.template(self.runner.basedir, fn, inject)
            fnd = utils.path_dwim(self.runner.basedir, fnt)
            if not os.path.exists(fnd) and '_original_file' in inject:
                # fall back to the including file's templates/ directory
                fnd = utils.path_dwim_relative(inject['_original_file'], 'templates', fnt,
                                               self.runner.basedir, check=False)
            if os.path.exists(fnd):
                source = fnd
                found = True
                break
        if not found:
            result = dict(failed=True, msg="could not find src in first_available_file list")
            return ReturnData(conn=conn, comm_ok=False, result=result)
    else:
        source = template.template(self.runner.basedir, source, inject)
        if '_original_file' in inject:
            source = utils.path_dwim_relative(inject['_original_file'], 'templates', source, self.runner.basedir)
        else:
            source = utils.path_dwim(self.runner.basedir, source)

    if dest.endswith("/"):
        # CCTODO: Fix path for Windows hosts.
        # dest is a directory: append the template's basename
        base = os.path.basename(source)
        dest = os.path.join(dest, base)

    # template the source data locally & get ready to transfer
    try:
        resultant = template.template_from_file(self.runner.basedir, source, inject,
                                                vault_password=self.runner.vault_pass)
    except Exception, e:
        result = dict(failed=True, msg=type(e).__name__ + ": " + str(e))
        return ReturnData(conn=conn, comm_ok=False, result=result)
    # NOTE(review): the remainder of this handler (transferring 'resultant'
    # to the remote host) falls outside this chunk.
def _build_role_dependencies(self, roles, dep_stack, passed_vars=None, level=0):
    """
    Resolve the dependency tree for a list of roles, depth-first.

    For each role, loads its vars/, defaults/ and meta/ main files, then
    recurses into the roles listed under meta 'dependencies', appending
    [name, path, vars, defaults] entries to ``dep_stack`` (deepest
    dependencies first). Top-level roles are appended last at level 0.

    :param roles: list of role names (or role dicts) to resolve
    :param dep_stack: accumulator list, mutated in place and returned
    :param passed_vars: vars inherited from the including role/play
    :param level: recursion depth, capped at 20
    :raises errors.AnsibleError: on excessive recursion or malformed vars
    """
    # avoid a shared mutable default argument
    if passed_vars is None:
        passed_vars = {}
    # this number is arbitrary, but it seems sane
    if level > 20:
        raise errors.AnsibleError("too many levels of recursion while resolving role dependencies")
    for role in roles:
        role_path, role_vars = self._get_role_path(role)
        role_vars = utils.combine_vars(passed_vars, role_vars)
        vars = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'vars')))
        vars_data = {}
        if os.path.isfile(vars):
            vars_data = utils.parse_yaml_from_file(vars)
            if vars_data:
                if not isinstance(vars_data, dict):
                    raise errors.AnsibleError("vars from '%s' are not a dict" % vars)
                role_vars = utils.combine_vars(vars_data, role_vars)
        defaults = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'defaults')))
        defaults_data = {}
        if os.path.isfile(defaults):
            defaults_data = utils.parse_yaml_from_file(defaults)
        # the meta directory contains the yaml that should
        # hold the list of dependencies (if any)
        meta = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'meta')))
        if os.path.isfile(meta):
            data = utils.parse_yaml_from_file(meta)
            if data:
                dependencies = data.get('dependencies', [])
                if dependencies is None:
                    dependencies = []
                for dep in dependencies:
                    allow_dupes = False
                    (dep_path, dep_vars) = self._get_role_path(dep)
                    meta = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'meta')))
                    if os.path.isfile(meta):
                        meta_data = utils.parse_yaml_from_file(meta)
                        if meta_data:
                            allow_dupes = utils.boolean(meta_data.get('allow_duplicates', ''))

                    # if tags are set from this role, merge them
                    # into the tags list for the dependent role
                    if "tags" in passed_vars:
                        for included_role_dep in dep_stack:
                            included_dep_name = included_role_dep[0]
                            included_dep_vars = included_role_dep[2]
                            if included_dep_name == dep:
                                if "tags" in included_dep_vars:
                                    included_dep_vars["tags"] = list(set(included_dep_vars["tags"] + passed_vars["tags"]))
                                else:
                                    # BUGFIX: was passed_vars["tags"].copy() —
                                    # list.copy() does not exist on Python 2;
                                    # list() makes the same shallow copy
                                    included_dep_vars["tags"] = list(passed_vars["tags"])

                    dep_vars = utils.combine_vars(passed_vars, dep_vars)
                    dep_vars = utils.combine_vars(role_vars, dep_vars)
                    vars = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'vars')))
                    vars_data = {}
                    if os.path.isfile(vars):
                        vars_data = utils.parse_yaml_from_file(vars)
                        if vars_data:
                            dep_vars = utils.combine_vars(vars_data, dep_vars)
                    defaults = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'defaults')))
                    dep_defaults_data = {}
                    if os.path.isfile(defaults):
                        dep_defaults_data = utils.parse_yaml_from_file(defaults)
                    if 'role' in dep_vars:
                        del dep_vars['role']

                    if "tags" in passed_vars:
                        if not self._is_valid_tag(passed_vars["tags"]):
                            # one of the tags specified for this role was in the
                            # skip list, or we're limiting the tags and it didn't
                            # match one, so we just skip it completely
                            continue

                    if not allow_dupes:
                        if dep in self.included_roles:
                            # skip back to the top, since we don't want to
                            # do anything else with this role
                            continue
                        else:
                            self.included_roles.append(dep)

                    # pass along conditionals from roles to dep roles
                    if type(role) is dict:
                        if 'when' in passed_vars:
                            if 'when' in dep_vars:
                                tmpcond = []
                                if type(passed_vars['when']) is str:
                                    tmpcond.append(passed_vars['when'])
                                elif type(passed_vars['when']) is list:
                                    # BUGFIX: was tmpcond.join(...) — lists have
                                    # no join() method, which raised
                                    # AttributeError; extend() merges the lists
                                    tmpcond.extend(passed_vars['when'])
                                if type(dep_vars['when']) is str:
                                    tmpcond.append(dep_vars['when'])
                                elif type(dep_vars['when']) is list:
                                    tmpcond += dep_vars['when']
                                if len(tmpcond) > 0:
                                    dep_vars['when'] = tmpcond
                            else:
                                dep_vars['when'] = passed_vars['when']

                    self._build_role_dependencies([dep], dep_stack, passed_vars=dep_vars, level=level + 1)
                    dep_stack.append([dep, dep_path, dep_vars, dep_defaults_data])

        # only add the current role when we're at the top level,
        # otherwise we'll end up in a recursive loop
        if level == 0:
            self.included_roles.append(role)
            dep_stack.append([role, role_path, role_vars, defaults_data])
    return dep_stack
def _load_tasks(self, tasks, vars={}, default_vars={}, sudo_vars={}, additional_conditions=[], original_file=None):
    '''
    handle task and handler include statements

    Recursively expands 'include:' entries (optionally driven by a
    with_* lookup plugin) into a flat list of Task objects; plain dict
    entries become Tasks directly. Play tags are appended to every
    resulting task.

    NOTE(review): the mutable default arguments ({} / []) are read-only
    here, but the None-default pattern used by the newer _load_tasks
    variants in this file would be safer.
    '''
    results = []
    if tasks is None:
        # support empty handler files, and the like.
        tasks = []
    for x in tasks:
        if not isinstance(x, dict):
            raise errors.AnsibleError("expecting dict; got: %s" % x)

        # evaluate sudo vars for current and child tasks
        included_sudo_vars = {}
        for k in ["sudo", "sudo_user"]:
            if k in x:
                included_sudo_vars[k] = x[k]
            elif k in sudo_vars:
                included_sudo_vars[k] = sudo_vars[k]
                x[k] = sudo_vars[k]

        if 'meta' in x:
            if x['meta'] == 'flush_handlers':
                results.append(Task(self, x))
                continue

        task_vars = self.vars.copy()
        task_vars.update(vars)
        if original_file:
            task_vars['_original_file'] = original_file

        if 'include' in x:
            tokens = shlex.split(str(x['include']))
            items = ['']
            included_additional_conditions = list(additional_conditions)
            for k in x:
                if k.startswith("with_"):
                    # with_<plugin> drives one include per looked-up item
                    plugin_name = k[5:]
                    if plugin_name not in utils.plugins.lookup_loader:
                        raise errors.AnsibleError("cannot find lookup plugin named %s for usage in with_%s" % (plugin_name, plugin_name))
                    terms = template(self.basedir, x[k], task_vars)
                    items = utils.plugins.lookup_loader.get(plugin_name, basedir=self.basedir, runner=None).run(terms, inject=task_vars)
                elif k.startswith("when_"):
                    included_additional_conditions.insert(0, utils.compile_when_to_only_if("%s %s" % (k[5:], x[k])))
                elif k == 'when':
                    included_additional_conditions.insert(0, utils.compile_when_to_only_if("jinja2_compare %s" % x[k]))
                elif k in ("include", "vars", "default_vars", "only_if", "sudo", "sudo_user"):
                    pass
                else:
                    raise errors.AnsibleError("parse error: task includes cannot be used with other directives: %s" % k)

            default_vars = utils.combine_vars(self.default_vars, x.get('default_vars', {}))
            if 'vars' in x:
                task_vars = utils.combine_vars(task_vars, x['vars'])
            if 'only_if' in x:
                included_additional_conditions.append(x['only_if'])

            for item in items:
                mv = task_vars.copy()
                mv['item'] = item
                # k=v parameters on the include line become vars for the
                # included file
                for t in tokens[1:]:
                    (k, v) = t.split("=", 1)
                    mv[k] = template(self.basedir, v, mv)
                dirname = self.basedir
                if original_file:
                    dirname = os.path.dirname(original_file)
                include_file = template(dirname, tokens[0], mv)
                include_filename = utils.path_dwim(dirname, include_file)
                data = utils.parse_yaml_from_file(include_filename)
                results += self._load_tasks(data, mv, default_vars, included_sudo_vars, included_additional_conditions, original_file=include_filename)
        elif type(x) == dict:
            results.append(Task(self, x, module_vars=task_vars, default_vars=default_vars, additional_conditions=additional_conditions))
        else:
            raise Exception("unexpected task type")

    # propagate play tags onto every loaded task
    for x in results:
        if self.tags is not None:
            x.tags.extend(self.tags)
    return results
def _load_tasks(self, tasks, vars=None, default_vars=None, sudo_vars=None, additional_conditions=None, original_file=None, role_name=None):
    '''
    handle task and handler include statements

    Recursively expands 'include:' entries into a flat list of Task
    objects, propagating sudo vars, conditions, include-line vars, and
    (via role_name) role attribution down to nested includes. Plain dict
    entries become Tasks directly; play tags are appended to every
    resulting task.
    '''
    results = []
    if tasks is None:
        # support empty handler files, and the like.
        tasks = []
    if additional_conditions is None:
        additional_conditions = []
    if vars is None:
        vars = {}
    if default_vars is None:
        default_vars = {}
    if sudo_vars is None:
        sudo_vars = {}

    old_conditions = list(additional_conditions)

    for x in tasks:
        # prevent assigning the same conditions to each task on an include
        included_additional_conditions = list(old_conditions)

        if not isinstance(x, dict):
            raise errors.AnsibleError("expecting dict; got: %s" % x)

        # evaluate sudo vars for current and child tasks
        included_sudo_vars = {}
        for k in ["sudo", "sudo_user"]:
            if k in x:
                included_sudo_vars[k] = x[k]
            elif k in sudo_vars:
                included_sudo_vars[k] = sudo_vars[k]
                x[k] = sudo_vars[k]

        if 'meta' in x:
            if x['meta'] == 'flush_handlers':
                results.append(Task(self, x))
                continue

        task_vars = self.vars.copy()
        task_vars.update(vars)
        if original_file:
            task_vars['_original_file'] = original_file

        if 'include' in x:
            tokens = shlex.split(str(x['include']))
            items = ['']
            included_additional_conditions = list(additional_conditions)
            include_vars = {}
            for k in x:
                if k.startswith("with_"):
                    utils.deprecated("include + with_items is a removed deprecated feature", "1.5", removed=True)
                elif k.startswith("when_"):
                    utils.deprecated("\"when_<criteria>:\" is a removed deprecated feature, use the simplified 'when:' conditional directly", None, removed=True)
                elif k == 'when':
                    if type(x[k]) is str:
                        included_additional_conditions.insert(0, x[k])
                    elif type(x[k]) is list:
                        for i in x[k]:
                            included_additional_conditions.insert(0, i)
                elif k in ("include", "vars", "default_vars", "sudo", "sudo_user", "role_name"):
                    continue
                else:
                    # any other key on the include line is treated as a var
                    include_vars[k] = x[k]

            default_vars = x.get('default_vars', {})
            if not default_vars:
                default_vars = self.default_vars
            else:
                default_vars = utils.combine_vars(self.default_vars, default_vars)

            # append the vars defined with the include (from above)
            # as well as the old-style 'vars' element. The old-style
            # vars are given higher precedence here (just in case)
            task_vars = utils.combine_vars(task_vars, include_vars)
            if 'vars' in x:
                task_vars = utils.combine_vars(task_vars, x['vars'])
            # NOTE: 'when' was already inserted above in the key loop; this
            # append duplicates the condition, which is harmless (both copies
            # must hold) but redundant
            if 'when' in x:
                included_additional_conditions.append(x['when'])

            new_role = None
            if 'role_name' in x:
                new_role = x['role_name']

            for item in items:
                mv = task_vars.copy()
                mv['item'] = item
                # k=v parameters on the include line become vars for the
                # included file
                for t in tokens[1:]:
                    (k, v) = t.split("=", 1)
                    mv[k] = template(self.basedir, v, mv)
                dirname = self.basedir
                if original_file:
                    dirname = os.path.dirname(original_file)
                include_file = template(dirname, tokens[0], mv)
                include_filename = utils.path_dwim(dirname, include_file)
                data = utils.parse_yaml_from_file(include_filename)
                if 'role_name' in x and data is not None:
                    # BUGFIX: the inner loop previously reused 'x' as its
                    # loop variable, clobbering the outer task dict; renamed
                    # to 'y' (and guarded with isinstance, matching the later
                    # version of this method) so only dict entries are tagged
                    for y in data:
                        if isinstance(y, dict) and 'include' in y:
                            y['role_name'] = new_role
                loaded = self._load_tasks(data, mv, default_vars, included_sudo_vars, list(included_additional_conditions), original_file=include_filename, role_name=new_role)
                results += loaded
        elif type(x) == dict:
            task = Task(self, x, module_vars=task_vars, default_vars=default_vars, additional_conditions=list(additional_conditions), role_name=role_name)
            results.append(task)
        else:
            raise Exception("unexpected task type")

    # propagate play tags onto every loaded task
    for x in results:
        if self.tags is not None:
            x.tags.extend(self.tags)
    return results
def _load_playbook_from_file(self, path, vars={}): ''' run top level error checking on playbooks and allow them to include other playbooks. ''' playbook_data = utils.parse_yaml_from_file(path) accumulated_plays = [] play_basedirs = [] if type(playbook_data) != list: raise errors.AnsibleError( "parse error: playbooks must be formatted as a YAML list, got %s" % type(playbook_data)) basedir = os.path.dirname(path) or '.' utils.plugins.push_basedir(basedir) for play in playbook_data: if type(play) != dict: raise errors.AnsibleError( "parse error: each play in a playbook must be a YAML dictionary (hash), recieved: %s" % play) if 'include' in play: # a playbook (list of plays) decided to include some other list of plays # from another file. The result is a flat list of plays in the end. tokens = shlex.split(play['include']) incvars = vars.copy() if 'vars' in play: if isinstance(play['vars'], dict): incvars.update(play['vars']) elif isinstance(play['vars'], list): for v in play['vars']: incvars.update(v) # allow key=value parameters to be specified on the include line # to set variables for t in tokens[1:]: (k, v) = t.split("=", 1) incvars[k] = template(basedir, v, incvars) included_path = utils.path_dwim( basedir, template(basedir, tokens[0], incvars)) (plays, basedirs) = self._load_playbook_from_file( included_path, incvars) for p in plays: # support for parameterized play includes works by passing # those variables along to the subservient play if 'vars' not in p: p['vars'] = {} if isinstance(p['vars'], dict): p['vars'].update(incvars) elif isinstance(p['vars'], list): # nobody should really do this, but handle vars: a=1 b=2 p['vars'].extend( [dict(k=v) for k, v in incvars.iteritems()]) accumulated_plays.extend(plays) play_basedirs.extend(basedirs) else: # this is a normal (non-included play) accumulated_plays.append(play) play_basedirs.append(basedir) return (accumulated_plays, play_basedirs)
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs): ''' handler for file transfer operations ''' # load up options options = {} if complex_args: options.update(complex_args) options.update(utils.parse_kv(module_args)) source = options.get('src', None) content = options.get('content', None) dest = options.get('dest', None) raw = utils.boolean(options.get('raw', 'no')) force = utils.boolean(options.get('force', 'yes')) if (source is None and content is None and not 'first_available_file' in inject) or dest is None: result = dict(failed=True, msg="src (or content) and dest are required") return ReturnData(conn=conn, result=result) elif (source is not None or 'first_available_file' in inject) and content is not None: result = dict(failed=True, msg="src and content are mutually exclusive") return ReturnData(conn=conn, result=result) # if we have first_available_file in our vars # look up the files and use the first one we find as src if 'first_available_file' in inject: found = False for fn in inject.get('first_available_file'): fn_orig = fn fnt = template.template(self.runner.basedir, fn, inject) fnd = utils.path_dwim(self.runner.basedir, fnt) if not os.path.exists(fnd) and '_original_file' in inject: fnd = utils.path_dwim_relative(inject['_original_file'], 'files', fnt, self.runner.basedir, check=False) if os.path.exists(fnd): source = fnd found = True break if not found: results = dict( failed=True, msg="could not find src in first_available_file list") return ReturnData(conn=conn, result=results) elif content is not None: fd, tmp_content = tempfile.mkstemp() f = os.fdopen(fd, 'w') try: f.write(content) except Exception, err: os.remove(tmp_content) result = dict(failed=True, msg="could not write content temp file: %s" % err) return ReturnData(conn=conn, result=result) f.close() source = tmp_content
def _load_tasks(self, tasks, vars=None, role_params=None, default_vars=None, become_vars=None, additional_conditions=None, original_file=None, role_name=None):
    '''
    handle task and handler include statements

    Latest variant: supports privilege-escalation (become/sudo/su) vars,
    role params, vaulted include files, and split_args/unquote parsing of
    the include line. Expands 'include:' entries recursively into a flat
    list of Task objects; play tags are appended to every resulting task.
    '''
    results = []
    if tasks is None:
        # support empty handler files, and the like.
        tasks = []
    if additional_conditions is None:
        additional_conditions = []
    if vars is None:
        vars = {}
    if role_params is None:
        role_params = {}
    if default_vars is None:
        default_vars = {}
    if become_vars is None:
        become_vars = {}

    old_conditions = list(additional_conditions)

    for x in tasks:
        # prevent assigning the same conditions to each task on an include
        included_additional_conditions = list(old_conditions)

        if not isinstance(x, dict):
            raise errors.AnsibleError("expecting dict; got: %s, error in %s" % (x, original_file))

        # evaluate privilege escalation vars for current and child tasks
        included_become_vars = {}
        for k in ["become", "become_user", "become_method", "become_exe", "sudo", "su", "sudo_user", "su_user"]:
            if k in x:
                included_become_vars[k] = x[k]
            elif k in become_vars:
                included_become_vars[k] = become_vars[k]
                x[k] = become_vars[k]

        task_vars = vars.copy()
        if original_file:
            task_vars['_original_file'] = original_file

        if 'meta' in x:
            if x['meta'] == 'flush_handlers':
                if role_name and 'role_name' not in x:
                    x['role_name'] = role_name
                results.append(Task(self, x, module_vars=task_vars, role_name=role_name))
                continue

        if 'include' in x:
            tokens = split_args(str(x['include']))
            included_additional_conditions = list(additional_conditions)
            include_vars = {}
            for k in x:
                if k.startswith("with_"):
                    if original_file:
                        offender = " (in %s)" % original_file
                    else:
                        offender = ""
                    utils.deprecated("include + with_items is a removed deprecated feature" + offender, "1.5", removed=True)
                elif k.startswith("when_"):
                    utils.deprecated("\"when_<criteria>:\" is a removed deprecated feature, use the simplified 'when:' conditional directly", None, removed=True)
                elif k == 'when':
                    if isinstance(x[k], (basestring, bool)):
                        included_additional_conditions.append(x[k])
                    elif type(x[k]) is list:
                        included_additional_conditions.extend(x[k])
                elif k in ("include", "vars", "role_params", "default_vars", "sudo", "sudo_user", "role_name", "no_log", "become", "become_user", "su", "su_user"):
                    continue
                else:
                    # any other key on the include line is treated as a var
                    include_vars[k] = x[k]

            # get any role parameters specified
            role_params = x.get('role_params', {})

            # get any role default variables specified
            default_vars = x.get('default_vars', {})
            if not default_vars:
                default_vars = self.default_vars
            else:
                default_vars = utils.combine_vars(self.default_vars, default_vars)

            # append the vars defined with the include (from above)
            # as well as the old-style 'vars' element. The old-style
            # vars are given higher precedence here (just in case)
            task_vars = utils.combine_vars(task_vars, include_vars)
            if 'vars' in x:
                task_vars = utils.combine_vars(task_vars, x['vars'])

            new_role = None
            if 'role_name' in x:
                new_role = x['role_name']

            mv = task_vars.copy()
            # k=v parameters on the include line become vars for the
            # included file
            for t in tokens[1:]:
                (k, v) = t.split("=", 1)
                v = unquote(v)
                mv[k] = template(self.basedir, v, mv)

            dirname = self.basedir
            if original_file:
                dirname = os.path.dirname(original_file)

            # temp vars are used here to avoid trampling on the existing vars structures
            temp_vars = utils.combine_vars(self.vars, self.vars_file_vars)
            temp_vars = utils.combine_vars(temp_vars, mv)
            temp_vars = utils.combine_vars(temp_vars, self.playbook.extra_vars)
            include_file = template(dirname, tokens[0], temp_vars)
            include_filename = utils.path_dwim(dirname, include_file)

            data = utils.parse_yaml_from_file(include_filename, vault_password=self.vault_password)
            if 'role_name' in x and data is not None:
                # tag nested includes with the owning role's name
                for y in data:
                    if isinstance(y, dict) and 'include' in y:
                        y['role_name'] = new_role
            loaded = self._load_tasks(data, mv, role_params, default_vars, included_become_vars, list(included_additional_conditions), original_file=include_filename, role_name=new_role)
            results += loaded
        elif type(x) == dict:
            task = Task(
                self, x,
                module_vars=task_vars,
                play_vars=self.vars,
                play_file_vars=self.vars_file_vars,
                role_vars=self.role_vars,
                role_params=role_params,
                default_vars=default_vars,
                additional_conditions=list(additional_conditions),
                role_name=role_name
            )
            results.append(task)
        else:
            raise Exception("unexpected task type")

    # propagate play tags onto every loaded task
    for x in results:
        if self.tags is not None:
            x.tags.extend(self.tags)
    return results
def run(self, conn, tmp, module_name, module_args, inject):
    '''
    handler for fetch operations

    Pulls a remote file down to dest/<host>/<source> on the control
    machine, skipping the transfer when local and remote md5 sums match.
    '''
    # load up options
    options = utils.parse_kv(module_args)
    source = options.get('src', None)
    dest = options.get('dest', None)
    if source is None or dest is None:
        results = dict(failed=True, msg="src and dest are required")
        return ReturnData(conn=conn, result=results)

    # apply templating to source argument
    source = utils.template(self.runner.basedir, source, inject)
    # apply templating to dest argument
    dest = utils.template(self.runner.basedir, dest, inject)

    # files are saved in dest dir, with a subdir for each host, then the filename
    dest = "%s/%s/%s" % (utils.path_dwim(self.runner.basedir, dest), conn.host, source)
    dest = dest.replace("//", "/")

    # calculate md5 sum for the remote file
    remote_md5 = self.runner._remote_md5(conn, tmp, source)

    # these don't fail because you may want to transfer a log file that possibly MAY exist
    # but keep going to fetch other log files
    # ('0'/'1'/'2' are the runner's sentinel values for md5 failures)
    if remote_md5 == '0':
        result = dict(msg="unable to calculate the md5 sum of the remote file", file=source, changed=False)
        return ReturnData(conn=conn, result=result)
    if remote_md5 == '1':
        result = dict(msg="the remote file does not exist, not transferring, ignored", file=source, changed=False)
        return ReturnData(conn=conn, result=result)
    if remote_md5 == '2':
        result = dict(msg="no read permission on remote file, not transferring, ignored", file=source, changed=False)
        return ReturnData(conn=conn, result=result)

    # calculate md5 sum for the local file
    local_md5 = utils.md5(dest)

    if remote_md5 != local_md5:
        # create the containing directories, if needed
        if not os.path.isdir(os.path.dirname(dest)):
            os.makedirs(os.path.dirname(dest))

        # fetch the file and check for changes
        conn.fetch_file(source, dest)
        new_md5 = utils.md5(dest)
        if new_md5 != remote_md5:
            result = dict(failed=True, md5sum=new_md5, msg="md5 mismatch", file=source)
            return ReturnData(conn=conn, result=result)
        result = dict(changed=True, md5sum=new_md5)
        return ReturnData(conn=conn, result=result)
    else:
        result = dict(changed=False, md5sum=local_md5, file=source)
        return ReturnData(conn=conn, result=result)
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    '''
    Unarchive action: optionally transfer a local archive to the remote
    temp dir (copy=yes), verify dest is an existing remote directory,
    then delegate extraction to the 'unarchive' module.
    '''
    # merge complex args and inline k=v args into one options dict
    opts = {}
    if complex_args:
        opts.update(complex_args)
    opts.update(utils.parse_kv(module_args))

    source = opts.get('src', None)
    dest = opts.get('dest', None)
    copy = utils.boolean(opts.get('copy', 'yes'))

    if source is None or dest is None:
        return ReturnData(conn=conn, result=dict(failed=True, msg="src (or content) and dest are required"))

    dest = os.path.expanduser(dest)  # CCTODO: Fix path for Windows hosts.
    source = template.template(self.runner.basedir, os.path.expanduser(source), inject)
    if copy:
        # archive lives on the control machine: resolve its local path
        if '_original_file' in inject:
            source = utils.path_dwim_relative(inject['_original_file'], 'files', source, self.runner.basedir)
        else:
            source = utils.path_dwim(self.runner.basedir, source)

    # '3' is the runner's md5 marker for a directory; anything else means
    # dest is missing or not a directory
    remote_md5 = self.runner._remote_md5(conn, tmp, dest)
    if remote_md5 != '3':
        return ReturnData(conn=conn, result=dict(failed=True, msg="dest '%s' must be an existing dir" % dest))

    if copy:
        # transfer the file to a remote tmp location
        tmp_src = tmp + 'source'
        conn.put_file(source, tmp_src)

    # handle diff mode client side
    # handle check mode client side
    if copy:
        # fix file permissions when the copy is done as a different user
        escalated = ((self.runner.sudo and self.runner.sudo_user != 'root') or
                     (self.runner.su and self.runner.su_user != 'root'))
        if escalated:
            if not self.runner.noop_on_check(inject):
                self.runner._remote_chmod(conn, 'a+r', tmp_src, tmp)
        # Build temporary module_args.
        new_module_args = dict(src=tmp_src, original_basename=os.path.basename(source))
        # make sure checkmod is passed on correctly
        if self.runner.noop_on_check(inject):
            new_module_args['CHECKMODE'] = True
        module_args = utils.merge_module_args(module_args, new_module_args)
    else:
        module_args = "%s original_basename=%s" % (module_args, pipes.quote(os.path.basename(source)))
        # make sure checkmod is passed on correctly
        if self.runner.noop_on_check(inject):
            module_args += " CHECKMODE=True"

    return self.runner._execute_module(conn, tmp, 'unarchive', module_args, inject=inject, complex_args=complex_args)
def run(self, terms, inject=None, **kwargs): terms = utils.listify_lookup_plugin_terms(terms, self.basedir, inject) ret = [] for term in terms: # you can't have escaped spaces in yor pathname params = term.split() relpath = params[0] paramvals = { 'length': LookupModule.LENGTH, 'encrypt': None, } # get non-default parameters if specified try: for param in params[1:]: name, value = param.split('=') assert(name in paramvals) if name == 'length': paramvals[name] = int(value) else: paramvals[name] = value except (ValueError, AssertionError) as e: raise errors.AnsibleError(e) length = paramvals['length'] encrypt = paramvals['encrypt'] # get password or create it if file doesn't exist path = utils.path_dwim(self.basedir, relpath) if not os.path.exists(path): pathdir = os.path.dirname(path) if not os.path.isdir(pathdir): os.makedirs(pathdir) chars = ascii_letters + digits + ".,:-_" password = utils.random_password(length) if encrypt is not None: salt = self.random_salt() content = '%s salt=%s' % (password, salt) else: content = password with open(path, 'w') as f: f.write(content + '\n') else: content = open(path).read().rstrip() sep = content.find(' ') if sep >= 0: password = content[:sep] salt = content[sep+1:].split('=')[1] else: password = content salt = None # crypt requested, add salt if missing if (encrypt is not None and not salt): salt = self.random_salt() content = '%s salt=%s' % (password, salt) with open(path, 'w') as f: f.write(content + '\n') # crypt not requested, remove salt if present elif (encrypt is None and salt): with open(path, 'w') as f: f.write(password + '\n') if encrypt: password = utils.do_encrypt(password, encrypt, salt=salt) ret.append(password) return ret
def _load_tasks(self, tasks, vars=None, additional_conditions=None):
    ''' handle task and handler include statements '''
    # Mutable containers must not be default argument values: a shared
    # default dict/list would leak state between calls.  None sentinels
    # keep the call signature backward-compatible.
    if vars is None:
        vars = {}
    if additional_conditions is None:
        additional_conditions = []

    results = []
    if tasks is None:
        # support empty handler files, and the like.
        tasks = []
    for x in tasks:
        # each task sees the play vars overlaid with the include's vars
        task_vars = self.vars.copy()
        task_vars.update(vars)
        if 'include' in x:
            # the include line is "path key=value ...", parsed shell-style
            tokens = shlex.split(x['include'])
            items = ['']
            included_additional_conditions = list(additional_conditions)
            for k in x:
                if k.startswith("with_"):
                    # with_<plugin>: expand the include once per looked-up item
                    plugin_name = k[5:]
                    if plugin_name not in utils.plugins.lookup_loader:
                        raise errors.AnsibleError(
                            "cannot find lookup plugin named %s for usage in with_%s"
                            % (plugin_name, plugin_name))
                    terms = utils.template_ds(self.basedir, x[k], task_vars)
                    items = utils.plugins.lookup_loader.get(
                        plugin_name, basedir=self.basedir,
                        runner=None).run(terms, inject=task_vars)
                elif k.startswith("when_"):
                    # legacy when_<op> syntax is compiled down to only_if
                    included_additional_conditions.append(
                        utils.compile_when_to_only_if("%s %s" % (k[5:], x[k])))
                elif k in ("include", "vars", "only_if"):
                    pass
                else:
                    raise errors.AnsibleError(
                        "parse error: task includes cannot be used with other directives: %s" % k)

            if 'vars' in x:
                task_vars.update(x['vars'])
            if 'only_if' in x:
                included_additional_conditions.append(x['only_if'])

            for item in items:
                mv = task_vars.copy()
                mv['item'] = item
                # key=value tokens on the include line become extra vars,
                # templated against the per-item variable set
                for t in tokens[1:]:
                    (k, v) = t.split("=", 1)
                    mv[k] = utils.template_ds(self.basedir, v, mv)
                include_file = utils.template(self.basedir, tokens[0], mv)
                data = utils.parse_yaml_from_file(
                    utils.path_dwim(self.basedir, include_file))
                # recurse: included files may themselves contain includes
                results += self._load_tasks(
                    data, mv, included_additional_conditions)
        elif type(x) == dict:
            results.append(
                Task(self, x, module_vars=task_vars,
                     additional_conditions=additional_conditions))
        else:
            raise Exception("unexpected task type")

    # play-level tags apply to every resulting task
    for x in results:
        if self.tags is not None:
            x.tags.extend(self.tags)

    return results
def _build_role_dependencies(self, roles, dep_stack, passed_vars={}, level=0):
    """Recursively resolve role dependencies onto dep_stack.

    For each role, loads its vars/defaults/meta files, then walks the
    'dependencies' list from meta/main.yml, merging tags and 'when'
    conditionals down into each dependency's variables before recursing.
    Mutates dep_stack and self.included_roles in place; also returns
    dep_stack for convenience.

    NOTE(review): passed_vars uses a mutable default argument; it is not
    mutated here (utils.combine_vars returns new dicts), but confirm
    before refactoring.
    """
    # this number is arbitrary, but it seems sane
    if level > 20:
        raise errors.AnsibleError(
            "too many levels of recursion while resolving role dependencies"
        )
    for role in roles:
        role_path, role_vars = self._get_role_path(role)
        # vars passed in by the caller override the role's own vars
        role_vars = utils.combine_vars(passed_vars, role_vars)
        vars = self._resolve_main(
            utils.path_dwim(self.basedir, os.path.join(role_path, 'vars')))
        vars_data = {}
        if os.path.isfile(vars):
            vars_data = utils.parse_yaml_from_file(
                vars, vault_password=self.vault_password)
            if vars_data:
                if not isinstance(vars_data, dict):
                    raise errors.AnsibleError(
                        "vars from '%s' are not a dict" % vars)
                role_vars = utils.combine_vars(vars_data, role_vars)
        defaults = self._resolve_main(
            utils.path_dwim(self.basedir, os.path.join(role_path, 'defaults')))
        defaults_data = {}
        if os.path.isfile(defaults):
            defaults_data = utils.parse_yaml_from_file(
                defaults, vault_password=self.vault_password)
        # the meta directory contains the yaml that should
        # hold the list of dependencies (if any)
        meta = self._resolve_main(
            utils.path_dwim(self.basedir, os.path.join(role_path, 'meta')))
        if os.path.isfile(meta):
            data = utils.parse_yaml_from_file(
                meta, vault_password=self.vault_password)
            if data:
                dependencies = data.get('dependencies', [])
                if dependencies is None:
                    dependencies = []
                for dep in dependencies:
                    allow_dupes = False
                    (dep_path, dep_vars) = self._get_role_path(dep)
                    # a dependency's own meta/main.yml may opt in to being
                    # listed more than once via allow_duplicates
                    meta = self._resolve_main(
                        utils.path_dwim(self.basedir,
                                        os.path.join(dep_path, 'meta')))
                    if os.path.isfile(meta):
                        meta_data = utils.parse_yaml_from_file(
                            meta, vault_password=self.vault_password)
                        if meta_data:
                            allow_dupes = utils.boolean(
                                meta_data.get('allow_duplicates', ''))
                    # if any tags were specified as role/dep variables, merge
                    # them into the current dep_vars so they're passed on to any
                    # further dependencies too, and so we only have one place
                    # (dep_vars) to look for tags going forward
                    def __merge_tags(var_obj):
                        # union the dep's current tags with var_obj's tags;
                        # bare strings are normalized to one-element lists
                        old_tags = dep_vars.get('tags', [])
                        if isinstance(old_tags, basestring):
                            old_tags = [old_tags, ]
                        if isinstance(var_obj, dict):
                            new_tags = var_obj.get('tags', [])
                            if isinstance(new_tags, basestring):
                                new_tags = [new_tags, ]
                        else:
                            new_tags = []
                        return list(set(old_tags).union(set(new_tags)))

                    dep_vars['tags'] = __merge_tags(role_vars)
                    dep_vars['tags'] = __merge_tags(passed_vars)

                    # if tags are set from this role, merge them
                    # into the tags list for the dependent role
                    if "tags" in passed_vars:
                        for included_role_dep in dep_stack:
                            included_dep_name = included_role_dep[0]
                            included_dep_vars = included_role_dep[2]
                            if included_dep_name == dep:
                                if "tags" in included_dep_vars:
                                    included_dep_vars["tags"] = list(
                                        set(included_dep_vars["tags"]).
                                        union(set(passed_vars["tags"])))
                                else:
                                    included_dep_vars[
                                        "tags"] = passed_vars["tags"][:]

                    # layering: passed vars < role vars < dep's own vars
                    dep_vars = utils.combine_vars(passed_vars, dep_vars)
                    dep_vars = utils.combine_vars(role_vars, dep_vars)
                    vars = self._resolve_main(
                        utils.path_dwim(self.basedir,
                                        os.path.join(dep_path, 'vars')))
                    vars_data = {}
                    if os.path.isfile(vars):
                        vars_data = utils.parse_yaml_from_file(
                            vars, vault_password=self.vault_password)
                        if vars_data:
                            dep_vars = utils.combine_vars(
                                vars_data, dep_vars)
                    defaults = self._resolve_main(
                        utils.path_dwim(self.basedir,
                                        os.path.join(dep_path, 'defaults')))
                    dep_defaults_data = {}
                    if os.path.isfile(defaults):
                        dep_defaults_data = utils.parse_yaml_from_file(
                            defaults, vault_password=self.vault_password)
                    # 'role' is bookkeeping from the dict form of the dep
                    # spec, not a real variable
                    if 'role' in dep_vars:
                        del dep_vars['role']

                    if not allow_dupes:
                        if dep in self.included_roles:
                            # skip back to the top, since we don't want to
                            # do anything else with this role
                            continue
                        else:
                            self.included_roles.append(dep)

                    def _merge_conditional(cur_conditionals, new_conditionals):
                        # accept a scalar (string/bool) or a list of conditions
                        if isinstance(new_conditionals, (basestring, bool)):
                            cur_conditionals.append(new_conditionals)
                        elif isinstance(new_conditionals, list):
                            cur_conditionals.extend(new_conditionals)

                    # pass along conditionals from roles to dep roles
                    passed_when = passed_vars.get('when')
                    role_when = role_vars.get('when')
                    dep_when = dep_vars.get('when')
                    tmpcond = []
                    _merge_conditional(tmpcond, passed_when)
                    _merge_conditional(tmpcond, role_when)
                    _merge_conditional(tmpcond, dep_when)
                    if len(tmpcond) > 0:
                        dep_vars['when'] = tmpcond

                    # depth-first: a dep's own deps land on the stack first
                    self._build_role_dependencies([dep], dep_stack,
                                                  passed_vars=dep_vars,
                                                  level=level + 1)
                    dep_stack.append(
                        [dep, dep_path, dep_vars, dep_defaults_data])
        # only add the current role when we're at the top level,
        # otherwise we'll end up in a recursive loop
        if level == 0:
            self.included_roles.append(role)
            dep_stack.append([role, role_path, role_vars, defaults_data])
    return dep_stack
def _taskshandlers_children(basedir, k, v, parent_type): return [{ 'path': path_dwim(basedir, th['include']), 'type': 'tasks' } for th in v if 'include' in th]
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    ''' handler for fetch operations '''
    # Copies a file from the remote host to the controller, verifying the
    # transfer by checksum.  Returns a ReturnData describing the outcome;
    # most missing-file conditions are reported as changed=False rather
    # than failures (see the sentinel-code branch below).
    if self.runner.noop_on_check(inject):
        # fetch has no check-mode implementation; report skipped
        return ReturnData(
            conn=conn,
            comm_ok=True,
            result=dict(
                skipped=True,
                msg='check mode not (yet) supported for this module'))

    # load up options (complex_args first, then k=v args override)
    options = {}
    if complex_args:
        options.update(complex_args)
    options.update(utils.parse_kv(module_args))
    source = options.get('src', None)
    dest = options.get('dest', None)
    flat = options.get('flat', False)
    flat = utils.boolean(flat)
    fail_on_missing = options.get('fail_on_missing', False)
    fail_on_missing = utils.boolean(fail_on_missing)
    validate_checksum = options.get('validate_checksum', None)
    if validate_checksum is not None:
        validate_checksum = utils.boolean(validate_checksum)
    # Alias for validate_checksum (old way of specifying it)
    validate_md5 = options.get('validate_md5', None)
    if validate_md5 is not None:
        validate_md5 = utils.boolean(validate_md5)
    # reconcile the new option with its legacy alias; specifying both
    # is an error
    if validate_md5 is None and validate_checksum is None:
        # Default
        validate_checksum = True
    elif validate_checksum is None:
        validate_checksum = validate_md5
    elif validate_md5 is not None and validate_checksum is not None:
        results = dict(
            failed=True,
            msg=
            "validate_checksum and validate_md5 cannot both be specified")
        return ReturnData(conn, result=results)

    if source is None or dest is None:
        results = dict(failed=True, msg="src and dest are required")
        return ReturnData(conn=conn, result=results)

    source = conn.shell.join_path(source)
    source = self.runner._remote_expand_user(conn, source, tmp)

    # calculate checksum for the remote file
    # NOTE(review): string codes '0'-'4' appear to be sentinel error
    # values from _remote_checksum (missing file, no permission, ...) —
    # confirm against the runner implementation.
    remote_checksum = self.runner._remote_checksum(conn, tmp, source,
                                                   inject)

    # use slurp if sudo and permissions are lacking
    remote_data = None
    if remote_checksum in ('1', '2') or self.runner.sudo:
        slurpres = self.runner._execute_module(conn,
                                               tmp,
                                               'slurp',
                                               'src=%s' % source,
                                               inject=inject)
        if slurpres.is_successful():
            if slurpres.result['encoding'] == 'base64':
                remote_data = base64.b64decode(slurpres.result['content'])
            if remote_data is not None:
                remote_checksum = utils.checksum_s(remote_data)
            # the source path may have been expanded on the
            # target system, so we compare it here and use the
            # expanded version if it's different
            remote_source = slurpres.result.get('source')
            if remote_source and remote_source != source:
                source = remote_source

    # calculate the destination name; if the remote shell joins paths
    # with something other than os.path.sep, normalize backslashes
    if os.path.sep not in conn.shell.join_path('a', ''):
        source_local = source.replace('\\', '/')
    else:
        source_local = source

    dest = os.path.expanduser(dest)
    if flat:
        if dest.endswith("/"):
            # if the path ends with "/", we'll use the source filename as the
            # destination filename
            base = os.path.basename(source_local)
            dest = os.path.join(dest, base)
        if not dest.startswith("/"):
            # if dest does not start with "/", we'll assume a relative path
            dest = utils.path_dwim(self.runner.basedir, dest)
    else:
        # files are saved in dest dir, with a subdir for each host, then the filename
        dest = "%s/%s/%s" % (utils.path_dwim(self.runner.basedir, dest),
                             inject['inventory_hostname'], source_local)

    dest = dest.replace("//", "/")

    if remote_checksum in ('0', '1', '2', '3', '4'):
        # these don't fail because you may want to transfer a log file that possibly MAY exist
        # but keep going to fetch other log files
        if remote_checksum == '0':
            result = dict(
                msg="unable to calculate the checksum of the remote file",
                file=source,
                changed=False)
        elif remote_checksum == '1':
            if fail_on_missing:
                result = dict(failed=True,
                              msg="the remote file does not exist",
                              file=source)
            else:
                result = dict(
                    msg=
                    "the remote file does not exist, not transferring, ignored",
                    file=source,
                    changed=False)
        elif remote_checksum == '2':
            result = dict(
                msg=
                "no read permission on remote file, not transferring, ignored",
                file=source,
                changed=False)
        elif remote_checksum == '3':
            result = dict(
                msg=
                "remote file is a directory, fetch cannot work on directories",
                file=source,
                changed=False)
        elif remote_checksum == '4':
            result = dict(
                msg=
                "python isn't present on the system. Unable to compute checksum",
                file=source,
                changed=False)
        return ReturnData(conn=conn, result=result)

    # calculate checksum for the local file
    local_checksum = utils.checksum(dest)

    if remote_checksum != local_checksum:
        # local copy is absent or stale: transfer the file
        # create the containing directories, if needed
        if not os.path.isdir(os.path.dirname(dest)):
            os.makedirs(os.path.dirname(dest))

        # fetch the file and check for changes
        if remote_data is None:
            conn.fetch_file(source, dest)
        else:
            # NOTE(review): slurped content is written in text mode ('w');
            # on platforms that translate newlines this could corrupt
            # binary files — confirm whether 'wb' is intended.
            f = open(dest, 'w')
            f.write(remote_data)
            f.close()
        new_checksum = utils.secure_hash(dest)
        # For backwards compatibility. We'll return None on FIPS enabled
        # systems
        try:
            new_md5 = utils.md5(dest)
        except ValueError:
            new_md5 = None

        if validate_checksum and new_checksum != remote_checksum:
            # transfer happened but content does not match the remote
            result = dict(failed=True,
                          md5sum=new_md5,
                          msg="checksum mismatch",
                          file=source,
                          dest=dest,
                          remote_md5sum=None,
                          checksum=new_checksum,
                          remote_checksum=remote_checksum)
            return ReturnData(conn=conn, result=result)
        result = dict(changed=True,
                      md5sum=new_md5,
                      dest=dest,
                      remote_md5sum=None,
                      checksum=new_checksum,
                      remote_checksum=remote_checksum)
        return ReturnData(conn=conn, result=result)
    else:
        # local copy already matches; nothing transferred
        # For backwards compatibility. We'll return None on FIPS enabled
        # systems
        try:
            local_md5 = utils.md5(dest)
        except ValueError:
            local_md5 = None
        result = dict(changed=False,
                      md5sum=local_md5,
                      file=source,
                      dest=dest,
                      checksum=local_checksum)
        return ReturnData(conn=conn, result=result)