def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    args = {}
    if complex_args:
        args.update(complex_args)
    args.update(utils.parse_kv(module_args))

    dest = args.get('dest')
    extra_vars = args.get('vars')
    sources = args.get('sources')

    if extra_vars:
        # Extend 'inject' args used in templating
        if isinstance(extra_vars, dict):
            inject.update(extra_vars)
        else:
            inject.update(utils.parse_kv(extra_vars))

    # Catch the case where sources is a str()
    if not isinstance(sources, list):
        sources = [sources]

    config = ConfigParser()
    for source in sources:
        # template the source string
        source = template.template(self.runner.basedir, source, inject)
        try:
            self.read_config(source, inject, config)
        except Exception as e:
            return ReturnData(conn=conn, comm_ok=False, result={'failed': True, 'msg': str(e)})

    # Dump configparser to string via an emulated file
    fakefile = StringIO()
    config.write(fakefile)

    # Template the file to fill out any variables
    content = template.template(self.runner.basedir, fakefile.getvalue(), inject)
    fakefile.close()

    # Ship this content over to a new file for use with the copy module
    xfered = self.runner._transfer_str(conn, tmp, 'source', content)

    copy_module_args = dict(
        src=xfered,
        dest=dest,
        original_basename=os.path.basename(source),
        follow=True,
    )
    return self.runner._execute_module(conn, tmp, 'copy', '', inject=inject, complex_args=copy_module_args)
def run(self, terms, **kwargs):
    results = []

    if isinstance(terms, basestring):
        terms = [terms]

    for term in terms:
        try:
            self.reset()  # clear out things for this iteration

            try:
                if not self.parse_simple_args(term):
                    self.parse_kv_args(parse_kv(term))
            except Exception:
                raise AnsibleError(
                    "unknown error parsing with_sequence arguments: %r" % term
                )

            self.sanity_check()

            results.extend(self.generate_sequence())
        except AnsibleError:
            raise
        except Exception:
            raise AnsibleError(
                "unknown error generating sequence"
            )

    return results
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    options = {}
    if complex_args:
        options.update(complex_args)
    options.update(utils.parse_kv(module_args))

    src = options.get('src', None)
    dest = options.get('dest', None)
    remote_src = utils.boolean(options.get('remote_src', 'no'))

    if src is None:
        result = dict(failed=True, msg="src is required")
        return ReturnData(conn=conn, comm_ok=False, result=result)

    if remote_src:
        return self.runner._execute_module(conn, tmp, 'patch', module_args, inject=inject, complex_args=complex_args)

    # Source is local
    if '_original_file' in inject:
        src = utils.path_dwim_relative(inject['_original_file'], 'files', src, self.runner.basedir)
    else:
        src = utils.path_dwim(self.runner.basedir, src)

    if tmp is None or "-tmp-" not in tmp:
        tmp = self.runner._make_tmp_path(conn)

    tmp_src = conn.shell.join_path(tmp, os.path.basename(src))
    conn.put_file(src, tmp_src)

    if self.runner.become and self.runner.become_user != 'root':
        if not self.runner.noop_on_check(inject):
            self.runner._remote_chmod(conn, 'a+r', tmp_src, tmp)

    new_module_args = dict(
        src=tmp_src,
    )

    if self.runner.noop_on_check(inject):
        new_module_args['CHECKMODE'] = True

    module_args = utils.merge_module_args(module_args, new_module_args)

    return self.runner._execute_module(conn, tmp, 'patch', module_args, inject=inject, complex_args=complex_args)
def _execute_fetch(self, conn, tmp):
    ''' handler for fetch operations '''

    # load up options
    options = utils.parse_kv(self.module_args)
    source = options.get('src', None)
    dest = options.get('dest', None)
    if source is None or dest is None:
        results = dict(failed=True, msg="src and dest are required")
        return ReturnData(host=conn.host, result=results)

    # apply templating to source argument
    inject = self.setup_cache.get(conn.host, {})
    if self.module_vars is not None:
        inject.update(self.module_vars)
    source = utils.template(source, inject, self.setup_cache)

    # apply templating to dest argument
    dest = utils.template(dest, inject, self.setup_cache)

    # files are saved in dest dir, with a subdir for each host, then the filename
    dest = "%s/%s/%s" % (utils.path_dwim(self.basedir, dest), conn.host, source)
    dest = dest.replace("//", "/")

    # compare old and new md5 for support of change hooks
    local_md5 = None
    if os.path.exists(dest):
        local_md5 = os.popen(
            "/usr/bin/md5sum %(file)s 2> /dev/null || /sbin/md5 -q %(file)s" % {"file": dest}
        ).read().split()[0]
    remote_md5 = self._low_level_exec_command(
        conn,
        "/usr/bin/md5sum %(file)s 2> /dev/null || /sbin/md5 -q %(file)s" % {"file": source},
        tmp, True
    ).split()[0]

    if remote_md5 != local_md5:
        # create the containing directories, if needed
        if not os.path.isdir(os.path.dirname(dest)):
            os.makedirs(os.path.dirname(dest))

        # fetch the file and check for changes
        conn.fetch_file(source, dest)
        new_md5 = os.popen(
            "/usr/bin/md5sum %(file)s 2> /dev/null || /sbin/md5 -q %(file)s" % {"file": dest}
        ).read().split()[0]
        if new_md5 != remote_md5:
            result = dict(failed=True, msg="md5 mismatch", md5sum=new_md5)
            return ReturnData(host=conn.host, result=result)
        result = dict(changed=True, md5sum=new_md5)
        return ReturnData(host=conn.host, result=result)
    else:
        result = dict(changed=False, md5sum=local_md5)
        return ReturnData(host=conn.host, result=result)
def run(self, terms, inject=None, **kwargs):
    results = []

    terms = utils.listify_lookup_plugin_terms(terms, self.basedir, inject)

    if isinstance(terms, basestring):
        terms = [terms]

    for term in terms:
        try:
            self.reset()  # clear out things for this iteration

            try:
                if not self.parse_simple_args(term):
                    self.parse_kv_args(utils.parse_kv(term))
            except Exception:
                raise AnsibleError(
                    "unknown error parsing with_sequence arguments: %r" % term)

            self.sanity_check()

            if self.stride != 0:
                results.extend(self.generate_sequence())
        except AnsibleError:
            raise
        except Exception, e:
            raise AnsibleError("unknown error generating sequence: %s" % str(e))

    return results
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    args = {}
    if complex_args:
        args.update(complex_args)
    kv = utils.parse_kv(module_args)
    args.update(kv)

    runner = self.runner
    module_path = None      # don't add any new dirs to module path
    forks = 1               # don't fan out the forks again
    cb = EmbeddedPlaybookCallbacks()
    stats = callbacks.AggregateStats()
    extra_vars = {}         # ?
    only_tags = None        # XXX?
    skip_tags = None        # XXX?
    force_handlers = False  # XXX?

    play = args
    play.setdefault('hosts', runner.pattern)

    pb = EmbeddedPlayBook(
        playbook='(embedded playbook)',
        module_path=module_path,
        inventory=runner.inventory,
        forks=forks,
        remote_user=runner.remote_user,
        remote_pass=runner.remote_pass,
        runner_callbacks=runner.callbacks,
        callbacks=cb,
        stats=stats,
        timeout=runner.timeout,
        transport=runner.transport,
        sudo=runner.sudo,
        sudo_user=runner.sudo_user,
        sudo_pass=runner.sudo_pass,
        extra_vars=extra_vars,
        private_key_file=runner.private_key_file,
        only_tags=only_tags,
        skip_tags=skip_tags,
        check=runner.check,
        diff=runner.diff,
        su=runner.su,
        su_pass=runner.su_pass,
        su_user=runner.su_user,
        vault_password=runner.vault_pass,
        force_handlers=force_handlers,
        play=play,
    )

    pb.run()

    if stats.failures or stats.dark:
        result = dict(failed=True)
    elif stats.changed:
        result = dict(changed=True)
    else:
        result = dict(ok=True)

    return ReturnData(conn=conn, result=result)
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    ''' run the pause action module '''

    # note: this module does not need to pay attention to the 'check'
    # flag, it always runs

    hosts = ', '.join(self.runner.host_set)
    args = {}
    if complex_args:
        args.update(complex_args)
    # extra template call unneeded?
    args.update(parse_kv(template.template(self.runner.basedir, module_args, inject)))

    # Do 'minutes' or 'seconds' keys exist in 'args'?
    if 'minutes' in args or 'seconds' in args:
        try:
            if 'minutes' in args:
                self.pause_type = 'minutes'
                # The time() command operates in seconds so we need to
                # recalculate for minutes=X values.
                self.seconds = int(args['minutes']) * 60
            else:
                self.pause_type = 'seconds'
                self.seconds = int(args['seconds'])
                self.duration_unit = 'seconds'
        except ValueError, e:
            raise ae("non-integer value given for prompt duration:\n%s" % str(e))
def test_all_role_templates_tested(ctrl, monkeypatch, request):
    instance = ctrl.instances['jailhost']
    instance.config['roles'] = ' '.join(get_all_roles())
    pb = instance.get_playbook()
    plays = []
    monkeypatch.setattr('ansible.playbook.PlayBook._run_play', plays.append)
    pb.run()
    # import after running to avoid module import issues
    from ansible.utils import parse_kv, path_dwim_relative
    templates = []
    for play, task in iter_tasks(plays):
        if task.module_name != 'template':
            continue
        module_args_dict = task.args
        if not module_args_dict and task.module_args:
            module_args_dict = parse_kv(task.module_args)
        template_path = path_dwim_relative(
            task.module_vars['_original_file'], 'templates',
            module_args_dict['src'], play.basedir)
        if not os.path.exists(template_path):  # pragma: nocover - only on failure
            raise ValueError
        name = module_args_dict['src'].lower()
        for rep in ('-', '.'):
            name = name.replace(rep, '_')
        templates.append((
            name,
            dict(
                path=task.module_vars.get('_original_file'),
                role_name=task.role_name,
                name=module_args_dict['src'],
                task_name=task.name)))
    test_names = [x.name for x in request.session.items]
    for name, info in templates:
        test_name = 'test_%s_%s' % (info['role_name'], name)
        if not any(x for x in test_names if x.startswith(test_name)):  # pragma: nocover - only on failure
            pytest.fail(
                "No test '{0}' for template '{name}' of task '{task_name}' "
                "in role '{role_name}' at '{path}'.".format(test_name, **info))
def _execute_copy(self, conn, host, tmp): ''' handler for file transfer operations ''' # load up options options = utils.parse_kv(self.module_args) source = options.get('src', None) dest = options.get('dest', None) if source is None or dest is None: return (host, True, dict(failed=True, msg="src and dest are required"), '') # apply templating to source argument inject = self.setup_cache.get(conn.host,{}) source = utils.template(source, inject, self.setup_cache) # transfer the file to a remote tmp location tmp_src = tmp + source.split('/')[-1] conn.put_file(utils.path_dwim(self.basedir, source), tmp_src) # install the copy module self.module_name = 'copy' module = self._transfer_module(conn, tmp, 'copy') # run the copy module args = "src=%s dest=%s" % (tmp_src, dest) (result1, err, executed) = self._execute_module(conn, tmp, module, args) (host, ok, data, err) = self._return_from_module(conn, host, result1, err, executed) if ok: return self._chain_file_module(conn, tmp, data, err, options, executed) else: return (host, ok, data, err)
def run(self, conn, tmp, module_name, module_args, inject):
    args = utils.parse_kv(module_args)
    if not 'msg' in args:
        args['msg'] = 'Failed as requested from task'

    result = dict(failed=True, msg=args['msg'])
    return ReturnData(conn=conn, result=result)
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs): args = {} if complex_args: args.update(complex_args) # attempt to prevent confusing messages when the variable didn't interpolate module_args = module_args.replace("{{ ","{{").replace(" }}","}}") kv = utils.parse_kv(module_args) args.update(kv) if not 'msg' in args and not 'var' in args: args['msg'] = 'Hello world!' result = {} if 'msg' in args: if 'fail' in args and utils.boolean(args['fail']): result = dict(failed=True, msg=args['msg']) else: result = dict(msg=args['msg']) elif 'var' in args: results = template.template(None, "{{ %s }}" % args['var'], inject) result[args['var']] = results # force flag to make debug output module always verbose result['verbose_always'] = True return ReturnData(conn=conn, result=result)
def _execute_fetch(self, conn, host, tmp): ''' handler for fetch operations ''' # load up options options = utils.parse_kv(self.module_args) source = options.get('src', None) dest = options.get('dest', None) if source is None or dest is None: return (host, True, dict(failed=True, msg="src and dest are required"), '') # files are saved in dest dir, with a subdir for each host, then the filename dest = "%s/%s/%s" % (utils.path_dwim(self.basedir, dest), host, source) dest = dest.replace("//","/") # compare old and new md5 for support of change hooks local_md5 = None if os.path.exists(dest): local_md5 = os.popen("md5sum %s" % dest).read().split()[0] remote_md5 = self._exec_command(conn, "md5sum %s" % source, tmp, True)[0].split()[0] if remote_md5 != local_md5: # create the containing directories, if needed os.makedirs(os.path.dirname(dest)) # fetch the file and check for changes conn.fetch_file(source, dest) new_md5 = os.popen("md5sum %s" % dest).read().split()[0] if new_md5 != remote_md5: return (host, True, dict(failed=True, msg="md5 mismatch", md5sum=new_md5), '') return (host, True, dict(changed=True, md5sum=new_md5), '') else: return (host, True, dict(changed=False, md5sum=local_md5), '')
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs): args = {} if complex_args: args.update(complex_args) # attempt to prevent confusing messages when the variable didn't interpolate module_args = module_args.replace("{{ ", "{{").replace(" }}", "}}") kv = utils.parse_kv(module_args) args.update(kv) if not 'msg' in args: args['msg'] = 'Hello world!' if 'fail' in args and utils.boolean(args['fail']): result = dict(failed=True, msg=args['msg']) else: result = dict(msg=args['msg']) return ReturnData(conn=conn, result=result)
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs): args = {} if complex_args: args.update(complex_args) # attempt to prevent confusing messages when the variable didn't interpolate module_args = module_args.replace("{{ ","{{").replace(" }}","}}") kv = utils.parse_kv(module_args) args.update(kv) if not 'msg' in args and not 'var' in args: args['msg'] = 'Hello world!' result = {} if 'msg' in args: if 'fail' in args and utils.boolean(args['fail']): result = dict(failed=True, msg=args['msg']) else: result = dict(msg=args['msg']) elif 'var' in args and not utils.LOOKUP_REGEX.search(args['var']): results = template.template(self.basedir, args['var'], inject, convert_bare=True) result[args['var']] = results # force flag to make debug output module always verbose result['verbose_always'] = True return ReturnData(conn=conn, result=result)
def _execute_template(self, conn, host, tmp):
    ''' handler for template operations '''

    # load up options
    options = utils.parse_kv(self.module_args)
    source = options['src']
    dest = options['dest']
    metadata = options.get('metadata', None)

    if metadata is None:
        if self.remote_user == 'root':
            metadata = '/etc/ansible/setup'
        else:
            metadata = '~/.ansible/setup'

    # first copy the source template over
    temppath = tmp + os.path.split(source)[-1]
    conn.put_file(utils.path_dwim(self.basedir, source), temppath)

    # install the template module
    template_module = self._transfer_module(conn, tmp, 'template')

    # run the template module
    args = "src=%s dest=%s metadata=%s" % (temppath, dest, metadata)
    (result1, err, executed) = self._execute_module(conn, tmp, template_module, args)
    (host, ok, data, err) = self._return_from_module(conn, host, result1, err, executed)

    if ok:
        return self._chain_file_module(conn, tmp, data, err, options, executed)
    else:
        return (host, ok, data, err)
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs): # the group_by module does not need to pay attention to check mode. # it always runs. args = {} if complex_args: args.update(complex_args) args.update(parse_kv(self.runner.module_args)) if not 'key' in args: raise ae("'key' is a required argument.") vv("created 'group_by' ActionModule: key=%s" % (args['key'])) inventory = self.runner.inventory result = {'changed': False} ### find all groups groups = {} for host in self.runner.host_set: data = inject['hostvars'][host] if not check_conditional( template.template(self.runner.basedir, self.runner.conditional, data)): continue group_name = template.template(self.runner.basedir, args['key'], data) group_name = group_name.replace(' ', '-') if group_name not in groups: groups[group_name] = [] groups[group_name].append(host) result['groups'] = groups ### add to inventory for group, hosts in groups.items(): inv_group = inventory.get_group(group) if not inv_group: inv_group = ansible.inventory.Group(name=group) inventory.add_group(inv_group) for host in hosts: del self.runner.inventory._vars_per_host[host] inv_host = inventory.get_host(host) if not inv_host: inv_host = ansible.inventory.Host(name=host) if inv_group not in inv_host.get_groups(): result['changed'] = True inv_group.add_host(inv_host) return ReturnData(conn=conn, comm_ok=True, result=result)
def run(self, conn, tmp, module_name, module_args, inject):
    ''' handler for file transfer operations '''

    # load up options
    options = utils.parse_kv(module_args)
    tokens = shlex.split(module_args)
    source = tokens[0]
    # FIXME: error handling
    args = " ".join(tokens[1:])
    source = utils.template(self.runner.basedir, source, inject)
    source = utils.path_dwim(self.runner.basedir, source)

    exec_rc = None

    # transfer the file to a remote tmp location
    source = source.replace('\x00', '')  # why does this happen here?
    args = args.replace('\x00', '')      # why does this happen here?
    tmp_src = os.path.join(tmp, os.path.basename(source))
    tmp_src = tmp_src.replace('\x00', '')

    conn.put_file(source, tmp_src)

    # fix file permissions when the copy is done as a different user
    if self.runner.sudo and self.runner.sudo_user != 'root':
        self.runner._low_level_exec_command(conn, "chmod a+r %s" % tmp_src, tmp)

    # make executable
    self.runner._low_level_exec_command(conn, "chmod +x %s" % tmp_src, tmp)

    # run it through the command module
    module_args = tmp_src + " " + args + " #USE_SHELL"
    return self.runner._execute_module(conn, tmp, 'command', module_args, inject=inject)
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs): args = {} if complex_args: args.update(complex_args) args.update(utils.parse_kv(module_args)) path = args.get("path") if not path: raise errors.AnsibleError('"path" is a required argument') gpg_path = args.get("gpg", "gpg") gpg = subprocess.Popen([gpg_path, "-q", "-d", path], stdout=subprocess.PIPE) stdout, _stderr = gpg.communicate() if gpg.returncode != 0: raise errors.AnsibleError("error calling gpg") try: gpg_vars = utils.parse_yaml(stdout) except yaml.YAMLError, ex: utils.process_yaml_error(ex, stdout, path)
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs): args = {} if complex_args: args.update(complex_args) args.update(parse_kv(module_args)) hostname = args.get("hostname", "null") key_path = args.get("key_path", "null") failed = True command = 'ssh -i %s -q -o "StrictHostKeyChecking=no" -o "BatchMode=yes" ec2-user@%s "echo 2>&1" && echo "UP" || echo "DOWN"' % (key_path, hostname) process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) output, error = process.communicate() failed = False if output.find("UP") < 0: failed = True message = "Successfully tested SSH connectivity to %s" % hostname if failed: message = "Failed to successfully connect via SSH to %s. Please check your SSH configuration." % hostname return ReturnData(conn=conn, comm_ok=True, result=dict(failed=failed, changed=False, msg=message))
def _execute_copy(self, conn, host, tmp): ''' handler for file transfer operations ''' # load up options options = utils.parse_kv(self.module_args) source = options.get('src', None) dest = options.get('dest', None) if source is None or dest is None: return (host, True, dict(failed=True, msg="src and dest are required"), '') # transfer the file to a remote tmp location tmp_src = tmp + source.split('/')[-1] conn.put_file(utils.path_dwim(self.basedir, source), tmp_src) # install the copy module self.module_name = 'copy' module = self._transfer_module(conn, tmp, 'copy') # run the copy module args = "src=%s dest=%s" % (tmp_src, dest) (result1, err, executed) = self._execute_module(conn, tmp, module, args) (host, ok, data, err) = self._return_from_module(conn, host, result1, err, executed) if ok: return self._chain_file_module(conn, tmp, data, err, options, executed) else: return (host, ok, data, err)
def run(self, conn, tmp, module_name, module_args, inject):
    args = parse_kv(module_args)
    if not 'hostname' in args:
        raise ae("'hostname' is a required argument.")

    vv("created 'add_host' ActionModule: hostname=%s" % (args['hostname']))

    result = {'changed': True}

    new_host = Host(args['hostname'])
    inventory = self.runner.inventory

    # add the new host to the 'all' group
    allgroup = inventory.get_group('all')
    allgroup.add_host(new_host)
    result['changed'] = True

    # add it to the group if that was specified
    if 'groupname' in args:
        if not inventory.get_group(args['groupname']):
            new_group = Group(args['groupname'])
            inventory.add_group(new_group)
        ngobj = inventory.get_group(args['groupname'])
        ngobj.add_host(new_host)
        vv("created 'add_host' ActionModule: groupname=%s" % (args['groupname']))
        result['new_group'] = args['groupname']

    result['new_host'] = args['hostname']

    return ReturnData(conn=conn, comm_ok=True, result=result)
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    ''' handler for file transfer operations '''

    # load up options
    options = utils.parse_kv(module_args)
    image = options.get('image', None)

    if image and os.path.exists(image):
        ### Assume remote file if image does not exist locally
        # FIXME: We are transferring to /tmp in order to allow user qemu to have access
        tmp_image = os.path.join('/tmp', os.path.basename(image))
        conn.put_file(image, tmp_image)

        # fix file permissions when the copy is done as a different user
        if self.runner.sudo and self.runner.sudo_user != 'root':
            self.runner._low_level_exec_command(conn, "chmod a+r %s" % tmp_image, tmp)

        module_args = "%s image=%s" % (module_args, tmp_image)

    return self.runner._execute_module(conn, tmp, 'virt_boot', module_args, inject=inject)
def run(self, conn, tmp, module_name, module_args, inject): args = parse_kv(module_args) if not "hostname" in args: raise ae("'hostname' is a required argument.") vv("created 'add_host' ActionModule: hostname=%s" % (args["hostname"])) result = {"changed": True} new_host = Host(args["hostname"]) inventory = self.runner.inventory # add the new host to the 'all' group allgroup = inventory.get_group("all") allgroup.add_host(new_host) result["changed"] = True # add it to the group if that was specified if "groupname" in args: if not inventory.get_group(args["groupname"]): new_group = Group(args["groupname"]) inventory.add_group(new_group) ngobj = inventory.get_group(args["groupname"]) ngobj.add_host(new_host) vv("created 'add_host' ActionModule: groupname=%s" % (args["groupname"])) result["new_group"] = args["groupname"] result["new_host"] = args["hostname"] return ReturnData(conn=conn, comm_ok=True, result=result)
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    ''' generates params and passes them on to the rsync module '''

    # load up options
    options = {}
    if complex_args:
        options.update(complex_args)
    options.update(utils.parse_kv(module_args))

    src = options.get('src', None)
    dest = options.get('dest', None)

    try:
        options['local_rsync_path'] = inject['ansible_rsync_path']
    except KeyError:
        pass

    dest_host = inject.get('delegate_to', inject.get('ansible_ssh_user', inject['inventory_hostname']))
    if dest_host in ['localhost', '127.0.0.1']:
        dest_host = '127.0.0.1'

    src_host = '127.0.0.1'  # inventory_hostname is localhost when transport not local

    if options.get('mode', 'push') == 'pull':
        (dest_host, src_host) = (src_host, dest_host)

    if dest_host != src_host:
        user = inject.get('ansible_ssh_user', self.runner.remote_user)
        # should we support ssh_password and ssh_port here??
        options['private_key'] = \
            inject.get('ansible_ssh_private_key_file', self.runner.private_key_file)
        src = self._process_origin(src_host, src, user)
        dest = self._process_origin(dest_host, dest, user)

    options['src'] = src
    options['dest'] = dest
    try:
        del options['mode']
    except KeyError:
        pass

    # run the synchronize module
    self.runner.module_args = ' '.join(['%s=%s' % (k, v) for (k, v) in options.items()])
    return self.runner._execute_module(conn, tmp, 'synchronize', self.runner.module_args, inject=inject)
def run(self, conn, tmp, module_name, inject):
    ''' handler for file transfer operations '''

    # load up options
    options = utils.parse_kv(self.runner.module_args)
    source = options.get('src', None)
    dest = options.get('dest', None)
    if (source is None and not 'first_available_file' in inject) or dest is None:
        result = dict(failed=True, msg="src and dest are required")
        return ReturnData(conn=conn, result=result)

    # if we have first_available_file in our vars
    # look up the files and use the first one we find as src
    if 'first_available_file' in inject:
        found = False
        for fn in inject.get('first_available_file'):
            fn = utils.template(fn, inject)
            if os.path.exists(fn):
                source = fn
                found = True
                break
        if not found:
            results = dict(failed=True, msg="could not find src in first_available_file list")
            return ReturnData(conn=conn, result=results)

    source = utils.template(source, inject)
    source = utils.path_dwim(self.runner.basedir, source)

    local_md5 = utils.md5(source)
    if local_md5 is None:
        result = dict(failed=True, msg="could not find src=%s" % source)
        return ReturnData(conn=conn, result=result)

    remote_md5 = self.runner._remote_md5(conn, tmp, dest)

    exec_rc = None
    if local_md5 != remote_md5:
        # transfer the file to a remote tmp location
        tmp_src = tmp + os.path.basename(source)
        conn.put_file(source, tmp_src)

        # fix file permissions when the copy is done as a different user
        if self.runner.sudo and self.runner.sudo_user != 'root':
            self.runner._low_level_exec_command(conn, "chmod a+r %s" % tmp_src, tmp)

        # run the copy module
        self.runner.module_args = "%s src=%s" % (self.runner.module_args, tmp_src)
        return self.runner._execute_module(conn, tmp, 'copy', self.runner.module_args, inject=inject).daisychain('file')
    else:
        # no need to transfer the file, already correct md5
        result = dict(changed=False, md5sum=remote_md5, transferred=False)
        return ReturnData(conn=conn, result=result).daisychain('file')
def run(self, terms, **kwargs): results = [] if isinstance(terms, basestring): terms = [terms] for term in terms: try: self.reset() # clear out things for this iteration try: if not self.parse_simple_args(term): self.parse_kv_args(parse_kv(term)) except Exception: raise AnsibleError( "unknown error parsing with_sequence arguments: %r" % term) self.sanity_check() results.extend(self.generate_sequence()) except AnsibleError: raise except Exception: raise AnsibleError("unknown error generating sequence") return results
def run(self, terms, inject=None, **kwargs):
    results = []

    terms = utils.listify_lookup_plugin_terms(terms, self.basedir, inject)

    if isinstance(terms, basestring):
        terms = [terms]

    for term in terms:
        try:
            self.reset()  # clear out things for this iteration

            try:
                if not self.parse_simple_args(term):
                    self.parse_kv_args(utils.parse_kv(term))
            except Exception:
                raise AnsibleError(
                    "unknown error parsing with_sequence arguments: %r" % term
                )

            self.sanity_check()

            if self.stride != 0:
                results.extend(self.generate_sequence())
        except AnsibleError:
            raise
        except Exception, e:
            raise AnsibleError(
                "unknown error generating sequence: %s" % str(e)
            )

    return results
def _execute_fetch(self, conn, host, tmp): ''' handler for fetch operations ''' # load up options options = utils.parse_kv(self.module_args) source = options.get('src', None) dest = options.get('dest', None) if source is None or dest is None: return (host, True, dict(failed=True, msg="src and dest are required"), '') # files are saved in dest dir, with a subdir for each host, then the filename filename = os.path.basename(source) dest = "%s/%s/%s" % (utils.path_dwim(self.basedir, dest), host, filename) # compare old and new md5 for support of change hooks local_md5 = None if os.path.exists(dest): local_md5 = os.popen("md5sum %s" % dest).read().split()[0] remote_md5 = self._exec_command(conn, "md5sum %s" % source, tmp, True)[0].split()[0] if remote_md5 != local_md5: # create the containing directories, if needed os.makedirs(os.path.dirname(dest)) # fetch the file and check for changes conn.fetch_file(source, dest) new_md5 = os.popen("md5sum %s" % dest).read().split()[0] changed = (new_md5 != local_md5) if new_md5 != remote_md5: return (host, True, dict(failed=True, msg="md5 mismatch", md5sum=new_md5), '') return (host, True, dict(changed=True, md5sum=new_md5), '') else: return (host, True, dict(changed=False, md5sum=local_md5), '')
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    try:
        args = {}
        if complex_args:
            args.update(complex_args)
        args.update(parse_kv(module_args))

        envdict = {}
        if self.runner.environment:
            env = template.template(self.runner.basedir, self.runner.environment, inject, convert_bare=True)
            env = utils.safe_eval(env)

        testQueue = args["queue_name"]
        region = args["region"]
        spec_file = path.expanduser("~/.nucleator/contrib/Bucketandq/orchestrator/specification.json")

        queue = self.initQueue(testQueue, region, env)
        self.main(self.addMessageToQueue, queue, spec_file)

        return ReturnData(conn=conn, comm_ok=True,
                          result=dict(failed=False, changed=False, msg="Bucketandq Messages Created!"))

    except Exception, e:
        # deal with failure gracefully
        result = dict(failed=True, msg=type(e).__name__ + ": " + str(e))
        return ReturnData(conn=conn, comm_ok=False, result=result)
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    # note: the fail module does not need to pay attention to check mode
    # it always runs.

    args = {}
    if complex_args:
        args.update(complex_args)
    args.update(utils.parse_kv(module_args))

    msg = ''
    if 'msg' in args:
        msg = args['msg']

    if not 'that' in args:
        raise errors.AnsibleError('conditional required in "that" string')

    if not isinstance(args['that'], list):
        args['that'] = [args['that']]

    for that in args['that']:
        result = utils.check_conditional(that, self.runner.basedir, inject, fail_on_undefined=True)
        if not result:
            return ReturnData(conn=conn, result=dict(failed=True, assertion=that, evaluated_to=result))

    return ReturnData(conn=conn, result=dict(msg='all assertions passed'))
def _execute_template(self, conn, host, tmp): """ handler for template operations """ # load up options options = utils.parse_kv(self.module_args) source = options["src"] dest = options["dest"] metadata = options.get("metadata", None) if metadata is None: if self.remote_user == "root": metadata = "/etc/ansible/setup" else: metadata = "~/.ansible/setup" # first copy the source template over temppath = tmp + os.path.split(source)[-1] self._transfer_file(conn, utils.path_dwim(self.basedir, source), temppath) # install the template module template_module = self._transfer_module(conn, tmp, "template") # run the template module args = ["src=%s" % temppath, "dest=%s" % dest, "metadata=%s" % metadata] (result1, executed) = self._execute_module(conn, tmp, template_module, args) (host, ok, data) = self._return_from_module(conn, host, result1, executed) if ok: return self._chain_file_module(conn, tmp, data, options, executed) else: return (host, ok, data)
def _execute_copy(self, conn, host, tmp): """ handler for file transfer operations """ # load up options options = utils.parse_kv(self.module_args) source = options["src"] dest = options["dest"] # transfer the file to a remote tmp location tmp_src = tmp + source.split("/")[-1] self._transfer_file(conn, utils.path_dwim(self.basedir, source), tmp_src) # install the copy module self.module_name = "copy" module = self._transfer_module(conn, tmp, "copy") # run the copy module args = ["src=%s" % tmp_src, "dest=%s" % dest] (result1, executed) = self._execute_module(conn, tmp, module, args) (host, ok, data) = self._return_from_module(conn, host, result1, executed) if ok: return self._chain_file_module(conn, tmp, data, options, executed) else: return (host, ok, data)
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    ''' handler for template operations '''

    # note: since this module just calls the copy module, the --check mode support
    # can be implemented entirely over there

    if not self.runner.is_playbook:
        raise errors.AnsibleError("in current versions of ansible, templates are only usable in playbooks")

    # load up options
    options = {}
    if complex_args:
        options.update(complex_args)
    options.update(utils.parse_kv(module_args))

    source = options.get('src', None)
    dest = options.get('dest', None)

    if (source is None and 'first_available_file' not in inject) or dest is None:
        result = dict(failed=True, msg="src and dest are required")
        return ReturnData(conn=conn, comm_ok=False, result=result)

    # if we have first_available_file in our vars
    # look up the files and use the first one we find as src
    if 'first_available_file' in inject:
        found = False
        for fn in self.runner.module_vars.get('first_available_file'):
            fn_orig = fn
            fnt = template.template(self.runner.basedir, fn, inject)
            fnd = utils.path_dwim(self.runner.basedir, fnt)
            if not os.path.exists(fnd) and '_original_file' in inject:
                fnd = utils.path_dwim_relative(inject['_original_file'], 'templates', fnt, self.runner.basedir, check=False)
            if os.path.exists(fnd):
                source = fnd
                found = True
                break
        if not found:
            result = dict(failed=True, msg="could not find src in first_available_file list")
            return ReturnData(conn=conn, comm_ok=False, result=result)
    else:
        source = template.template(self.runner.basedir, source, inject)
        if '_original_file' in inject:
            source = utils.path_dwim_relative(inject['_original_file'], 'templates', source, self.runner.basedir)
        else:
            source = utils.path_dwim(self.runner.basedir, source)

    if dest.endswith("/"):  # CCTODO: Fix path for Windows hosts.
        base = os.path.basename(source)
        dest = os.path.join(dest, base)

    # template the source data locally & get ready to transfer
    try:
        resultant = template.template_from_file(self.runner.basedir, source, inject, vault_password=self.runner.vault_pass)
    except Exception, e:
        result = dict(failed=True, msg=type(e).__name__ + ": " + str(e))
        return ReturnData(conn=conn, comm_ok=False, result=result)
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs): # the group_by module does not need to pay attention to check mode. # it always runs. args = {} if complex_args: args.update(complex_args) args.update(parse_kv(self.runner.module_args)) if not 'key' in args: raise ae("'key' is a required argument.") vv("created 'group_by' ActionModule: key=%s"%(args['key'])) inventory = self.runner.inventory result = {'changed': False} ### find all groups groups = {} for host in self.runner.host_set: data = {} data.update(inject) data.update(inject['hostvars'][host]) conds = self.runner.conditional if type(conds) != list: conds = [ conds ] next_host = False for cond in conds: if not check_conditional(cond, self.runner.basedir, data, fail_on_undefined=self.runner.error_on_undefined_vars): next_host = True break if next_host: continue group_name = template.template(self.runner.basedir, args['key'], data) group_name = group_name.replace(' ','-') if group_name not in groups: groups[group_name] = [] groups[group_name].append(host) result['groups'] = groups ### add to inventory for group, hosts in groups.items(): inv_group = inventory.get_group(group) if not inv_group: inv_group = ansible.inventory.Group(name=group) inventory.add_group(inv_group) for host in hosts: del self.runner.inventory._vars_per_host[host] inv_host = inventory.get_host(host) if not inv_host: inv_host = ansible.inventory.Host(name=host) if inv_group not in inv_host.get_groups(): result['changed'] = True inv_group.add_host(inv_host) return ReturnData(conn=conn, comm_ok=True, result=result)
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    ''' generates params and passes them on to the rsync module '''

    # load up options
    options = {}
    if complex_args:
        options.update(complex_args)
    options.update(utils.parse_kv(module_args))

    src = options.get('src', None)
    dest = options.get('dest', None)

    src = template.template(self.runner.basedir, src, inject)
    dest = template.template(self.runner.basedir, dest, inject)

    try:
        options['local_rsync_path'] = inject['ansible_rsync_path']
    except KeyError:
        pass

    # from the perspective of the rsync call the delegate is the localhost
    src_host = '127.0.0.1'
    dest_host = inject.get('ansible_ssh_host', inject['inventory_hostname'])

    # edge case: explicit delegate and dest_host are the same
    if dest_host == inject['delegate_to']:
        dest_host = '127.0.0.1'

    if options.get('mode', 'push') == 'pull':
        (dest_host, src_host) = (src_host, dest_host)

    if dest_host != src_host:
        user = inject.get('ansible_ssh_user', self.runner.remote_user)
        private_key = inject.get('ansible_ssh_private_key_file', self.runner.private_key_file)
        if private_key is not None:
            options['private_key'] = private_key
        src = self._process_origin(src_host, src, user)
        dest = self._process_origin(dest_host, dest, user)

    options['src'] = src
    options['dest'] = dest
    if 'mode' in options:
        del options['mode']

    rsync_path = options.get('rsync_path', None)
    if not rsync_path and self.runner.sudo:
        rsync_path = 'sudo rsync'

    # make sure rsync path is quoted.
    if rsync_path:
        options['rsync_path'] = '"' + rsync_path + '"'

    module_items = ' '.join(['%s=%s' % (k, v) for (k, v) in options.items()])
    return self.runner._execute_module(conn, tmp, 'synchronize', module_items, inject=inject)
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    # load up options
    options = {}
    if complex_args:
        options.update(complex_args)
    options.update(utils.parse_kv(module_args))

    src = options.get('src', None)
    dest = options.get('dest', None)
    delimiter = options.get('delimiter', None)
    remote_src = utils.boolean(options.get('remote_src', 'yes'))

    if src is None or dest is None:
        result = dict(failed=True, msg="src and dest are required")
        return ReturnData(conn=conn, comm_ok=False, result=result)

    if remote_src:
        return self.runner._execute_module(conn, tmp, 'assemble', module_args, inject=inject, complex_args=complex_args)
    elif '_original_file' in inject:
        src = utils.path_dwim_relative(inject['_original_file'], 'files', src, self.runner.basedir)
    else:
        # the source is local, so expand it here
        src = os.path.expanduser(src)

    # Does all work assembling the file
    path = self._assemble_from_fragments(src, delimiter)

    pathmd5 = utils.md5s(path)
    remote_md5 = self.runner._remote_md5(conn, tmp, dest)

    if pathmd5 != remote_md5:
        resultant = file(path).read()
        if self.runner.diff:
            dest_result = self.runner._execute_module(conn, tmp, 'slurp', "path=%s" % dest, inject=inject, persist_files=True)
            if 'content' in dest_result.result:
                dest_contents = dest_result.result['content']
                if dest_result.result['encoding'] == 'base64':
                    dest_contents = base64.b64decode(dest_contents)
                else:
                    raise Exception("unknown encoding, failed: %s" % dest_result.result)
        xfered = self.runner._transfer_str(conn, tmp, 'src', resultant)

        # fix file permissions when the copy is done as a different user
        if self.runner.sudo and self.runner.sudo_user != 'root':
            self.runner._low_level_exec_command(conn, "chmod a+r %s" % xfered, tmp)

        # run the copy module
        module_args = "%s src=%s dest=%s original_basename=%s" % (
            module_args, pipes.quote(xfered), pipes.quote(dest), pipes.quote(os.path.basename(src)))

        if self.runner.noop_on_check(inject):
            return ReturnData(conn=conn, comm_ok=True, result=dict(changed=True),
                              diff=dict(before_header=dest, after_header=src, after=resultant))
        else:
            res = self.runner._execute_module(conn, tmp, 'copy', module_args, inject=inject)
            res.diff = dict(after=resultant)
            return res
    else:
        module_args = "%s src=%s dest=%s original_basename=%s" % (
            module_args, pipes.quote(xfered), pipes.quote(dest), pipes.quote(os.path.basename(src)))
        return self.runner._execute_module(conn, tmp, 'file', module_args, inject=inject)
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    ''' handler for file transfer operations '''

    # load up options
    options = {}
    if complex_args:
        options.update(complex_args)
    options.update(utils.parse_kv(module_args))
    source = options.get('src', None)
    dest = options.get('dest', None)
    copy = utils.boolean(options.get('copy', 'yes'))

    if source is None or dest is None:
        result = dict(failed=True, msg="src (or content) and dest are required")
        return ReturnData(conn=conn, result=result)

    dest = os.path.expanduser(dest)  # CCTODO: Fix path for Windows hosts.
    source = template.template(self.runner.basedir, os.path.expanduser(source), inject)
    if copy:
        if '_original_file' in inject:
            source = utils.path_dwim_relative(inject['_original_file'], 'files', source, self.runner.basedir)
        else:
            source = utils.path_dwim(self.runner.basedir, source)

    remote_md5 = self.runner._remote_md5(conn, tmp, dest)
    if remote_md5 != '3':
        result = dict(failed=True, msg="dest '%s' must be an existing dir" % dest)
        return ReturnData(conn=conn, result=result)

    if copy:
        # transfer the file to a remote tmp location
        tmp_src = tmp + 'source'
        conn.put_file(source, tmp_src)

    # handle diff mode client side
    # handle check mode client side

    if copy:
        # fix file permissions when the copy is done as a different user
        if self.runner.sudo and self.runner.sudo_user != 'root' or self.runner.su and self.runner.su_user != 'root':
            if not self.runner.noop_on_check(inject):
                self.runner._remote_chmod(conn, 'a+r', tmp_src, tmp)

        # Build temporary module_args.
        new_module_args = dict(
            src=tmp_src,
            original_basename=os.path.basename(source),
        )

        # make sure check mode is passed on correctly
        if self.runner.noop_on_check(inject):
            new_module_args['CHECKMODE'] = True

        module_args = utils.merge_module_args(module_args, new_module_args)
    else:
        module_args = "%s original_basename=%s" % (module_args, pipes.quote(os.path.basename(source)))
        # make sure check mode is passed on correctly
        if self.runner.noop_on_check(inject):
            module_args += " CHECKMODE=True"

    return self.runner._execute_module(conn, tmp, 'unarchive', module_args, inject=inject, complex_args=complex_args)
def _merge_args(self, module_args, complex_args):
    args = {}
    if complex_args:
        args.update(complex_args)
    kv = parse_kv(module_args)
    args.update(kv)
    return args
def _load_options(self, module_args, complex_args):
    ''' Load module options. '''
    options = {}
    if complex_args:
        options.update(complex_args)
    options.update(utils.parse_kv(module_args))
    return options
def _execute_template(self, conn, host, tmp): ''' handler for template operations ''' # load up options options = utils.parse_kv(self.module_args) source = options.get('src', None) dest = options.get('dest', None) metadata = options.get('metadata', None) if source is None or dest is None: return (host, True, dict(failed=True, msg="src and dest are required"), '') # apply templating to source argument so vars can be used in the path inject = self.setup_cache.get(conn.host,{}) source = utils.template(source, inject, self.setup_cache) (host, ok, data, err) = (None, None, None, None) if self.is_playbook: # not running from a playbook so we have to fetch the remote # setup file contents before proceeding... if metadata is None: if self.remote_user == 'root': metadata = '/etc/ansible/setup' else: # path is expanded on remote side metadata = "~/.ansible/setup" # install the template module slurp_module = self._transfer_module(conn, tmp, 'slurp') # run the slurp module to get the metadata file args = "src=%s" % metadata (result1, err, executed) = self._execute_module(conn, tmp, slurp_module, args) result1 = utils.json_loads(result1) if not 'content' in result1 or result1.get('encoding','base64') != 'base64': result1['failed'] = True return self._return_from_module(conn, host, result1, err, executed) content = base64.b64decode(result1['content']) inject = utils.json_loads(content) # install the template module try: resultant = utils.template(source_data, inject, self.setup_cache) copy_module = self._transfer_module(conn, tmp, 'copy') # template the source data locally source_data = file(utils.path_dwim(self.basedir, source)).read() resultant = '' (host, ok, data, err) = self._return_from_module(conn, host, result1, err, executed) if ok: return self._chain_file_module(conn, tmp, data, err, options, executed) else: return (host, ok, data, err) except Exception, e: return (host, False, dict(failed=True, msg=str(e)), '')
def run(self, conn, tmp_path, module_name, module_args, inject, complex_args=None, **kwargs):
    ''' handler for file transfer operations '''

    # load up options
    options = {}
    if complex_args:
        options.update(complex_args)
    options.update(utils.parse_kv(module_args))
    source = options.get('src', None)
    content = options.get('content', None)
    dest = options.get('dest', None)
    raw = utils.boolean(options.get('raw', 'no'))
    force = utils.boolean(options.get('force', 'yes'))

    if (source is None and content is None and not 'first_available_file' in inject) or dest is None:
        result = dict(failed=True, msg="src (or content) and dest are required")
        return ReturnData(conn=conn, result=result)
    elif (source is not None or 'first_available_file' in inject) and content is not None:
        result = dict(failed=True, msg="src and content are mutually exclusive")
        return ReturnData(conn=conn, result=result)

    # Check if the source ends with a "/"
    source_trailing_slash = False
    if source:
        source_trailing_slash = source.endswith("/")

    # Define content_tempfile in case we set it after finding content populated.
    content_tempfile = None

    # If content is defined make a temp file and write the content into it.
    if content is not None:
        try:
            # If content comes to us as a dict it should be decoded json.
            # We need to encode it back into a string to write it out.
            if type(content) is dict:
                content_tempfile = self._create_content_tempfile(json.dumps(content))
            else:
                content_tempfile = self._create_content_tempfile(content)
            source = content_tempfile
        except Exception, err:
            result = dict(failed=True, msg="could not write content temp file: %s" % err)
            return ReturnData(conn=conn, result=result)
def _execute_copy(self, conn, host, tmp):
    ''' handler for file transfer operations '''

    # load up options
    options = utils.parse_kv(self.module_args)
    source = options.get('src', None)
    dest = options.get('dest', None)
    if (source is None and not 'first_available_file' in self.module_vars) or dest is None:
        return (host, True, dict(failed=True, msg="src and dest are required"), '')

    # apply templating to source argument
    inject = self.setup_cache.get(conn.host, {})

    # FIXME: break duplicate code up into subfunction

    # if we have first_available_file in our vars
    # look up the files and use the first one we find as src
    if 'first_available_file' in self.module_vars:
        found = False
        for fn in self.module_vars.get('first_available_file'):
            fn = utils.template(fn, inject, self.setup_cache)
            if os.path.exists(fn):
                source = fn
                found = True
                break
        if not found:
            return (host, True, dict(failed=True, msg="could not find src in first_available_file list"), '')

    source = utils.template(source, inject, self.setup_cache)

    # transfer the file to a remote tmp location
    tmp_src = tmp + source.split('/')[-1]
    conn.put_file(utils.path_dwim(self.basedir, source), tmp_src)

    # install the copy module
    self.module_name = 'copy'
    module = self._transfer_module(conn, tmp, 'copy')

    # run the copy module
    args = "src=%s dest=%s" % (tmp_src, dest)
    (result1, err, executed) = self._execute_module(conn, tmp, module, args)
    (host, ok, data, err) = self._return_from_module(conn, host, result1, err, executed)

    if ok:
        return self._chain_file_module(conn, tmp, data, err, options, executed)
    else:
        return (host, ok, data, err)
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    JAIL_OPTIONS = [
        'logpath', 'ignoreregex', 'maxretry', 'bantime', 'findtime',
        'action', 'port', 'protocol', 'filter']

    # Load up options.
    options = {}
    if complex_args:
        options.update(complex_args)
    options.update(utils.parse_kv(module_args))

    # Collect vars for the template.
    jail_conf = dict()
    name = options.get('name')
    inject['name'] = name
    jail_conf['enabled'] = options.get('enabled', 'true')
    for option in JAIL_OPTIONS:
        if options.has_key(option):
            jail_conf[option] = options.get(option)
    inject['jail_conf'] = jail_conf

    action_path = utils.plugins.action_loader.find_plugin(module_name)
    src = os.path.realpath(os.path.dirname(action_path) + '/../templates/jail.j2')
    dest = os.path.join(inject['fail2ban_dir_parts'], '01_%s.jail' % (name))
    module_args = 'src=%s dest=%s' % (src, dest)
    handler = utils.plugins.action_loader.get('template', self.runner)
    return_data = handler.run(conn, tmp, 'template', module_args, inject)
    if return_data.result.has_key('failed'):
        return return_data

    # Dirty service notification.
    if return_data.result['changed']:
        cp = callback_plugins[0]
        handler_name = 'assemble jail.local'
        cp.playbook._flag_handler(
            cp.play,
            template(cp.play.basedir, handler_name, cp.task.module_vars),
            inject['inventory_hostname'])

    result = dict()
    result['changed'] = return_data.result['changed']
    if result['changed']:
        result['msg'] = 'Jail %s added.' % name
    return ReturnData(conn=conn, comm_ok=True, result=result)
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    ''' handler for running operations on master '''

    # load up options
    options = {}
    if complex_args:
        options.update(complex_args)
    options.update(utils.parse_kv(module_args))

    return ReturnData(conn=conn, result=dict(ansible_facts=options))
def write_argsfile(argstring, json=False):
    """ Write args to a file for old-style module's use. """
    argspath = os.path.expanduser("~/.ansible_test_module_arguments")
    argsfile = open(argspath, 'w')
    if json:
        args = utils.parse_kv(argstring)
        argstring = utils.jsonify(args)
    argsfile.write(argstring)
    argsfile.close()
    return argspath
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs): ''' handler for file transfer operations ''' # load up options options = {} if complex_args: options.update(complex_args) options.update(utils.parse_kv(module_args)) source = options.get('src', None) dest = options.get('dest', None) if source is None or dest is None: result = dict(failed=True, msg="src (or content) and dest are required") return ReturnData(conn=conn, result=result) source = template.template(self.runner.basedir, source, inject) if '_original_file' in inject: source = utils.path_dwim_relative(inject['_original_file'], 'files', source, self.runner.basedir) else: source = utils.path_dwim(self.runner.basedir, source) remote_md5 = self.runner._remote_md5(conn, tmp, dest) if remote_md5 != '3': result = dict(failed=True, msg="dest must be an existing dir") return ReturnData(conn=conn, result=result) # transfer the file to a remote tmp location tmp_src = tmp + 'source' conn.put_file(source, tmp_src) # handle diff mode client side # handle check mode client side # fix file permissions when the copy is done as a different user if self.runner.sudo and self.runner.sudo_user != 'root': self.runner._low_level_exec_command(conn, "chmod a+r %s" % tmp_src, tmp) module_args = "%s src=%s original_basename=%s" % ( module_args, pipes.quote(tmp_src), pipes.quote(os.path.basename(source))) return self.runner._execute_module(conn, tmp, 'unarchive', module_args, inject=inject, complex_args=complex_args)
def run(self, conn, tmp, module_name, module_args, inject):
    ''' handler for file transfer operations '''

    # load up options
    options = utils.parse_kv(module_args)
    inventory = options.get('inventory', '/etc/ansible/network-inventory.yml')

    tmp_inventory = tmp + os.path.basename(inventory)
    conn.put_file(inventory, tmp_inventory)

    module_args = "%s inventory=%s" % (module_args, tmp_inventory)
    return self.runner._execute_module(conn, tmp, 'network_facts', module_args, inject=inject)
def run(self, conn, tmp, module_name, module_args, inject): ''' handler for fetch operations ''' # load up options options = utils.parse_kv(module_args) source = options.get('src', None) dest = options.get('dest', None) if source is None or dest is None: results = dict(failed=True, msg="src and dest are required") return ReturnData(conn=conn, result=results) # apply templating to source argument source = utils.template(self.runner.basedir, source, inject) # apply templating to dest argument dest = utils.template(self.runner.basedir, dest, inject) # files are saved in dest dir, with a subdir for each host, then the filename dest = "%s/%s/%s" % (utils.path_dwim(self.runner.basedir, dest), conn.host, source) dest = dest.replace("//","/") # calculate md5 sum for the remote file remote_md5 = self.runner._remote_md5(conn, tmp, source) # these don't fail because you may want to transfer a log file that possibly MAY exist # but keep going to fetch other log files if remote_md5 == '0': result = dict(msg="unable to calculate the md5 sum of the remote file", file=source, changed=False) return ReturnData(conn=conn, result=result) if remote_md5 == '1': result = dict(msg="the remote file does not exist, not transferring, ignored", file=source, changed=False) return ReturnData(conn=conn, result=result) if remote_md5 == '2': result = dict(msg="no read permission on remote file, not transferring, ignored", file=source, changed=False) return ReturnData(conn=conn, result=result) # calculate md5 sum for the local file local_md5 = utils.md5(dest) if remote_md5 != local_md5: # create the containing directories, if needed if not os.path.isdir(os.path.dirname(dest)): os.makedirs(os.path.dirname(dest)) # fetch the file and check for changes conn.fetch_file(source, dest) new_md5 = utils.md5(dest) if new_md5 != remote_md5: result = dict(failed=True, md5sum=new_md5, msg="md5 mismatch", file=source) return ReturnData(conn=conn, result=result) result = dict(changed=True, md5sum=new_md5) return ReturnData(conn=conn, result=result) else: result = dict(changed=False, md5sum=local_md5, file=source) return ReturnData(conn=conn, result=result)
def run(self, conn, tmp, module_name, module_args, inject):
    args = utils.parse_kv(module_args)
    if not 'msg' in args:
        args['msg'] = 'Hello world!'

    if 'fail' in args and utils.boolean(args['fail']):
        result = dict(failed=True, msg=args['msg'])
    else:
        result = dict(msg=str(args['msg']))

    return ReturnData(conn=conn, result=result)
def _execute_assemble(self, conn, tmp):
    ''' handler for assemble operations '''
    module_name = 'assemble'
    options = utils.parse_kv(self.module_args)
    module = self._transfer_module(conn, tmp, module_name)
    exec_rc = self._execute_module(conn, tmp, module, self.module_args)
    if exec_rc.is_successful():
        return self._chain_file_module(conn, tmp, exec_rc, options)
    else:
        return exec_rc
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs): """ handler for file transfer operations """ # load up options options = utils.parse_kv(module_args) inventory = options.get("inventory", "/etc/ansible/network-inventory.yml") tmp_inventory = tmp + os.path.basename(inventory) conn.put_file(inventory, tmp_inventory) module_args = "%s inventory=%s" % (module_args, tmp_inventory) return self.runner._execute_module(conn, tmp, "network_facts", module_args, inject=inject)
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    ''' handler for file transfer operations '''

    # load up options
    options = {}
    if complex_args:
        options.update(complex_args)
    options.update(utils.parse_kv(module_args))
    source = options.get('src', None)
    content = options.get('content', None)
    dest = options.get('dest', None)
    raw = utils.boolean(options.get('raw', 'no'))
    force = utils.boolean(options.get('force', 'yes'))

    if (source is None and content is None and not 'first_available_file' in inject) or dest is None:
        result = dict(failed=True, msg="src (or content) and dest are required")
        return ReturnData(conn=conn, result=result)
    elif (source is not None or 'first_available_file' in inject) and content is not None:
        result = dict(failed=True, msg="src and content are mutually exclusive")
        return ReturnData(conn=conn, result=result)

    source_trailing_slash = False
    if source:
        source_trailing_slash = source.endswith("/")

    # if we have first_available_file in our vars
    # look up the files and use the first one we find as src
    if 'first_available_file' in inject:
        found = False
        for fn in inject.get('first_available_file'):
            fn_orig = fn
            fnt = template.template(self.runner.basedir, fn, inject)
            fnd = utils.path_dwim(self.runner.basedir, fnt)
            if not os.path.exists(fnd) and '_original_file' in inject:
                fnd = utils.path_dwim_relative(inject['_original_file'], 'files', fnt, self.runner.basedir, check=False)
            if os.path.exists(fnd):
                source = fnd
                found = True
                break
        if not found:
            results = dict(failed=True, msg="could not find src in first_available_file list")
            return ReturnData(conn=conn, result=results)
    elif content is not None:
        fd, tmp_content = tempfile.mkstemp()
        f = os.fdopen(fd, 'w')
        try:
            f.write(content)
        except Exception, err:
            os.remove(tmp_content)
            result = dict(failed=True, msg="could not write content temp file: %s" % err)
            return ReturnData(conn=conn, result=result)
        f.close()
        source = tmp_content