def _save_ssh_host_keys(self, filename):
    '''
    not using the paramiko save_ssh_host_keys function as we want to add new SSH
    keys at the bottom so folks don't complain about it :)
    '''
    if not self._any_keys_added():
        return False

    path = os.path.expanduser("~/.ssh")
    makedirs_safe(path)

    f = open(filename, 'w')

    for hostname, keys in self.ssh._host_keys.iteritems():
        for keytype, key in keys.iteritems():
            # was f.write
            added_this_time = getattr(key, '_added_by_ansible_this_time', False)
            if not added_this_time:
                f.write("%s %s %s\n" % (hostname, keytype, key.get_base64()))

    for hostname, keys in self.ssh._host_keys.iteritems():
        for keytype, key in keys.iteritems():
            added_this_time = getattr(key, '_added_by_ansible_this_time', False)
            if added_this_time:
                f.write("%s %s %s\n" % (hostname, keytype, key.get_base64()))

    f.close()
def get_config(p, section, key, env_var, default, boolean=False, integer=False, floating=False, islist=False, isnone=False, ispath=False, ispathlist=False, istmppath=False, expand_relative_paths=False): ''' return a configuration variable with casting ''' value = _get_config(p, section, key, env_var, default) if boolean: value = mk_boolean(value) if value: if integer: value = int(value) elif floating: value = float(value) elif islist: if isinstance(value, string_types): value = [x.strip() for x in value.split(',')] elif isnone: if value == "None": value = None elif ispath: value = shell_expand(value) elif istmppath: value = shell_expand(value) if not os.path.exists(value): makedirs_safe(value, 0o700) prefix = 'ansible-local-%s' % os.getpid() value = tempfile.mkdtemp(prefix=prefix, dir=value) elif ispathlist: if isinstance(value, string_types): value = [shell_expand(x, expand_relative_paths=expand_relative_paths) \ for x in value.split(os.pathsep)] elif isinstance(value, string_types): value = unquote(value) return value
def _save_ssh_host_keys(self, filename):
    '''
    not using the paramiko save_ssh_host_keys function as we want to add new SSH
    keys at the bottom so folks don't complain about it :)
    '''
    if not self._any_keys_added():
        return False

    path = os.path.expanduser("~/.ssh")
    makedirs_safe(path)

    f = open(filename, 'w')

    for hostname, keys in iteritems(self.ssh._host_keys):
        for keytype, key in iteritems(keys):
            # was f.write
            added_this_time = getattr(key, '_added_by_ansible_this_time', False)
            if not added_this_time:
                f.write("%s %s %s\n" % (hostname, keytype, key.get_base64()))

    for hostname, keys in iteritems(self.ssh._host_keys):
        for keytype, key in iteritems(keys):
            added_this_time = getattr(key, '_added_by_ansible_this_time', False)
            if added_this_time:
                f.write("%s %s %s\n" % (hostname, keytype, key.get_base64()))

    f.close()
def close(self): ''' terminate the connection ''' cache_key = self._cache_key() SSH_CONNECTION_CACHE.pop(cache_key, None) SFTP_CONNECTION_CACHE.pop(cache_key, None) if self.sftp is not None: self.sftp.close() if C.HOST_KEY_CHECKING and C.PARAMIKO_RECORD_HOST_KEYS and self._any_keys_added( ): # add any new SSH host keys -- warning -- this could be slow # (This doesn't acquire the connection lock because it needs # to exclude only other known_hosts writers, not connections # that are starting up.) lockfile = self.keyfile.replace("known_hosts", ".known_hosts.lock") dirname = os.path.dirname(self.keyfile) makedirs_safe(dirname) KEY_LOCK = open(lockfile, 'w') fcntl.lockf(KEY_LOCK, fcntl.LOCK_EX) try: # just in case any were added recently self.ssh.load_system_host_keys() self.ssh._host_keys.update(self.ssh._system_host_keys) # gather information about the current key file, so # we can ensure the new file has the correct mode/owner key_dir = os.path.dirname(self.keyfile) key_stat = os.stat(self.keyfile) # Save the new keys to a temporary file and move it into place # rather than rewriting the file. We set delete=False because # the file will be moved into place rather than cleaned up. tmp_keyfile = tempfile.NamedTemporaryFile(dir=key_dir, delete=False) os.chmod(tmp_keyfile.name, key_stat.st_mode & 0o7777) os.chown(tmp_keyfile.name, key_stat.st_uid, key_stat.st_gid) self._save_ssh_host_keys(tmp_keyfile.name) tmp_keyfile.close() os.rename(tmp_keyfile.name, self.keyfile) except: # unable to save keys, including scenario when key was invalid # and caught earlier traceback.print_exc() pass fcntl.lockf(KEY_LOCK, fcntl.LOCK_UN) self.ssh.close()
def _write_password_file(b_path, content):
    b_pathdir = os.path.dirname(b_path)
    makedirs_safe(b_pathdir, mode=0o700)

    with open(b_path, 'wb') as f:
        os.chmod(b_path, 0o600)
        b_content = to_bytes(content, errors='surrogate_or_strict') + b'\n'
        f.write(b_content)
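# Illustrative only: a minimal sketch of what a race-safe mkdir helper such as
# makedirs_safe is assumed to do in these snippets -- create the directory tree,
# apply an optional mode, and tolerate the directory already existing (e.g. when
# two workers race to create it). The real ansible.utils.path implementation may
# differ in details.
import errno
import os


def makedirs_safe_sketch(path, mode=None):
    """Create path (and parents) if missing, ignoring 'already exists' races."""
    if not os.path.exists(path):
        try:
            if mode:
                os.makedirs(path, mode)
            else:
                os.makedirs(path)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise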
def set_options(self, task_keys=None, var_options=None, direct=None):
    super(CallbackModule, self).set_options(task_keys=task_keys, var_options=var_options, direct=direct)

    self.log_folder = self.get_option("log_folder")
    self.log_format = self.get_option("log_format")

    if not os.path.exists(self.log_folder):
        makedirs_safe(self.log_folder)
def run(self, terms, variables, **kwargs): ret = [] for term in terms: relpath, params = _parse_parameters(term) # get password or create it if file doesn't exist path = self._loader.path_dwim(relpath) if not os.path.exists(path): pathdir = os.path.dirname(path) try: makedirs_safe(pathdir, mode=0o700) except OSError as e: raise AnsibleError("cannot create the path for the password lookup: %s (error was %s)" % (pathdir, str(e))) chars = "".join(getattr(string, c, c) for c in params['chars']).replace('"', '').replace("'", '') password = ''.join(random.choice(chars) for _ in range(params['length'])) if params['encrypt'] is not None: salt = self.random_salt() content = '%s salt=%s' % (password, salt) else: content = password with open(path, 'w') as f: os.chmod(path, 0o600) f.write(content + '\n') else: content = open(path).read().rstrip() password = content salt = None if params['encrypt'] is not None: try: sep = content.rindex(' ') except ValueError: # No salt pass else: salt_field = content[sep + 1:] if salt_field.startswith('salt='): password = content[:sep] salt = salt_field[len('salt='):] # crypt requested, add salt if missing if not salt: salt = self.random_salt() content = '%s salt=%s' % (password, salt) with open(path, 'w') as f: os.chmod(path, 0o600) f.write(content + '\n') if params['encrypt']: password = do_encrypt(password, params['encrypt'], salt=salt) ret.append(password) return ret
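# Illustrative sketch of the character-set expansion used by the lookup above:
# each entry in params['chars'] is either the name of a string-module attribute
# (e.g. 'ascii_letters', 'digits') or a literal run of characters, quotes are
# stripped from the pool, and the password is drawn from that pool. The params
# values here are assumptions for the example.
import random
import string

params = {'length': 20, 'chars': ['ascii_letters', 'digits', '.,:-_'], 'encrypt': None}
chars = "".join(getattr(string, c, c) for c in params['chars']).replace('"', '').replace("'", '')
password = ''.join(random.choice(chars) for _ in range(params['length']))
print(password)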
def ensure_type(value, value_type): ''' return a configuration variable with casting :arg value: The value to ensure correct typing of :kwarg value_type: The type of the value. This can be any of the following strings: :boolean: sets the value to a True or False value :integer: Sets the value to an integer or raises a ValueType error :float: Sets the value to a float or raises a ValueType error :list: Treats the value as a comma separated list. Split the value and return it as a python list. :none: Sets the value to None :path: Expands any environment variables and tilde's in the value. :tmp_path: Create a unique temporary directory inside of the directory specified by value and return its path. :pathlist: Treat the value as a typical PATH string. (On POSIX, this means colon separated strings.) Split the value and then expand each part for environment variables and tildes. ''' if value_type: value_type = value_type.lower() if value_type in ('boolean', 'bool'): value = boolean(value, strict=False) elif value: if value_type in ('integer', 'int'): value = int(value) elif value_type == 'float': value = float(value) elif value_type == 'list': if isinstance(value, string_types): value = [x.strip() for x in value.split(',')] elif value_type == 'none': if value == "None": value = None elif value_type == 'path': value = resolve_path(value) elif value_type in ('tmp', 'temppath', 'tmppath'): value = resolve_path(value) if not os.path.exists(value): makedirs_safe(value, 0o700) prefix = 'ansible-local-%s' % os.getpid() value = tempfile.mkdtemp(prefix=prefix, dir=value) elif value_type == 'pathlist': if isinstance(value, string_types): value = [resolve_path(x) for x in value.split(os.pathsep)] # defaults to string types elif isinstance(value, string_types): value = unquote(value) return to_text(value, errors='surrogate_or_strict', nonstring='passthru')
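# Illustrative calls to the ensure_type() helper defined above, using only the
# value_type strings its docstring lists; the input values are assumptions for
# the example, not taken from a real configuration.
ensure_type('yes', 'boolean')             # -> True
ensure_type('5', 'integer')               # -> 5
ensure_type('a, b ,c', 'list')            # -> ['a', 'b', 'c']
ensure_type('~/playbooks', 'path')        # -> expanded path
ensure_type('/opt/a:/opt/b', 'pathlist')  # -> ['/opt/a', '/opt/b'] (POSIX ':' separated)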
def close(self): ''' terminate the connection ''' cache_key = self._cache_key() SSH_CONNECTION_CACHE.pop(cache_key, None) SFTP_CONNECTION_CACHE.pop(cache_key, None) if self.sftp is not None: self.sftp.close() if C.HOST_KEY_CHECKING and C.PARAMIKO_RECORD_HOST_KEYS and self._any_keys_added(): # add any new SSH host keys -- warning -- this could be slow # (This doesn't acquire the connection lock because it needs # to exclude only other known_hosts writers, not connections # that are starting up.) lockfile = self.keyfile.replace("known_hosts",".known_hosts.lock") dirname = os.path.dirname(self.keyfile) makedirs_safe(dirname) KEY_LOCK = open(lockfile, 'w') fcntl.lockf(KEY_LOCK, fcntl.LOCK_EX) try: # just in case any were added recently self.ssh.load_system_host_keys() self.ssh._host_keys.update(self.ssh._system_host_keys) # gather information about the current key file, so # we can ensure the new file has the correct mode/owner key_dir = os.path.dirname(self.keyfile) key_stat = os.stat(self.keyfile) # Save the new keys to a temporary file and move it into place # rather than rewriting the file. We set delete=False because # the file will be moved into place rather than cleaned up. tmp_keyfile = tempfile.NamedTemporaryFile(dir=key_dir, delete=False) os.chmod(tmp_keyfile.name, key_stat.st_mode & 0o7777) os.chown(tmp_keyfile.name, key_stat.st_uid, key_stat.st_gid) self._save_ssh_host_keys(tmp_keyfile.name) tmp_keyfile.close() os.rename(tmp_keyfile.name, self.keyfile) except: # unable to save keys, including scenario when key was invalid # and caught earlier traceback.print_exc() pass fcntl.lockf(KEY_LOCK, fcntl.LOCK_UN) self.ssh.close()
def write_tree_file(self, hostname, buf):
    ''' write something into treedir/hostname '''

    buf = to_bytes(buf)
    try:
        makedirs_safe(self.tree)
        path = os.path.join(self.tree, hostname)
        with open(path, 'wb+') as fd:
            fd.write(buf)
    except (OSError, IOError) as e:
        self._display.warning("Unable to write to %s's file: %s" % (hostname, str(e)))
def write_tree_file(self, hostname, buf):
    ''' write something into treedir/hostname '''

    buf = to_bytes(buf)
    try:
        makedirs_safe(self.tree)
        path = os.path.join(self.tree, hostname)
        with open(path, 'wb+') as fd:
            fd.write(buf)
    except (OSError, IOError) as e:
        self._display.warning(u"Unable to write to %s's file: %s" % (hostname, to_text(e)))
def write_tree_file(self, hostname, buf):
    ''' write something into treedir/hostname '''

    try:
        makedirs_safe(self.tree)
        path = os.path.join(self.tree, hostname)
        fd = open(path, "w+")
        fd.write(buf)
        fd.close()
    except (OSError, IOError) as e:
        self._display.warning("Unable to write to %s's file: %s" % (hostname, str(e)))
def create_file(self, filename, secret, vault_id=None):
    """ create a new encrypted file """

    dirname = os.path.dirname(filename)
    if dirname and not os.path.exists(dirname):
        display.warning(u"%s does not exist, creating..." % to_text(dirname))
        makedirs_safe(dirname)

    # FIXME: If we can raise an error here, we can probably just make it
    # behave like edit instead.
    if os.path.isfile(filename):
        raise AnsibleError("%s exists, please use 'edit' instead" % filename)

    self._edit_file_helper(filename, secret, vault_id=vault_id)
def write_tree_file(self, hostname, buf):
    ''' write something into treedir/hostname '''

    buf = to_bytes(buf)
    try:
        makedirs_safe(self.tree)
    except (OSError, IOError) as e:
        self._display.warning(u"Unable to access or create the configured directory (%s): %s" % (to_text(self.tree), to_text(e)))

    try:
        path = to_bytes(os.path.join(self.tree, hostname))
        with open(path, 'wb+') as fd:
            fd.write(buf)
    except (OSError, IOError) as e:
        self._display.warning(u"Unable to write to %s's file: %s" % (hostname, to_text(e)))
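# Illustrative result of the tree callback variants above: the configured treedir
# ends up holding one file per inventory host, each containing the raw payload
# passed in as buf. A quick way to inspect the output; the directory path is an
# assumption for the example.
import os

tree = os.path.expanduser('~/.ansible/tree')   # hypothetical treedir
for hostname in sorted(os.listdir(tree)):
    with open(os.path.join(tree, hostname), 'rb') as fd:
        print(hostname, fd.read()[:80])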
def _generate_retry_inventory(self, retry_path, replay_hosts):
    '''
    Called when a playbook run fails. It generates an inventory which allows
    re-running on ONLY the failed hosts.  This may duplicate some variable
    information in group_vars/host_vars but that is ok, and expected.
    '''
    try:
        makedirs_safe(os.path.dirname(retry_path))
        with open(retry_path, 'w') as fd:
            for x in replay_hosts:
                fd.write("%s\n" % x)
    except Exception as e:
        display.warning("Could not create retry file '%s'.\n\t%s" % (retry_path, to_str(e)))
        return False

    return True
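# Illustrative: the retry file written above is simply one failed hostname per
# line, so a later run can be limited to just those hosts (e.g. with
# --limit @site.retry). Hostnames and path below are assumptions for the example.
replay_hosts = ['web01', 'db02']
retry_path = '/tmp/site.retry'

with open(retry_path, 'w') as fd:
    for x in replay_hosts:
        fd.write("%s\n" % x)
# resulting file contents:
#   web01
#   db02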
def get_config(p, section, key, env_var, default, value_type=None, expand_relative_paths=False): ''' return a configuration variable with casting ''' value = _get_config(p, section, key, env_var, default) if value_type == 'boolean': value = mk_boolean(value) elif value: if value_type == 'integer': value = int(value) elif value_type == 'float': value = float(value) elif value_type == 'list': if isinstance(value, string_types): value = [x.strip() for x in value.split(',')] elif value_type == 'none': if value == "None": value = None elif value_type == 'path': value = shell_expand(value) elif value_type == 'tmppath': value = shell_expand(value) if not os.path.exists(value): makedirs_safe(value, 0o700) prefix = 'ansible-local-%s' % os.getpid() value = tempfile.mkdtemp(prefix=prefix, dir=value) elif value_type == 'pathlist': if isinstance(value, string_types): value = [shell_expand(x, expand_relative_paths=expand_relative_paths) \ for x in value.split(os.pathsep)] elif isinstance(value, string_types): value = unquote(value) return to_text(value, errors='surrogate_or_strict', nonstring='passthru')
def run(self, tmp=None, task_vars=None): if task_vars is None: task_vars = dict() if self._play_context.check_mode: return dict(skipped=True, msg='check mode not supported for this module') result = super(ActionModule, self).run(tmp, task_vars) cmd = self._task.args.get('cmd', None) stdin = self._task.args.get('stdin', None) dest = self._task.args.get('dest', None) if cmd is None or dest is None: return dict(failed=True, msg="cmd and dest are required") if stdin is not None: stdin = self._connection._shell.join_path(stdin) stdin = self._remote_expand_user(stdin) remote_user = task_vars.get( 'ansible_ssh_user') or self._play_context.remote_user stdout = self._connection._shell.join_path( self._make_tmp_path(remote_user), 'stdout') result.update( self._execute_module(module_args=dict(cmd=cmd, stdin=stdin, dest=stdout), task_vars=task_vars)) # calculate checksum for the local file local_checksum = checksum(dest) # calculate checksum for the remote file, don't bother if using become as slurp will be used remote_checksum = self._remote_checksum(stdout, all_vars=task_vars) if remote_checksum != local_checksum: makedirs_safe(os.path.dirname(dest)) self._connection.fetch_file(stdout, dest) if checksum(dest) == remote_checksum: result.update(dict(changed=True)) else: result.update(dict(failed=True)) return result
def __init__( self, galaxy, name, url, username=None, password=None, token=None, validate_certs=True, available_api_versions=None, clear_response_cache=False, no_cache=True, priority=float('inf'), timeout=60, ): self.galaxy = galaxy self.name = name self.username = username self.password = password self.token = token self.api_server = url self.validate_certs = validate_certs self.timeout = timeout self._available_api_versions = available_api_versions or {} self._priority = priority self._server_timeout = timeout b_cache_dir = to_bytes(C.GALAXY_CACHE_DIR, errors='surrogate_or_strict') makedirs_safe(b_cache_dir, mode=0o700) self._b_cache_path = os.path.join(b_cache_dir, b'api.json') if clear_response_cache: with _CACHE_LOCK: if os.path.exists(self._b_cache_path): display.vvvv("Clearing cache file (%s)" % to_text(self._b_cache_path)) os.remove(self._b_cache_path) self._cache = None if not no_cache: self._cache = _load_cache(self._b_cache_path) display.debug('Validate TLS certificates for %s: %s' % (self.api_server, self.validate_certs))
def _build_command(self, binary, *other_args): ''' Takes a binary (ssh, scp, sftp) and optional extra arguments and returns a command line as an array that can be passed to subprocess.Popen. ''' self._command = [] ## First, the command name. # If we want to use password authentication, we have to set up a pipe to # write the password to sshpass. if self._play_context.password: if not self._sshpass_available(): raise AnsibleError( "to use the 'ssh' connection type with passwords, you must install the sshpass program" ) self.sshpass_pipe = os.pipe() self._command += ['sshpass', '-d{0}'.format(self.sshpass_pipe[0])] self._command += [binary] ## Next, additional arguments based on the configuration. # sftp batch mode allows us to correctly catch failed transfers, but can # be disabled if the client side doesn't support the option. if binary == 'sftp' and C.DEFAULT_SFTP_BATCH_MODE: self._command += ['-b', '-'] self._command += ['-C'] if self._play_context.verbosity > 3: self._command += ['-vvv'] elif binary == 'ssh': # Older versions of ssh (e.g. in RHEL 6) don't accept sftp -q. self._command += ['-q'] # Next, we add [ssh_connection]ssh_args from ansible.cfg. if self._play_context.ssh_args: args = self._split_ssh_args(self._play_context.ssh_args) self._add_args("ansible.cfg set ssh_args", args) # Now we add various arguments controlled by configuration file settings # (e.g. host_key_checking) or inventory variables (ansible_ssh_port) or # a combination thereof. # BB Mod: Rely on ssh_config's strict host key checking; IOW don't add an explict SSH arg # if not C.HOST_KEY_CHECKING: # self._add_args( # "ANSIBLE_HOST_KEY_CHECKING/host_key_checking disabled", # ("-o", "StrictHostKeyChecking=no") # ) if self._play_context.port is not None: self._add_args("ANSIBLE_REMOTE_PORT/remote_port/ansible_port set", ("-o", "Port={0}".format(self._play_context.port))) key = self._play_context.private_key_file if key: self._add_args( "ANSIBLE_PRIVATE_KEY_FILE/private_key_file/ansible_ssh_private_key_file set", ("-o", "IdentityFile=\"{0}\"".format(os.path.expanduser(key)))) # BB Mod: Stick to authmethods: hostbased,publickey if not self._play_context.password: self._add_args("ansible_password/ansible_ssh_pass not set", ("-o", "KbdInteractiveAuthentication=no", "-o", "PreferredAuthentications=hostbased,publickey", "-o", "PasswordAuthentication=no")) user = self._play_context.remote_user if user: self._add_args( "ANSIBLE_REMOTE_USER/remote_user/ansible_user/user/-u set", ("-o", "User={0}".format( to_bytes(self._play_context.remote_user)))) self._add_args( "ANSIBLE_TIMEOUT/timeout set", ("-o", "ConnectTimeout={0}".format(self._play_context.timeout))) # Add in any common or binary-specific arguments from the PlayContext # (i.e. inventory or task settings or overrides on the command line). for opt in ['ssh_common_args', binary + '_extra_args']: attr = getattr(self._play_context, opt, None) if attr is not None: args = self._split_ssh_args(attr) self._add_args("PlayContext set %s" % opt, args) # Check if ControlPersist is enabled and add a ControlPath if one hasn't # already been set. controlpersist, controlpath = self._persistence_controls(self._command) if controlpersist: self._persistent = True if not controlpath: cpdir = unfrackpath('$HOME/.ansible/cp') # The directory must exist and be writable. 
makedirs_safe(cpdir, 0o700) if not os.access(cpdir, os.W_OK): raise AnsibleError("Cannot write to ControlPath %s" % cpdir) args = ("-o", "ControlPath={0}".format( to_bytes(C.ANSIBLE_SSH_CONTROL_PATH % dict(directory=cpdir)))) self._add_args("found only ControlPersist; added ControlPath", args) ## Finally, we add any caller-supplied extras. if other_args: self._command += other_args return self._command
def run(self, terms, variables, **kwargs): res = [] output = {} try: yang_file = terms[0] except IndexError: raise AnsibleError('the yang file must be specified') yang_file = os.path.realpath(os.path.expanduser(yang_file)) if not os.path.isfile(yang_file): raise AnsibleError('%s invalid file path' % yang_file) search_path = kwargs.pop('search_path', '') annotations = kwargs.pop('annotations', '') for path in search_path.split(':'): path = os.path.realpath(os.path.expanduser(path)) if path is not '' and not os.path.isdir(path): raise AnsibleError('%s is invalid directory path' % path) keep_tmp_files = kwargs.pop('keep_tmp_files', False) defaults = kwargs.pop('defaults', False) doctype = kwargs.pop('doctype', 'config') valid_doctype = ['config', 'data'] if doctype not in valid_doctype: raise AnsibleError('doctpe value %s is invalid, valid value are %s' % (path, ', '.join(valid_doctype))) pyang_exec_path = find_file_in_path('pyang') saved_arg = deepcopy(sys.argv) sys.stdout = sys.stderr = StringIO() plugindir = unfrackpath(YANG_SPEC_DIR_PATH) makedirs_safe(plugindir) tree_file_path = os.path.join(YANG_SPEC_DIR_PATH, '%s.%s' % (str(uuid.uuid4()), 'txt')) xml_file_path = os.path.join(YANG_SPEC_DIR_PATH, '%s.%s' % (str(uuid.uuid4()), 'xml')) json_file_path = os.path.join(YANG_SPEC_DIR_PATH, '%s.%s' % (str(uuid.uuid4()), 'json')) tree_file_path = os.path.realpath(os.path.expanduser(tree_file_path)) xml_file_path = os.path.realpath(os.path.expanduser(xml_file_path)) json_file_path = os.path.realpath(os.path.expanduser(json_file_path)) # fill in the sys args before invoking pyang to retrieve xml skeleton sample_xml_skeleton_cmd = [pyang_exec_path, '-f', 'sample-xml-skeleton', '-o', xml_file_path, yang_file, '-p', search_path, "--sample-xml-skeleton-doctype", doctype, "--lax-quote-checks"] if defaults: sample_xml_skeleton_cmd.append("--sample-xml-skeleton-defaults") if annotations: sample_xml_skeleton_cmd.append("--sample-xml-skeleton-annotations") try: subprocess.check_output(' '.join(sample_xml_skeleton_cmd), stderr=subprocess.STDOUT, shell=True) except SystemExit: pass except Exception as e: if not keep_tmp_files: shutil.rmtree(os.path.realpath(os.path.expanduser(YANG_SPEC_DIR_PATH)), ignore_errors=True) raise AnsibleError('Error while generating skeleton xml file: %s' % e) finally: err = sys.stdout.getvalue() if err and 'error' in err.lower(): if not keep_tmp_files: shutil.rmtree(os.path.realpath(os.path.expanduser(YANG_SPEC_DIR_PATH)), ignore_errors=True) raise AnsibleError('Error while generating skeleton xml file: %s' % err) sys.stdout.flush() sys.stderr.flush() # fill in the sys args before invoking pyang to retrieve tree structure tree_cmd = [pyang_exec_path, '-f', 'tree', '-o', tree_file_path, yang_file, '-p', search_path, "--lax-quote-checks"] try: subprocess.check_output(' '.join(tree_cmd), stderr=subprocess.STDOUT, shell=True) except SystemExit: pass except Exception as e: if not keep_tmp_files: shutil.rmtree(os.path.realpath(os.path.expanduser(YANG_SPEC_DIR_PATH)), ignore_errors=True) raise AnsibleError('Error while generating tree file: %s' % e) finally: err = sys.stdout.getvalue() if err and 'error' in err.lower(): if not keep_tmp_files: shutil.rmtree(os.path.realpath(os.path.expanduser(YANG_SPEC_DIR_PATH)), ignore_errors=True) raise AnsibleError('Error while generating tree file: %s' % err) sys.stdout.flush() sys.stderr.flush() plugin_file_src = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'yang_spec.py') shutil.copy(plugin_file_src, plugindir) # fill in the sys args 
before invoking pyang to retrieve json skeleton sample_json_skeleton_cmd = [pyang_exec_path, '--plugindir', plugindir, '-f', 'sample-json-skeleton', '-o', json_file_path, yang_file, '-p', search_path, '--lax-quote-checks', '--sample-json-skeleton-doctype', doctype] if defaults: sample_json_skeleton_cmd.append("--sample-json-skeleton-defaults") try: subprocess.check_output(' '.join(sample_json_skeleton_cmd), stderr=subprocess.STDOUT, shell=True) except SystemExit: pass except Exception as e: if not keep_tmp_files: shutil.rmtree(os.path.realpath(os.path.expanduser(YANG_SPEC_DIR_PATH)), ignore_errors=True) raise AnsibleError('Error while generating skeleton json file: %s' % e) finally: err = sys.stdout.getvalue() if err and 'error' in err.lower(): if not keep_tmp_files: shutil.rmtree(os.path.realpath(os.path.expanduser(YANG_SPEC_DIR_PATH)), ignore_errors=True) raise AnsibleError('Error while generating tree json: %s' % err) with open(tree_file_path, 'r') as f: output['tree'] = f.read() with open(xml_file_path, 'r') as f: output['xml_skeleton'] = f.read() with open(json_file_path, 'r') as f: output['json_skeleton'] = json.load(f) if not keep_tmp_files: shutil.rmtree(plugindir, ignore_errors=True) res.append(output) sys.argv = saved_arg return res
def fetch_file(self, in_path, out_path): super(Connection, self).fetch_file(in_path, out_path) in_path = self._shell._unquote(in_path) out_path = out_path.replace('\\', '/') display.vvv('FETCH "%s" TO "%s"' % (in_path, out_path), host=self._winrm_host) buffer_size = 2**19 # 0.5MB chunks makedirs_safe(os.path.dirname(out_path)) out_file = None try: offset = 0 while True: try: script = ''' $path = "%(path)s" If (Test-Path -Path $path -PathType Leaf) { $buffer_size = %(buffer_size)d $offset = %(offset)d $stream = New-Object -TypeName IO.FileStream($path, [IO.FileMode]::Open, [IO.FileAccess]::Read, [IO.FileShare]::ReadWrite) $stream.Seek($offset, [System.IO.SeekOrigin]::Begin) > $null $buffer = New-Object -TypeName byte[] $buffer_size $bytes_read = $stream.Read($buffer, 0, $buffer_size) if ($bytes_read -gt 0) { $bytes = $buffer[0..($bytes_read - 1)] [System.Convert]::ToBase64String($bytes) } $stream.Close() > $null } ElseIf (Test-Path -Path $path -PathType Container) { Write-Host "[DIR]"; } Else { Write-Error "$path does not exist"; Exit 1; } ''' % dict(buffer_size=buffer_size, path=self._shell._escape(in_path), offset=offset) display.vvvvv('WINRM FETCH "%s" to "%s" (offset=%d)' % (in_path, out_path, offset), host=self._winrm_host) cmd_parts = self._shell._encode_script(script, as_list=True, preserve_rc=False) result = self._winrm_exec(cmd_parts[0], cmd_parts[1:]) if result.status_code != 0: raise IOError(to_native(result.std_err)) if result.std_out.strip() == '[DIR]': data = None else: data = base64.b64decode(result.std_out.strip()) if data is None: makedirs_safe(out_path) break else: if not out_file: # If out_path is a directory and we're expecting a file, bail out now. if os.path.isdir(to_bytes(out_path, errors='surrogate_or_strict')): break out_file = open(to_bytes(out_path, errors='surrogate_or_strict'), 'wb') out_file.write(data) if len(data) < buffer_size: break offset += len(data) except Exception: traceback.print_exc() raise AnsibleError('failed to transfer file to "%s"' % to_native(out_path)) finally: if out_file: out_file.close()
def json_to_xml(self, json_data): """ The method translates JSON data encoded as per YANG model (RFC 7951) to XML payload :param json_data: JSON data that should to translated to XML :return: XML data in string format. """ saved_arg = deepcopy(sys.argv) saved_stdout = sys.stdout saved_stderr = sys.stderr sys.stdout = sys.stderr = StringIO() plugin_instance = str(uuid.uuid4()) plugindir = unfrackpath(JSON2XML_DIR_PATH) makedirs_safe(plugindir) makedirs_safe(os.path.join(plugindir, plugin_instance)) jtox_file_path = os.path.join( JSON2XML_DIR_PATH, plugin_instance, "%s.%s" % (str(uuid.uuid4()), "jtox"), ) xml_file_path = os.path.join( JSON2XML_DIR_PATH, plugin_instance, "%s.%s" % (str(uuid.uuid4()), "xml"), ) jtox_file_path = os.path.realpath(os.path.expanduser(jtox_file_path)) xml_file_path = os.path.realpath(os.path.expanduser(xml_file_path)) yang_metada_dir = os.path.join( os.path.dirname(os.path.abspath(__file__)), "files/yang") yang_metadata_path = os.path.join(yang_metada_dir, "nc-op.yang") self._search_path += ":%s" % yang_metada_dir # fill in the sys args before invoking pyang sys.argv = ([ self._pyang_exec_path, "-f", "jtox", "-o", jtox_file_path, "-p", self._search_path, "--lax-quote-checks", ] + self._yang_files + [yang_metadata_path]) try: self._pyang_exec.run() except SystemExit: pass except Exception as e: temp_dir = os.path.join(JSON2XML_DIR_PATH, plugin_instance) shutil.rmtree( os.path.realpath(os.path.expanduser(temp_dir)), ignore_errors=True, ) raise AnsibleError( "Error while generating intermediate (jtox) file: %s" % e) finally: err = sys.stderr.getvalue() if err and "error" in err.lower(): if not self._keep_tmp_files: temp_dir = os.path.join(JSON2XML_DIR_PATH, plugin_instance) shutil.rmtree( os.path.realpath(os.path.expanduser(temp_dir)), ignore_errors=True, ) raise AnsibleError( "Error while generating intermediate (jtox) file: %s" % err) json2xml_exec_path = find_file_in_path("json2xml") json2xml_exec = imp.load_source("json2xml", json2xml_exec_path) # fill in the sys args before invoking json2xml sys.argv = [ json2xml_exec_path, "-t", self._doctype, "-o", xml_file_path, jtox_file_path, json_data, ] try: json2xml_exec.main() with open(xml_file_path, "r+") as fp: content = fp.read() except SystemExit: pass finally: err = sys.stderr.getvalue() if err and "error" in err.lower(): if not self._keep_tmp_files: temp_dir = os.path.join(JSON2XML_DIR_PATH, plugin_instance) shutil.rmtree( os.path.realpath(os.path.expanduser(temp_dir)), ignore_errors=True, ) raise AnsibleError("Error while translating to xml: %s" % err) sys.argv = saved_arg sys.stdout = saved_stdout sys.stderr = saved_stderr try: content = re.sub(r"<\? ?xml .*\? ?>", "", content) root = etree.fromstring(content) except Exception as e: raise AnsibleError("Error while reading xml document: %s" % e) finally: if not self._keep_tmp_files: temp_dir = os.path.join(JSON2XML_DIR_PATH, plugin_instance) shutil.rmtree( os.path.realpath(os.path.expanduser(temp_dir)), ignore_errors=True, ) return etree.tostring(root)
def ensure_type(value, value_type, origin=None): ''' return a configuration variable with casting :arg value: The value to ensure correct typing of :kwarg value_type: The type of the value. This can be any of the following strings: :boolean: sets the value to a True or False value :bool: Same as 'boolean' :integer: Sets the value to an integer or raises a ValueType error :int: Same as 'integer' :float: Sets the value to a float or raises a ValueType error :list: Treats the value as a comma separated list. Split the value and return it as a python list. :none: Sets the value to None :path: Expands any environment variables and tilde's in the value. :tmppath: Create a unique temporary directory inside of the directory specified by value and return its path. :temppath: Same as 'tmppath' :tmp: Same as 'tmppath' :pathlist: Treat the value as a typical PATH string. (On POSIX, this means colon separated strings.) Split the value and then expand each part for environment variables and tildes. :pathspec: Treat the value as a PATH string. Expands any environment variables tildes's in the value. :str: Sets the value to string types. :string: Same as 'str' ''' errmsg = '' basedir = None if origin and os.path.isabs(origin) and os.path.exists(to_bytes(origin)): basedir = origin if value_type: value_type = value_type.lower() if value is not None: if value_type in ('boolean', 'bool'): value = boolean(value, strict=False) elif value_type in ('integer', 'int'): value = int(value) elif value_type == 'float': value = float(value) elif value_type == 'list': if isinstance(value, string_types): value = [x.strip() for x in value.split(',')] elif not isinstance(value, Sequence): errmsg = 'list' elif value_type == 'none': if value == "None": value = None if value is not None: errmsg = 'None' elif value_type == 'path': if isinstance(value, string_types): value = resolve_path(value, basedir=basedir) else: errmsg = 'path' elif value_type in ('tmp', 'temppath', 'tmppath'): if isinstance(value, string_types): value = resolve_path(value, basedir=basedir) if not os.path.exists(value): makedirs_safe(value, 0o700) prefix = 'ansible-local-%s' % os.getpid() value = tempfile.mkdtemp(prefix=prefix, dir=value) atexit.register(cleanup_tmp_file, value, warn=True) else: errmsg = 'temppath' elif value_type == 'pathspec': if isinstance(value, string_types): value = value.split(os.pathsep) if isinstance(value, Sequence): value = [resolve_path(x, basedir=basedir) for x in value] else: errmsg = 'pathspec' elif value_type == 'pathlist': if isinstance(value, string_types): value = [x.strip() for x in value.split(',')] if isinstance(value, Sequence): value = [resolve_path(x, basedir=basedir) for x in value] else: errmsg = 'pathlist' elif value_type in ('str', 'string'): if isinstance(value, (string_types, AnsibleVaultEncryptedUnicode)): value = unquote(to_text(value, errors='surrogate_or_strict')) else: errmsg = 'string' # defaults to string type elif isinstance(value, (string_types, AnsibleVaultEncryptedUnicode)): value = unquote(to_text(value, errors='surrogate_or_strict')) if errmsg: raise ValueError('Invalid type provided for "%s": %s' % (errmsg, to_native(value))) return to_text(value, errors='surrogate_or_strict', nonstring='passthru')
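# Illustrative calls to the richer ensure_type() variant above. origin is only
# used as a base directory when it is an existing absolute path; the paths here
# are assumptions for the example. Note that in this variant 'pathlist' splits on
# commas while 'pathspec' splits on os.pathsep.
ensure_type('yes', 'bool')                                      # -> True
ensure_type('roles/common', 'path', origin='/etc/ansible')      # resolved relative to origin, if it exists
ensure_type('a.yml, b.yml', 'pathlist', origin='/etc/ansible')  # comma-separated list of resolved paths
ensure_type('/opt/a:/opt/b', 'pathspec')                        # -> ['/opt/a', '/opt/b'] on POSIX
ensure_type('None', 'none')                                     # -> None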
def run(self, terms, variables, **kwargs): ret = [] for term in terms: # you can't have escaped spaces in yor pathname params = term.split() relpath = params[0] paramvals = { 'length': DEFAULT_LENGTH, 'encrypt': None, 'chars': ['ascii_letters', 'digits', ".,:-_"], } # get non-default parameters if specified try: for param in params[1:]: name, value = param.split('=') assert (name in paramvals) if name == 'length': paramvals[name] = int(value) elif name == 'chars': use_chars = [] if ",," in value: use_chars.append(',') use_chars.extend(value.replace(',,', ',').split(',')) paramvals['chars'] = use_chars else: paramvals[name] = value except (ValueError, AssertionError) as e: raise AnsibleError(e) length = paramvals['length'] encrypt = paramvals['encrypt'] use_chars = paramvals['chars'] # get password or create it if file doesn't exist path = self._loader.path_dwim(relpath) if not os.path.exists(path): pathdir = os.path.dirname(path) try: makedirs_safe(pathdir, mode=0o700) except OSError as e: raise AnsibleError( "cannot create the path for the password lookup: %s (error was %s)" % (pathdir, str(e))) chars = "".join([getattr(string, c, c) for c in use_chars ]).replace('"', '').replace("'", '') password = ''.join(random.choice(chars) for _ in range(length)) if encrypt is not None: salt = self.random_salt() content = '%s salt=%s' % (password, salt) else: content = password with open(path, 'w') as f: os.chmod(path, 0o600) f.write(content + '\n') else: content = open(path).read().rstrip() sep = content.find(' ') if sep >= 0: password = content[:sep] salt = content[sep + 1:].split('=')[1] else: password = content salt = None # crypt requested, add salt if missing if (encrypt is not None and not salt): salt = self.random_salt() content = '%s salt=%s' % (password, salt) with open(path, 'w') as f: os.chmod(path, 0o600) f.write(content + '\n') # crypt not requested, remove salt if present elif (encrypt is None and salt): with open(path, 'w') as f: os.chmod(path, 0o600) f.write(password + '\n') if encrypt: password = do_encrypt(password, encrypt, salt=salt) ret.append(password) return ret
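# Illustrative sketch of the on-disk format the lookup above reads back: the
# plaintext password optionally followed by ' salt=<salt>' when encryption was
# requested, which the content.find(' ') branch splits apart. The values are
# made up for the example.
content = 'Xy7pQ2vL salt=fo3Fmvd2'

sep = content.find(' ')
if sep >= 0:
    password = content[:sep]                # 'Xy7pQ2vL'
    salt = content[sep + 1:].split('=')[1]  # 'fo3Fmvd2'
else:
    password = content
    salt = None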
def run(self, tmp=None, task_vars=None): ''' handler for fetch operations ''' if task_vars is None: task_vars = dict() result = super(ActionModule, self).run(tmp, task_vars) del tmp # tmp no longer has any effect try: if self._play_context.check_mode: raise AnsibleActionSkip( 'check mode not (yet) supported for this module') source = self._task.args.get('src', None) original_dest = dest = self._task.args.get('dest', None) flat = boolean(self._task.args.get('flat'), strict=False) fail_on_missing = boolean(self._task.args.get( 'fail_on_missing', True), strict=False) validate_checksum = boolean(self._task.args.get( 'validate_checksum', True), strict=False) msg = '' # validate source and dest are strings FIXME: use basic.py and module specs if not isinstance(source, string_types): msg = "Invalid type supplied for source option, it must be a string" if not isinstance(dest, string_types): msg = "Invalid type supplied for dest option, it must be a string" if source is None or dest is None: msg = "src and dest are required" if msg: raise AnsibleActionFail(msg) source = self._connection._shell.join_path(source) source = self._remote_expand_user(source) remote_stat = {} remote_checksum = None if True: # Get checksum for the remote file even using become. Mitogen doesn't need slurp. # Follow symlinks because fetch always follows symlinks try: remote_stat = self._execute_remote_stat(source, all_vars=task_vars, follow=True) except AnsibleError as ae: result['changed'] = False result['file'] = source if fail_on_missing: result['failed'] = True result['msg'] = to_text(ae) else: result['msg'] = "%s, ignored" % to_text( ae, errors='surrogate_or_replace') return result remote_checksum = remote_stat.get('checksum') if remote_stat.get('exists'): if remote_stat.get('isdir'): result['failed'] = True result['changed'] = False result[ 'msg'] = "remote file is a directory, fetch cannot work on directories" # Historically, these don't fail because you may want to transfer # a log file that possibly MAY exist but keep going to fetch other # log files. Today, this is better achieved by adding # ignore_errors or failed_when to the task. Control the behaviour # via fail_when_missing if not fail_on_missing: result['msg'] += ", not transferring, ignored" del result['changed'] del result['failed'] return result # use slurp if permissions are lacking or privilege escalation is needed remote_data = None if remote_checksum in (None, '1', ''): slurpres = self._execute_module( module_name='ansible.legacy.slurp', module_args=dict(src=source), task_vars=task_vars) if slurpres.get('failed'): if not fail_on_missing: result['file'] = source result['changed'] = False else: result.update(slurpres) if 'not found' in slurpres.get('msg', ''): result[ 'msg'] = "the remote file does not exist, not transferring, ignored" elif slurpres.get('msg', '').startswith('source is a directory'): result[ 'msg'] = "remote file is a directory, fetch cannot work on directories" return result else: if slurpres['encoding'] == 'base64': remote_data = base64.b64decode(slurpres['content']) if remote_data is not None: remote_checksum = checksum_s(remote_data) # calculate the destination name if os.path.sep not in self._connection._shell.join_path('a', ''): source = self._connection._shell._unquote(source) source_local = source.replace('\\', '/') else: source_local = source # ensure we only use file name, avoid relative paths if not is_subpath(dest, original_dest): # TODO: ? 
dest = os.path.expanduser(dest.replace(('../',''))) raise AnsibleActionFail( "Detected directory traversal, expected to be contained in '%s' but got '%s'" % (original_dest, dest)) if flat: if os.path.isdir(to_bytes(dest, errors='surrogate_or_strict') ) and not dest.endswith(os.sep): raise AnsibleActionFail( "dest is an existing directory, use a trailing slash if you want to fetch src into that directory" ) if dest.endswith(os.sep): # if the path ends with "/", we'll use the source filename as the # destination filename base = os.path.basename(source_local) dest = os.path.join(dest, base) if not dest.startswith("/"): # if dest does not start with "/", we'll assume a relative path dest = self._loader.path_dwim(dest) else: # files are saved in dest dir, with a subdir for each host, then the filename if 'inventory_hostname' in task_vars: target_name = task_vars['inventory_hostname'] else: target_name = self._play_context.remote_addr dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local) dest = os.path.normpath(dest) # calculate checksum for the local file local_checksum = checksum(dest) if remote_checksum != local_checksum: # create the containing directories, if needed makedirs_safe(os.path.dirname(dest)) # fetch the file and check for changes if remote_data is None: self._connection.fetch_file(source, dest) else: try: f = open(to_bytes(dest, errors='surrogate_or_strict'), 'wb') f.write(remote_data) f.close() except (IOError, OSError) as e: raise AnsibleActionFail( "Failed to fetch the file: %s" % e) new_checksum = secure_hash(dest) # For backwards compatibility. We'll return None on FIPS enabled systems try: new_md5 = md5(dest) except ValueError: new_md5 = None if validate_checksum and new_checksum != remote_checksum: result.update( dict(failed=True, md5sum=new_md5, msg="checksum mismatch", file=source, dest=dest, remote_md5sum=None, checksum=new_checksum, remote_checksum=remote_checksum)) else: result.update({ 'changed': True, 'md5sum': new_md5, 'dest': dest, 'remote_md5sum': None, 'checksum': new_checksum, 'remote_checksum': remote_checksum }) else: # For backwards compatibility. We'll return None on FIPS enabled systems try: local_md5 = md5(dest) except ValueError: local_md5 = None result.update( dict(changed=False, md5sum=local_md5, file=source, dest=dest, checksum=local_checksum)) finally: self._remove_tmp_path(self._connection._shell.tmpdir) return result
def fetch_file(self, in_path, out_path): super(Connection, self).fetch_file(in_path, out_path) in_path = self._shell._unquote(in_path) out_path = out_path.replace('\\', '/') display.vvv('FETCH "%s" TO "%s"' % (in_path, out_path), host=self._winrm_host) buffer_size = 2**19 # 0.5MB chunks makedirs_safe(os.path.dirname(out_path)) out_file = None try: offset = 0 while True: try: script = ''' If (Test-Path -PathType Leaf "%(path)s") { $stream = [System.IO.File]::OpenRead("%(path)s"); $stream.Seek(%(offset)d, [System.IO.SeekOrigin]::Begin) | Out-Null; $buffer = New-Object Byte[] %(buffer_size)d; $bytesRead = $stream.Read($buffer, 0, %(buffer_size)d); $bytes = $buffer[0..($bytesRead-1)]; [System.Convert]::ToBase64String($bytes); $stream.Close() | Out-Null; } ElseIf (Test-Path -PathType Container "%(path)s") { Write-Host "[DIR]"; } Else { Write-Error "%(path)s does not exist"; Exit 1; } ''' % dict(buffer_size=buffer_size, path=self._shell._escape(in_path), offset=offset) display.vvvvv('WINRM FETCH "%s" to "%s" (offset=%d)' % (in_path, out_path, offset), host=self._winrm_host) cmd_parts = self._shell._encode_script(script, as_list=True) result = self._winrm_exec(cmd_parts[0], cmd_parts[1:]) if result.status_code != 0: raise IOError(to_str(result.std_err)) if result.std_out.strip() == '[DIR]': data = None else: data = base64.b64decode(result.std_out.strip()) if data is None: makedirs_safe(out_path) break else: if not out_file: # If out_path is a directory and we're expecting a file, bail out now. if os.path.isdir(out_path): break out_file = open(out_path, 'wb') out_file.write(data) if len(data) < buffer_size: break offset += len(data) except Exception: traceback.print_exc() raise AnsibleError('failed to transfer file to "%s"' % out_path) finally: if out_file: out_file.close()
def get_config(p, section, key, env_var, default, value_type=None, expand_relative_paths=False): ''' return a configuration variable with casting :arg p: A ConfigParser object to look for the configuration in :arg section: A section of the ini config that should be examined for this section. :arg key: The config key to get this config from :arg env_var: An Environment variable to check for the config var. If this is set to None then no environment variable will be used. :arg default: A default value to assign to the config var if nothing else sets it. :kwarg value_type: The type of the value. This can be any of the following strings: :boolean: sets the value to a True or False value :integer: Sets the value to an integer or raises a ValueType error :float: Sets the value to a float or raises a ValueType error :list: Treats the value as a comma separated list. Split the value and return it as a python list. :none: Sets the value to None :path: Expands any environment variables and tilde's in the value. :tmp_path: Create a unique temporary directory inside of the directory specified by value and return its path. :pathlist: Treat the value as a typical PATH string. (On POSIX, this means colon separated strings.) Split the value and then expand each part for environment variables and tildes. :kwarg expand_relative_paths: for pathlist and path types, if this is set to True then also change any relative paths into absolute paths. The default is False. ''' value = _get_config(p, section, key, env_var, default) if value_type == 'boolean': value = mk_boolean(value) elif value: if value_type == 'integer': value = int(value) elif value_type == 'float': value = float(value) elif value_type == 'list': if isinstance(value, string_types): value = [x.strip() for x in value.split(',')] elif value_type == 'none': if value == "None": value = None elif value_type == 'path': value = shell_expand(value, expand_relative_paths=expand_relative_paths) elif value_type == 'tmppath': value = shell_expand(value) if not os.path.exists(value): makedirs_safe(value, 0o700) prefix = 'ansible-local-%s' % os.getpid() value = tempfile.mkdtemp(prefix=prefix, dir=value) elif value_type == 'pathlist': if isinstance(value, string_types): value = [shell_expand(x, expand_relative_paths=expand_relative_paths) \ for x in value.split(os.pathsep)] elif isinstance(value, string_types): value = unquote(value) return to_text(value, errors='surrogate_or_strict', nonstring='passthru')
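# Illustrative use of the get_config() helper documented above: the value is taken
# from the environment variable or the ini section/key, falling back to the default,
# then cast according to value_type. The section, key, and env var names here are
# assumptions for the example, not real ansible.cfg settings.
try:
    import configparser                   # Python 3
except ImportError:
    import ConfigParser as configparser   # Python 2

p = configparser.ConfigParser()
p.read('ansible.cfg')

timeout = get_config(p, 'example', 'timeout', 'ANSIBLE_EXAMPLE_TIMEOUT', 10,
                     value_type='integer')
plugin_dirs = get_config(p, 'example', 'plugin_dirs', 'ANSIBLE_EXAMPLE_PLUGIN_DIRS',
                         '~/.plugins', value_type='pathlist', expand_relative_paths=True)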
def _connect(self): ''' connect to the remote host ''' self._display.vvv("ESTABLISH SSH CONNECTION FOR USER: {0}".format(self._play_context.remote_user), host=self._play_context.remote_addr) if self._connected: return self # We start with ansible_ssh_args from the inventory if it's set, # or [ssh_connection]ssh_args from ansible.cfg, or the default # Control* settings. if self.ssh_args: args = self._split_args(self.ssh_args) self.add_args("inventory set ansible_ssh_args", args) elif C.ANSIBLE_SSH_ARGS: args = self._split_args(C.ANSIBLE_SSH_ARGS) self.add_args("ansible.cfg set ssh_args", args) else: args = ( "-o", "ControlMaster=auto", "-o", "ControlPersist=60s" ) self.add_args("default arguments", args) # If any of the above have set ControlPersist but not a # ControlPath, add one ourselves. cp_in_use = False cp_path_set = False for arg in self._common_args: if "ControlPersist" in arg: cp_in_use = True if "ControlPath" in arg: cp_path_set = True if cp_in_use and not cp_path_set: self._cp_dir = unfrackpath('$HOME/.ansible/cp') args = ("-o", "ControlPath=\"{0}\"".format( C.ANSIBLE_SSH_CONTROL_PATH % dict(directory=self._cp_dir)) ) self.add_args("found only ControlPersist; added ControlPath", args) # The directory must exist and be writable. makedirs_safe(self._cp_dir, 0o700) if not os.access(self._cp_dir, os.W_OK): raise AnsibleError("Cannot write to ControlPath %s" % self._cp_dir) if not C.HOST_KEY_CHECKING: self.add_args( "ANSIBLE_HOST_KEY_CHECKING/host_key_checking disabled", ("-o", "StrictHostKeyChecking=no") ) if self._play_context.port is not None: self.add_args( "ANSIBLE_REMOTE_PORT/remote_port/ansible_ssh_port set", ("-o", "Port={0}".format(self._play_context.port)) ) key = self._play_context.private_key_file if key: self.add_args( "ANSIBLE_PRIVATE_KEY_FILE/private_key_file/ansible_ssh_private_key_file set", ("-o", "IdentityFile=\"{0}\"".format(os.path.expanduser(key))) ) if not self._play_context.password: self.add_args( "ansible_password/ansible_ssh_pass not set", ( "-o", "KbdInteractiveAuthentication=no", "-o", "PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey", "-o", "PasswordAuthentication=no" ) ) user = self._play_context.remote_user if user and user != pwd.getpwuid(os.geteuid())[0]: self.add_args( "ANSIBLE_REMOTE_USER/remote_user/ansible_ssh_user/user/-u set", ("-o", "User={0}".format(self._play_context.remote_user)) ) self.add_args( "ANSIBLE_TIMEOUT/timeout set", ("-o", "ConnectTimeout={0}".format(self._play_context.timeout)) ) # If any extra SSH arguments are specified in the inventory for # this host, or specified as an override on the command line, # add them in. if self._play_context.ssh_extra_args: args = self._split_args(self._play_context.ssh_extra_args) self.add_args("command-line added --ssh-extra-args", args) elif self.ssh_extra_args: args = self._split_args(self.ssh_extra_args) self.add_args("inventory added ansible_ssh_extra_args", args) self._connected = True return self
def run(self, tmp=None, task_vars=dict()): """ handler for fetch operations """ if self._connection_info.check_mode: return dict(skipped=True, msg="check mode not (yet) supported for this module") source = self._task.args.get("src", None) dest = self._task.args.get("dest", None) flat = boolean(self._task.args.get("flat")) fail_on_missing = boolean(self._task.args.get("fail_on_missing")) validate_checksum = boolean(self._task.args.get("validate_checksum", self._task.args.get("validate_md5"))) if "validate_md5" in self._task.args and "validate_checksum" in self._task.args: return dict(failed=True, msg="validate_checksum and validate_md5 cannot both be specified") if source is None or dest is None: return dict(failed=True, msg="src and dest are required") source = self._connection._shell.join_path(source) source = self._remote_expand_user(source, tmp) # calculate checksum for the remote file remote_checksum = self._remote_checksum(tmp, source) # use slurp if sudo and permissions are lacking remote_data = None if remote_checksum in ("1", "2") or self._connection_info.become: slurpres = self._execute_module( module_name="slurp", module_args=dict(src=source), task_vars=task_vars, tmp=tmp ) if slurpres.get("rc") == 0: if slurpres["encoding"] == "base64": remote_data = base64.b64decode(slurpres["content"]) if remote_data is not None: remote_checksum = checksum_s(remote_data) # the source path may have been expanded on the # target system, so we compare it here and use the # expanded version if it's different remote_source = slurpres.get("source") if remote_source and remote_source != source: source = remote_source else: # FIXME: should raise an error here? the old code did nothing pass # calculate the destination name if os.path.sep not in self._connection._shell.join_path("a", ""): source_local = source.replace("\\", "/") else: source_local = source dest = os.path.expanduser(dest) if flat: if dest.endswith(os.sep): # if the path ends with "/", we'll use the source filename as the # destination filename base = os.path.basename(source_local) dest = os.path.join(dest, base) if not dest.startswith("/"): # if dest does not start with "/", we'll assume a relative path dest = self._loader.path_dwim(dest) else: # files are saved in dest dir, with a subdir for each host, then the filename if "inventory_hostname" in task_vars: target_name = task_vars["inventory_hostname"] else: target_name = self._connection_info.remote_addr dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local) dest = dest.replace("//", "/") if remote_checksum in ("0", "1", "2", "3", "4"): # these don't fail because you may want to transfer a log file that possibly MAY exist # but keep going to fetch other log files if remote_checksum == "0": result = dict(msg="unable to calculate the checksum of the remote file", file=source, changed=False) elif remote_checksum == "1": if fail_on_missing: result = dict(failed=True, msg="the remote file does not exist", file=source) else: result = dict( msg="the remote file does not exist, not transferring, ignored", file=source, changed=False ) elif remote_checksum == "2": result = dict( msg="no read permission on remote file, not transferring, ignored", file=source, changed=False ) elif remote_checksum == "3": result = dict( msg="remote file is a directory, fetch cannot work on directories", file=source, changed=False ) elif remote_checksum == "4": result = dict( msg="python isn't present on the system. 
Unable to compute checksum", file=source, changed=False ) return result # calculate checksum for the local file local_checksum = checksum(dest) if remote_checksum != local_checksum: # create the containing directories, if needed makedirs_safe(os.path.dirname(dest)) # fetch the file and check for changes if remote_data is None: self._connection.fetch_file(source, dest) else: f = open(dest, "w") f.write(remote_data) f.close() new_checksum = secure_hash(dest) # For backwards compatibility. We'll return None on FIPS enabled # systems try: new_md5 = md5(dest) except ValueError: new_md5 = None if validate_checksum and new_checksum != remote_checksum: return dict( failed=True, md5sum=new_md5, msg="checksum mismatch", file=source, dest=dest, remote_md5sum=None, checksum=new_checksum, remote_checksum=remote_checksum, ) return dict( changed=True, md5sum=new_md5, dest=dest, remote_md5sum=None, checksum=new_checksum, remote_checksum=remote_checksum, ) else: # For backwards compatibility. We'll return None on FIPS enabled # systems try: local_md5 = md5(dest) except ValueError: local_md5 = None return dict(changed=False, md5sum=local_md5, file=source, dest=dest, checksum=local_checksum)
def run(self, tmp=None, task_vars=None): ''' handler for fetch operations ''' if task_vars is None: task_vars = dict() result = super(ActionModule, self).run(tmp, task_vars) del tmp # tmp no longer has any effect try: if self._play_context.check_mode: result['skipped'] = True result['msg'] = 'check mode not (yet) supported for this module' return result source = self._task.args.get('src', None) dest = self._task.args.get('dest', None) flat = boolean(self._task.args.get('flat'), strict=False) fail_on_missing = boolean(self._task.args.get('fail_on_missing', True), strict=False) validate_checksum = boolean(self._task.args.get('validate_checksum', self._task.args.get('validate_md5', True)), strict=False) # validate source and dest are strings FIXME: use basic.py and module specs if not isinstance(source, string_types): result['msg'] = "Invalid type supplied for source option, it must be a string" if not isinstance(dest, string_types): result['msg'] = "Invalid type supplied for dest option, it must be a string" # validate_md5 is the deprecated way to specify validate_checksum if 'validate_md5' in self._task.args and 'validate_checksum' in self._task.args: result['msg'] = "validate_checksum and validate_md5 cannot both be specified" if 'validate_md5' in self._task.args: display.deprecated('Use validate_checksum instead of validate_md5', version='2.8') if source is None or dest is None: result['msg'] = "src and dest are required" if result.get('msg'): result['failed'] = True return result source = self._connection._shell.join_path(source) source = self._remote_expand_user(source) remote_checksum = None if not self._play_context.become: # calculate checksum for the remote file, don't bother if using become as slurp will be used # Force remote_checksum to follow symlinks because fetch always follows symlinks remote_checksum = self._remote_checksum(source, all_vars=task_vars, follow=True) # use slurp if permissions are lacking or privilege escalation is needed remote_data = None if remote_checksum in ('1', '2', None): slurpres = self._execute_module(module_name='slurp', module_args=dict(src=source), task_vars=task_vars) if slurpres.get('failed'): if not fail_on_missing and (slurpres.get('msg').startswith('file not found') or remote_checksum == '1'): result['msg'] = "the remote file does not exist, not transferring, ignored" result['file'] = source result['changed'] = False else: result.update(slurpres) return result else: if slurpres['encoding'] == 'base64': remote_data = base64.b64decode(slurpres['content']) if remote_data is not None: remote_checksum = checksum_s(remote_data) # the source path may have been expanded on the # target system, so we compare it here and use the # expanded version if it's different remote_source = slurpres.get('source') if remote_source and remote_source != source: source = remote_source # calculate the destination name if os.path.sep not in self._connection._shell.join_path('a', ''): source = self._connection._shell._unquote(source) source_local = source.replace('\\', '/') else: source_local = source dest = os.path.expanduser(dest) if flat: if os.path.isdir(to_bytes(dest, errors='surrogate_or_strict')) and not dest.endswith(os.sep): result['msg'] = "dest is an existing directory, use a trailing slash if you want to fetch src into that directory" result['file'] = dest result['failed'] = True return result if dest.endswith(os.sep): # if the path ends with "/", we'll use the source filename as the # destination filename base = os.path.basename(source_local) dest = 
os.path.join(dest, base) if not dest.startswith("/"): # if dest does not start with "/", we'll assume a relative path dest = self._loader.path_dwim(dest) else: # files are saved in dest dir, with a subdir for each host, then the filename if 'inventory_hostname' in task_vars: target_name = task_vars['inventory_hostname'] else: target_name = self._play_context.remote_addr dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local) dest = dest.replace("//", "/") if remote_checksum in ('0', '1', '2', '3', '4', '5'): result['changed'] = False result['file'] = source if remote_checksum == '0': result['msg'] = "unable to calculate the checksum of the remote file" elif remote_checksum == '1': result['msg'] = "the remote file does not exist" elif remote_checksum == '2': result['msg'] = "no read permission on remote file" elif remote_checksum == '3': result['msg'] = "remote file is a directory, fetch cannot work on directories" elif remote_checksum == '4': result['msg'] = "python isn't present on the system. Unable to compute checksum" elif remote_checksum == '5': result['msg'] = "stdlib json or simplejson was not found on the remote machine. Only the raw module can work without those installed" # Historically, these don't fail because you may want to transfer # a log file that possibly MAY exist but keep going to fetch other # log files. Today, this is better achieved by adding # ignore_errors or failed_when to the task. Control the behaviour # via fail_when_missing if fail_on_missing: result['failed'] = True del result['changed'] else: result['msg'] += ", not transferring, ignored" return result # calculate checksum for the local file local_checksum = checksum(dest) if remote_checksum != local_checksum: # create the containing directories, if needed makedirs_safe(os.path.dirname(dest)) # fetch the file and check for changes if remote_data is None: self._connection.fetch_file(source, dest) else: try: f = open(to_bytes(dest, errors='surrogate_or_strict'), 'wb') f.write(remote_data) f.close() except (IOError, OSError) as e: raise AnsibleError("Failed to fetch the file: %s" % e) new_checksum = secure_hash(dest) # For backwards compatibility. We'll return None on FIPS enabled systems try: new_md5 = md5(dest) except ValueError: new_md5 = None if validate_checksum and new_checksum != remote_checksum: result.update(dict(failed=True, md5sum=new_md5, msg="checksum mismatch", file=source, dest=dest, remote_md5sum=None, checksum=new_checksum, remote_checksum=remote_checksum)) else: result.update({'changed': True, 'md5sum': new_md5, 'dest': dest, 'remote_md5sum': None, 'checksum': new_checksum, 'remote_checksum': remote_checksum}) else: # For backwards compatibility. We'll return None on FIPS enabled systems try: local_md5 = md5(dest) except ValueError: local_md5 = None result.update(dict(changed=False, md5sum=local_md5, file=source, dest=dest, checksum=local_checksum)) finally: self._remove_tmp_path(self._connection._shell.tmpdir) return result
def run(self, terms, variables, **kwargs): ret = [] if not isinstance(terms, list): terms = [ terms ] for term in terms: # you can't have escaped spaces in yor pathname params = term.split() relpath = params[0] paramvals = { 'length': DEFAULT_LENGTH, 'encrypt': None, 'chars': ['ascii_letters','digits',".,:-_"], } # get non-default parameters if specified try: for param in params[1:]: name, value = param.split('=') assert(name in paramvals) if name == 'length': paramvals[name] = int(value) elif name == 'chars': use_chars=[] if ",," in value: use_chars.append(',') use_chars.extend(value.replace(',,',',').split(',')) paramvals['chars'] = use_chars else: paramvals[name] = value except (ValueError, AssertionError) as e: raise AnsibleError(e) length = paramvals['length'] encrypt = paramvals['encrypt'] use_chars = paramvals['chars'] # get password or create it if file doesn't exist path = self._loader.path_dwim(relpath) if not os.path.exists(path): pathdir = os.path.dirname(path) try: makedirs_safe(pathdir, mode=0o700) except OSError as e: raise AnsibleError("cannot create the path for the password lookup: %s (error was %s)" % (pathdir, str(e))) chars = "".join([getattr(string,c,c) for c in use_chars]).replace('"','').replace("'",'') password = ''.join(random.choice(chars) for _ in range(length)) if encrypt is not None: salt = self.random_salt() content = '%s salt=%s' % (password, salt) else: content = password with open(path, 'w') as f: os.chmod(path, 0o600) f.write(content + '\n') else: content = open(path).read().rstrip() sep = content.find(' ') if sep >= 0: password = content[:sep] salt = content[sep+1:].split('=')[1] else: password = content salt = None # crypt requested, add salt if missing if (encrypt is not None and not salt): salt = self.random_salt() content = '%s salt=%s' % (password, salt) with open(path, 'w') as f: os.chmod(path, 0o600) f.write(content + '\n') # crypt not requested, remove salt if present elif (encrypt is None and salt): with open(path, 'w') as f: os.chmod(path, 0o600) f.write(password + '\n') if encrypt: password = do_encrypt(password, encrypt, salt=salt) ret.append(password) return ret
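# --- Example (not part of the original source): hedged sketch of the term parsing and
# --- password generation performed by the lookup above. DEFAULT_LENGTH is assumed to be
# --- 20 here; the real constant is defined elsewhere in the plugin.
import random
import string

DEFAULT_LENGTH = 20  # assumption for this sketch

def parse_password_term(term):
    """Parse 'path/to/file length=16 chars=ascii_letters,digits' the way the lookup does."""
    params = term.split()
    relpath = params[0]
    opts = {'length': DEFAULT_LENGTH, 'encrypt': None,
            'chars': ['ascii_letters', 'digits', '.,:-_']}
    for param in params[1:]:
        name, value = param.split('=')
        if name == 'length':
            opts[name] = int(value)
        elif name == 'chars':
            use_chars = []
            if ',,' in value:
                use_chars.append(',')      # ',,' escapes a literal comma
            use_chars.extend(value.replace(',,', ',').split(','))
            opts['chars'] = use_chars
        else:
            opts[name] = value
    return relpath, opts

def generate_password(opts):
    # names such as 'ascii_letters' resolve to attributes of the string module;
    # anything else is taken literally, and quote characters are stripped as above
    chars = ''.join(getattr(string, c, c) for c in opts['chars']).replace('"', '').replace("'", '')
    return ''.join(random.choice(chars) for _ in range(opts['length']))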
def run(self, tmp=None, task_vars=None): ''' handler for fetch operations ''' if task_vars is None: task_vars = dict() result = super(ActionModule, self).run(tmp, task_vars) del tmp # tmp no longer has any effect try: result = dict(msg="", stderr="", stdout="", file="", md5sum="", dest="", remote_md5sum="", remote_checksum="", checksum="", delta="", failed=False) savf_name = '' created = False is_savf = False savf = '' ifs_created = False backup = False is_lib = False force_save = False flat = False if self._play_context.check_mode: result['skipped'] = True result[ 'msg'] = 'check mode not (yet) supported for this module' return result object_names = self._task.args.get('object_names', '*ALL') lib_name = self._task.args.get('lib_name', None) object_types = self._task.args.get('object_types', '*ALL') is_lib = boolean(self._task.args.get('is_lib', False), strict=True) savefile_name = self._task.args.get('savefile_name', None) force_save = boolean(self._task.args.get('force_save', False), strict=True) backup = boolean(self._task.args.get('backup', False), strict=True) format = self._task.args.get('format', '*SAVF') target_release = self._task.args.get('target_release', '*CURRENT') dest = self._task.args.get('dest', None) flat = boolean(self._task.args.get('flat', False), strict=True) fail_on_missing = boolean(self._task.args.get( 'fail_on_missing', True), strict=True) validate_checksum = boolean(self._task.args.get( 'validate_checksum', True), strict=True) # validate dest are strings FIXME: use basic.py and module specs if not isinstance(dest, string_types): result[ 'msg'] = "Invalid type supplied for dest option, it must be a string. " if lib_name is None or dest is None: result['msg'] = "lib_name and dest are required. " object_names = object_names.upper() object_types = object_types.upper() format = format.upper() target_release = target_release.upper() if lib_name is not None: lib_name = lib_name.upper() if savefile_name is not None: savefile_name = savefile_name.upper() if lib_name == 'QSYS' and (is_lib is True or (object_names == '*ALL' and object_types == '*ALL')): result['msg'] = "QSYS can't be saved." if format != "*SAVF": result['msg'] = "format can only be *SAVF." if result.get('msg'): result['failed'] = True return result startd = datetime.datetime.now() if len(object_names.split()) == 1 and is_lib is not True: if object_types == '*ALL' or object_types == '*FILE': if (object_names.split())[0][-1] == '*': module_args = { 'object_name': object_names[0:-1] + '+', 'lib_name': lib_name, 'use_regex': True } module_output = self._execute_module( module_name='ibmi_object_find', module_args=module_args) save_result = module_output if len(save_result['object_list']) == 1 and save_result['object_list'][0]['OBJTYPE'] == '*FILE' and \ save_result['object_list'][0]['OBJATTRIBUTE'] == 'SAVF': result[ 'msg'] += "Object is a save file, fetch it directly." savf_path = self._calculate_savf_path( save_result['object_list'][0]['OBJNAME'], lib_name) savf_name = save_result['object_list'][0][ 'OBJNAME'] is_savf = True else: module_args = { 'object_name': object_names, 'lib_name': lib_name } module_output = self._execute_module( module_name='ibmi_object_find', module_args=module_args) save_result = module_output if len(save_result['object_list']) == 1 and save_result['object_list'][0]['OBJTYPE'] == '*FILE' and \ save_result['object_list'][0]['OBJATTRIBUTE'] == 'SAVF': result[ 'msg'] += "Object is a save file, fetch it directly." 
savf_path = self._calculate_savf_path( object_names, lib_name) savf_name = object_names is_savf = True if is_savf is False: savf_name, savf_path = self._calculate_savf_name( object_names, lib_name, is_lib, savefile_name, task_vars, result) if is_lib is True: omitfile = 'OMITOBJ((%s/%s *FILE))' % (lib_name, savf_name) module_args = { 'lib_name': lib_name, 'savefile_name': savf_name, 'savefile_lib': lib_name, 'target_release': target_release, 'force_save': force_save, 'joblog': True, 'parameters': omitfile } module_output = self._execute_module( module_name='ibmi_lib_save', module_args=module_args) else: omitfile = 'OMITOBJ((%s/%s *FILE))' % (lib_name, savf_name) module_args = { 'object_names': object_names, 'object_lib': lib_name, 'object_types': object_types, 'savefile_name': savf_name, 'savefile_lib': lib_name, 'target_release': target_release, 'force_save': force_save, 'joblog': False, 'parameters': omitfile } module_output = self._execute_module( module_name='ibmi_object_save', module_args=module_args) save_result = module_output rc = save_result['rc'] if rc != 0 or ('CPC3708' in save_result['stdout']): result[ 'msg'] = 'Create SAVF failed. See stderr or stdout for more information.' result['failed'] = True result['stderr'] = save_result['stderr_lines'] result['stdout'] = save_result['stdout_lines'] return result created = True display.debug("savf_name = %s, savf_path = %s, force_save=%s" % (savf_name, savf_path, force_save)) source = savf_path commandmk = 'mkdir %s' % ifs_dir command = 'cp %s %s' % (savf_path, ifs_dir) try: module_output = self._execute_module( module_name='command', module_args={'_raw_params': commandmk}) save_result = module_output rc = save_result['rc'] display.debug("save_result['stderr_lines'] = %s" % (save_result['stderr_lines'])) if rc != 0 and ('exists' not in save_result['stderr']): result['msg'] = save_result['msg'] result['failed'] = True result['stderr'] = save_result['stderr_lines'] return result module_output = self._execute_module( module_name='command', module_args={'_raw_params': command}) save_result = module_output rc = save_result['rc'] if rc != 0: result['msg'] = save_result['msg'] result['failed'] = True result['stderr'] = save_result['stderr_lines'] result['stdout'] = save_result['stdout_lines'] return result ifs_created = True except Exception as e: result['msg'] = to_text(e) result['failed'] = True return result source = '%s/%s' % (ifs_dir, os.path.basename(savf_path)) if not isinstance(source, string_types): result[ 'msg'] = "Invalid type supplied for source option, it must be a string" result['failed'] = True return result source = self._connection._shell.join_path(source) source = self._remote_expand_user(source) remote_checksum = None if not self._connection.become: # calculate checksum for the remote file, don't bother if using become as slurp will be used # Force remote_checksum to follow symlinks because fetch always follows symlinks remote_checksum = self._remote_checksum(source, all_vars=task_vars, follow=True) # use slurp if permissions are lacking or privilege escalation is needed remote_data = None if remote_checksum in ('1', '2', None): slurpres = self._execute_module(module_name='slurp', module_args=dict(src=source), task_vars=task_vars) if slurpres.get('failed'): if not fail_on_missing and ( slurpres.get('msg').startswith('file not found') or remote_checksum == '1'): result[ 'msg'] = "the remote file does not exist, not transferring, ignored" result['file'] = source result['changed'] = False else: result.update(slurpres) return 
result else: if slurpres['encoding'] == 'base64': remote_data = base64.b64decode(slurpres['content']) if remote_data is not None: remote_checksum = checksum_s(remote_data) # the source path may have been expanded on the # target system, so we compare it here and use the # expanded version if it's different remote_source = slurpres.get('source') if remote_source and remote_source != source: source = remote_source # calculate the destination name if os.path.sep not in self._connection._shell.join_path('a', ''): source = self._connection._shell._unquote(source) qsys_source = self._connection._shell._unquote(savf_path) source_local = qsys_source.replace('\\', '/') else: source_local = savf_path dest = os.path.expanduser(dest) if flat: if not dest.startswith("/"): # if dest does not start with "/", we'll assume a relative path dest = self._loader.path_dwim(dest) base = os.path.basename(source_local) dest = os.path.join(dest, base) else: # files are saved in dest dir, with a subdir for each host, then the filename if 'inventory_hostname' in task_vars: target_name = task_vars['inventory_hostname'] else: target_name = self._play_context.remote_addr dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local) dest = dest.replace("//", "/") if remote_checksum in ('0', '1', '2', '3', '4', '5'): result['changed'] = False result['file'] = source if remote_checksum == '0': result[ 'msg'] = "unable to calculate the checksum of the remote file" elif remote_checksum == '1': result['msg'] = "the remote file does not exist" elif remote_checksum == '2': result['msg'] = "no read permission on remote file" elif remote_checksum == '3': result[ 'msg'] = "remote file is a directory, fetch cannot work on directories" elif remote_checksum == '4': result[ 'msg'] = "python isn't present on the system. Unable to compute checksum" elif remote_checksum == '5': result[ 'msg'] = "stdlib json was not found on the remote machine. Only the raw module can work without those installed" # Historically, these don't fail because you may want to transfer # a log file that possibly MAY exist but keep going to fetch other # log files. Today, this is better achieved by adding # ignore_errors or failed_when to the task. Control the behaviour # via fail_when_missing if fail_on_missing: result['failed'] = True del result['changed'] else: result['msg'] += ", not transferring, ignored" return result # calculate checksum for the local file local_checksum = checksum(dest) if remote_checksum != local_checksum: # create the containing directories, if needed makedirs_safe(os.path.dirname(dest)) # fetch the file and check for changes if remote_data is None: self._connection.fetch_file(source, dest) else: try: f = open(to_bytes(dest, errors='surrogate_or_strict'), 'wb') f.write(remote_data) f.close() except (IOError, OSError) as e: raise AnsibleError("Failed to fetch the file: %s" % e) new_checksum = secure_hash(dest) # For backwards compatibility. We'll return None on FIPS enabled systems try: new_md5 = md5(dest) except ValueError: new_md5 = None if validate_checksum and new_checksum != remote_checksum: result.update( dict(failed=True, md5sum=new_md5, msg="checksum mismatch", file=savf, dest=dest, remote_md5sum=None, checksum=new_checksum, remote_checksum=remote_checksum)) else: endd = datetime.datetime.now() delta = endd - startd if (created is True and backup is True) or is_savf is True: savf = savf_path result['msg'] += " File is renewed on local." 
result.update({ 'changed': True, 'md5sum': new_md5, 'dest': dest, 'remote_md5sum': None, 'checksum': new_checksum, 'remote_checksum': remote_checksum, 'delta': str(delta), 'file': savf }) else: # For backwards compatibility. We'll return None on FIPS enabled systems try: local_md5 = md5(dest) except ValueError: local_md5 = None endd = datetime.datetime.now() delta = endd - startd if (created is True and backup is True) or is_savf is True: savf = savf_path result.update( dict(changed=False, md5sum=local_md5, file=savf, delta=str(delta), dest=dest, checksum=local_checksum)) except Exception as e: result['msg'] += "%s" % to_text(e) result['failed'] = True return result finally: if ((backup is False and is_savf is False) or result['failed'] is True) and created is True: cmd = 'DLTOBJ OBJ(%s/%s) OBJTYPE(*FILE)' % (lib_name, savf_name) module_output = self._execute_module( module_name='ibmi_cl_command', module_args={'cmd': cmd}) save_result = module_output rc = save_result['rc'] if rc != 0 and ('CPF2105' not in save_result['stderr']): result['msg'] += "Failed to delete SAVF on remote" if ifs_created is True: cmd = 'rm %s/%s' % (ifs_dir, os.path.basename(savf_path)) try: module_output = self._execute_module( module_name='command', module_args={'_raw_params': cmd}) save_result = module_output rc = save_result['rc'] if rc != 0: result['msg'] += "Failed to delete IFS on remote" except Exception as e: result[ 'msg'] += "exception happens when delete IFS file. error: %s" % to_text( e) self._remove_tmp_path(self._connection._shell.tmpdir) return result
def ensure_type(value, value_type): ''' return a configuration variable with casting :arg value: The value to ensure correct typing of :kwarg value_type: The type of the value. This can be any of the following strings: :boolean: sets the value to a True or False value :integer: Sets the value to an integer or raises a ValueType error :float: Sets the value to a float or raises a ValueType error :list: Treats the value as a comma separated list. Split the value and return it as a python list. :none: Sets the value to None :path: Expands any environment variables and tilde's in the value. :tmp_path: Create a unique temporary directory inside of the directory specified by value and return its path. :pathlist: Treat the value as a typical PATH string. (On POSIX, this means colon separated strings.) Split the value and then expand each part for environment variables and tildes. ''' if value_type: value_type = value_type.lower() if value_type in ('boolean', 'bool'): value = boolean(value, strict=False) elif value: if value_type in ('integer', 'int'): value = int(value) elif value_type == 'float': value = float(value) elif value_type == 'list': if isinstance(value, string_types): value = [x.strip() for x in value.split(',')] elif value_type == 'none': if value == "None": value = None elif value_type == 'path': value = resolve_path(value) elif value_type in ('tmp', 'temppath', 'tmppath'): value = resolve_path(value) if not os.path.exists(value): makedirs_safe(value, 0o700) prefix = 'ansible-local-%s' % os.getpid() value = tempfile.mkdtemp(prefix=prefix, dir=value) elif value_type == 'pathspec': if isinstance(value, string_types): value = value.split(os.pathsep) value = [resolve_path(x) for x in value] elif value_type == 'pathlist': if isinstance(value, string_types): value = value.split(',') value = [resolve_path(x) for x in value] # defaults to string types elif isinstance(value, string_types): value = unquote(value) return to_text(value, errors='surrogate_or_strict', nonstring='passthru')
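# --- Example (not part of the original source): quick sanity checks for ensure_type's
# --- casting. The import path is an assumption about where the function above lives;
# --- the expected values follow directly from the branches shown.
from ansible.config.manager import ensure_type  # assumed location of the function above

assert ensure_type('yes', 'boolean') is True
assert ensure_type('42', 'integer') == 42
assert ensure_type(' a, b ,c ', 'list') == ['a', 'b', 'c']
assert ensure_type('None', 'none') is None
# 'path', 'tmp' and 'pathlist' additionally expand '~' and environment variables;
# 'tmp' also creates a unique ansible-local-<pid> directory under the expanded value.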
def _build_command(self, binary, *other_args): ''' Takes a binary (ssh, scp, sftp) and optional extra arguments and returns a command line as an array that can be passed to subprocess.Popen. ''' b_command = [] # # First, the command to invoke # # If we want to use password authentication, we have to set up a pipe to # write the password to sshpass. if self._play_context.password: if not self._sshpass_available(): raise AnsibleError("to use the 'ssh' connection type with passwords, you must install the sshpass program") self.sshpass_pipe = os.pipe() b_command += [b'sshpass', b'-d' + to_bytes(self.sshpass_pipe[0], nonstring='simplerepr', errors='surrogate_or_strict')] if binary == 'ssh': b_command += [to_bytes(self._play_context.ssh_executable, errors='surrogate_or_strict')] else: b_command += [to_bytes(binary, errors='surrogate_or_strict')] # # Next, additional arguments based on the configuration. # # sftp batch mode allows us to correctly catch failed transfers, but can # be disabled if the client side doesn't support the option. However, # sftp batch mode does not prompt for passwords so it must be disabled # if not using controlpersist and using sshpass if binary == 'sftp' and C.DEFAULT_SFTP_BATCH_MODE: if self._play_context.password: b_args = [b'-o', b'BatchMode=no'] self._add_args(b_command, b_args, u'disable batch mode for sshpass') b_command += [b'-b', b'-'] if self._play_context.verbosity > 3: b_command.append(b'-vvv') # # Next, we add [ssh_connection]ssh_args from ansible.cfg. # if self._play_context.ssh_args: b_args = [to_bytes(a, errors='surrogate_or_strict') for a in self._split_ssh_args(self._play_context.ssh_args)] self._add_args(b_command, b_args, u"ansible.cfg set ssh_args") # Now we add various arguments controlled by configuration file settings # (e.g. host_key_checking) or inventory variables (ansible_ssh_port) or # a combination thereof. if not C.HOST_KEY_CHECKING: b_args = (b"-o", b"StrictHostKeyChecking=no") self._add_args(b_command, b_args, u"ANSIBLE_HOST_KEY_CHECKING/host_key_checking disabled") if self._play_context.port is not None: b_args = (b"-o", b"Port=" + to_bytes(self._play_context.port, nonstring='simplerepr', errors='surrogate_or_strict')) self._add_args(b_command, b_args, u"ANSIBLE_REMOTE_PORT/remote_port/ansible_port set") key = self._play_context.private_key_file if key: b_args = (b"-o", b'IdentityFile="' + to_bytes(os.path.expanduser(key), errors='surrogate_or_strict') + b'"') self._add_args(b_command, b_args, u"ANSIBLE_PRIVATE_KEY_FILE/private_key_file/ansible_ssh_private_key_file set") if not self._play_context.password: self._add_args( b_command, ( b"-o", b"KbdInteractiveAuthentication=no", b"-o", b"PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey", b"-o", b"PasswordAuthentication=no" ), u"ansible_password/ansible_ssh_pass not set" ) user = self._play_context.remote_user if user: self._add_args( b_command, (b"-o", b"User=" + to_bytes(self._play_context.remote_user, errors='surrogate_or_strict')), u"ANSIBLE_REMOTE_USER/remote_user/ansible_user/user/-u set" ) self._add_args( b_command, (b"-o", b"ConnectTimeout=" + to_bytes(self._play_context.timeout, errors='surrogate_or_strict', nonstring='simplerepr')), u"ANSIBLE_TIMEOUT/timeout set" ) # Add in any common or binary-specific arguments from the PlayContext # (i.e. inventory or task settings or overrides on the command line). 
for opt in (u'ssh_common_args', u'{0}_extra_args'.format(binary)): attr = getattr(self._play_context, opt, None) if attr is not None: b_args = [to_bytes(a, errors='surrogate_or_strict') for a in self._split_ssh_args(attr)] self._add_args(b_command, b_args, u"PlayContext set %s" % opt) # Check if ControlPersist is enabled and add a ControlPath if one hasn't # already been set. controlpersist, controlpath = self._persistence_controls(b_command) if controlpersist: self._persistent = True if not controlpath: cpdir = unfrackpath(self.control_path_dir) b_cpdir = to_bytes(cpdir, errors='surrogate_or_strict') # The directory must exist and be writable. makedirs_safe(b_cpdir, 0o700) if not os.access(b_cpdir, os.W_OK): raise AnsibleError("Cannot write to ControlPath %s" % to_native(cpdir)) if not self.control_path: self.control_path = self._create_control_path( self.host, self.port, self.user ) b_args = (b"-o", b"ControlPath=" + to_bytes(self.control_path % dict(directory=cpdir), errors='surrogate_or_strict')) self._add_args(b_command, b_args, u"found only ControlPersist; added ControlPath") # Finally, we add any caller-supplied extras. if other_args: b_command += [to_bytes(a) for a in other_args] return b_command
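# --- Example (not part of the original source): hedged sketch of the sshpass -d<fd>
# --- pattern that _build_command sets up above, where the password is written to an
# --- inherited pipe instead of appearing on the command line. Host, password and options
# --- are placeholders, and sshpass/ssh must be installed for this to actually run.
import os
import subprocess

rfd, wfd = os.pipe()
cmd = [b'sshpass', b'-d' + str(rfd).encode(),
       b'ssh', b'-o', b'BatchMode=no', b'-o', b'ConnectTimeout=10',
       b'example.host', b'true']
# the read end of the pipe must stay open in the child so sshpass can read the password
proc = subprocess.Popen(cmd, pass_fds=(rfd,))
os.write(wfd, b'not-a-real-password\n')
os.close(wfd)
proc.wait()
os.close(rfd)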
def run(self, tmp=None, task_vars=None): ''' handler for fetch operations ''' if task_vars is None: task_vars = dict() result = super(ActionModule, self).run(tmp, task_vars) if self._play_context.check_mode: result['skipped'] = True result['msg'] = 'check mode not (yet) supported for this module' return result source = self._task.args.get('src', None) dest = self._task.args.get('dest', None) flat = boolean(self._task.args.get('flat')) fail_on_missing = boolean(self._task.args.get('fail_on_missing')) validate_checksum = boolean(self._task.args.get('validate_checksum', self._task.args.get('validate_md5'))) if 'validate_md5' in self._task.args and 'validate_checksum' in self._task.args: result['failed'] = True result['msg'] = "validate_checksum and validate_md5 cannot both be specified" return result if source is None or dest is None: result['failed'] = True result['msg'] = "src and dest are required" return result source = self._connection._shell.join_path(source) source = self._remote_expand_user(source) remote_checksum = None if not self._play_context.become: # calculate checksum for the remote file, don't bother if using become as slurp will be used remote_checksum = self._remote_checksum(source, all_vars=task_vars) # use slurp if permissions are lacking or privilege escalation is needed remote_data = None if remote_checksum in ('1', '2', None): slurpres = self._execute_module(module_name='slurp', module_args=dict(src=source), task_vars=task_vars, tmp=tmp) if slurpres.get('failed'): if not fail_on_missing and (slurpres.get('msg').startswith('file not found') or remote_checksum == '1'): result['msg'] = "the remote file does not exist, not transferring, ignored" result['file'] = source result['changed'] = False else: result.update(slurpres) return result else: if slurpres['encoding'] == 'base64': remote_data = base64.b64decode(slurpres['content']) if remote_data is not None: remote_checksum = checksum_s(remote_data) # the source path may have been expanded on the # target system, so we compare it here and use the # expanded version if it's different remote_source = slurpres.get('source') if remote_source and remote_source != source: source = remote_source # calculate the destination name if os.path.sep not in self._connection._shell.join_path('a', ''): source = self._connection._shell._unquote(source) source_local = source.replace('\\', '/') else: source_local = source dest = os.path.expanduser(dest) if flat: if dest.endswith(os.sep): # if the path ends with "/", we'll use the source filename as the # destination filename base = os.path.basename(source_local) dest = os.path.join(dest, base) if not dest.startswith("/"): # if dest does not start with "/", we'll assume a relative path dest = self._loader.path_dwim(dest) else: # files are saved in dest dir, with a subdir for each host, then the filename if 'inventory_hostname' in task_vars: target_name = task_vars['inventory_hostname'] else: target_name = self._play_context.remote_addr dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local) dest = dest.replace("//","/") if remote_checksum in ('0', '1', '2', '3', '4'): # these don't fail because you may want to transfer a log file that # possibly MAY exist but keep going to fetch other log files if remote_checksum == '0': result['msg'] = "unable to calculate the checksum of the remote file" result['file'] = source result['changed'] = False elif remote_checksum == '1': if fail_on_missing: result['failed'] = True result['msg'] = "the remote file does not exist" result['file'] 
= source else: result['msg'] = "the remote file does not exist, not transferring, ignored" result['file'] = source result['changed'] = False elif remote_checksum == '2': result['msg'] = "no read permission on remote file, not transferring, ignored" result['file'] = source result['changed'] = False elif remote_checksum == '3': result['msg'] = "remote file is a directory, fetch cannot work on directories" result['file'] = source result['changed'] = False elif remote_checksum == '4': result['msg'] = "python isn't present on the system. Unable to compute checksum" result['file'] = source result['changed'] = False return result # calculate checksum for the local file local_checksum = checksum(dest) if remote_checksum != local_checksum: # create the containing directories, if needed makedirs_safe(os.path.dirname(dest)) # fetch the file and check for changes if remote_data is None: self._connection.fetch_file(source, dest) else: try: f = open(to_bytes(dest, errors='strict'), 'w') f.write(remote_data) f.close() except (IOError, OSError) as e: raise AnsibleError("Failed to fetch the file: %s" % e) new_checksum = secure_hash(dest) # For backwards compatibility. We'll return None on FIPS enabled systems try: new_md5 = md5(dest) except ValueError: new_md5 = None if validate_checksum and new_checksum != remote_checksum: result.update(dict(failed=True, md5sum=new_md5, msg="checksum mismatch", file=source, dest=dest, remote_md5sum=None, checksum=new_checksum, remote_checksum=remote_checksum)) else: result.update(dict(changed=True, md5sum=new_md5, dest=dest, remote_md5sum=None, checksum=new_checksum, remote_checksum=remote_checksum)) else: # For backwards compatibility. We'll return None on FIPS enabled systems try: local_md5 = md5(dest) except ValueError: local_md5 = None result.update(dict(changed=False, md5sum=local_md5, file=source, dest=dest, checksum=local_checksum)) return result
def _build_command(self, binary, *other_args): self._command = [] self._command += [binary] self._command += ['-C'] if self._play_context.verbosity > 3: self._command += ['-vvv'] elif binary == 'ssh': # Older versions of ssh (e.g. in RHEL 6) don't accept sftp -q. self._command += ['-q'] # Next, we add [ssh_connection]ssh_args from ansible.cfg. # if self._play_context.ssh_args: # args = self._split_args(self._play_context.ssh_args) # self._add_args("ansible.cfg set ssh_args", args) # Now we add various arguments controlled by configuration file settings # (e.g. host_key_checking) or inventory variables (ansible_ssh_port) or # a combination thereof. if not C.HOST_KEY_CHECKING: self._add_args( "ANSIBLE_HOST_KEY_CHECKING/host_key_checking disabled", ("-o", "StrictHostKeyChecking=no") ) if self._play_context.port is not None: self._add_args( "ANSIBLE_REMOTE_PORT/remote_port/ansible_port set", ("-o", "Port={0}".format(self._play_context.port)) ) key = self._play_context.private_key_file if key: self._add_args( "ANSIBLE_PRIVATE_KEY_FILE/private_key_file/ansible_ssh_private_key_file set", ("-o", "IdentityFile=\"{0}\"".format(os.path.expanduser(key))) ) if not self._play_context.password: self._add_args( "ansible_password/ansible_ssh_pass not set", ( "-o", "KbdInteractiveAuthentication=no", "-o", "PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey", "-o", "PasswordAuthentication=no" ) ) user = self._play_context.remote_user if user: self._add_args( "ANSIBLE_REMOTE_USER/remote_user/ansible_user/user/-u set", ("-o", "User={0}".format(to_bytes(self._play_context.remote_user))) ) self._add_args( "ANSIBLE_TIMEOUT/timeout set", ("-o", "ConnectTimeout={0}".format(self._play_context.timeout)) ) # Check if ControlPersist is enabled and add a ControlPath if one hasn't # already been set. controlpersist, controlpath = self._persistence_controls(self._command) if controlpersist: self._persistent = True if not controlpath: cpdir = unfrackpath('$HOME/.ansible/cp') # The directory must exist and be writable. makedirs_safe(cpdir, 0o700) if not os.access(cpdir, os.W_OK): raise AnsibleError("Cannot write to ControlPath %s" % cpdir) args = ("-o", "ControlPath={0}".format( to_bytes(C.ANSIBLE_SSH_CONTROL_PATH % dict(directory=cpdir))) ) self._add_args("found only ControlPersist; added ControlPath", args) ## Finally, we add any caller-supplied extras. if other_args: self._command += other_args return self._command
def xml_to_json(self, xml_data): """ The method translates XML data to JSON data encoded as per YANG model (RFC 7951) :param xml_data: XML data or file path containing xml data that should to translated to JSON :return: data in JSON format. """ plugindir = unfrackpath(XM2JSONL_DIR_PATH) makedirs_safe(plugindir) if os.path.isfile(xml_data): # input is xml file path xml_file_path = os.path.realpath(os.path.expanduser(xml_data)) else: # input is xml string, copy it to file in temporary location xml_file_path = os.path.join(XM2JSONL_DIR_PATH, "%s.%s" % (str(uuid.uuid4()), "xml")) xml_file_path = os.path.realpath(os.path.expanduser(xml_file_path)) with open(xml_file_path, "w") as f: if not xml_data.startswith("<?xml version"): xml_data = ('<?xml version="1.0" encoding="UTF-8"?>\n' + xml_data) data = xml_data f.write(data) xml_file_path = os.path.realpath(os.path.expanduser(xml_file_path)) try: # validate xml etree.parse(xml_file_path) display.vvvv("Parsing xml data from temporary file: %s" % xml_file_path) except Exception as exc: if not self._keep_tmp_files: shutil.rmtree( os.path.realpath(os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True, ) raise AnsibleError("Failed to load xml data: %s" % (to_text(exc, errors="surrogate_or_strict"))) base_pyang_path = sys.modules["pyang"].__file__ pyang_exec_path = find_file_in_path("pyang") pyang_exec = imp.load_source("pyang", pyang_exec_path) saved_arg = deepcopy(sys.argv) sys.modules["pyang"].__file__ = base_pyang_path saved_stdout = sys.stdout saved_stderr = sys.stderr sys.stdout = sys.stderr = StringIO() xsl_file_path = os.path.join(XM2JSONL_DIR_PATH, "%s.%s" % (str(uuid.uuid4()), "xsl")) json_file_path = os.path.join(XM2JSONL_DIR_PATH, "%s.%s" % (str(uuid.uuid4()), "json")) xls_file_path = os.path.realpath(os.path.expanduser(xsl_file_path)) json_file_path = os.path.realpath(os.path.expanduser(json_file_path)) # fill in the sys args before invoking pyang sys.argv = [ pyang_exec_path, "-f", "jsonxsl", "-o", xls_file_path, "-p", self._search_path, "--lax-quote-checks", ] + self._yang_files display.display( "Generating xsl file '%s' by executing command '%s'" % (xls_file_path, " ".join(sys.argv)), log_only=True, ) try: pyang_exec.run() except SystemExit: pass except Exception as e: if not self._keep_tmp_files: shutil.rmtree( os.path.realpath(os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True, ) raise AnsibleError( "Error while generating intermediate (xsl) file: %s" % e) finally: err = sys.stderr.getvalue() if err and "error" in err.lower(): if not self._keep_tmp_files: shutil.rmtree( os.path.realpath( os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True, ) raise AnsibleError( "Error while generating (xsl) intermediate file: %s" % err) xsltproc_exec_path = find_file_in_path("xsltproc") # fill in the sys args before invoking xsltproc sys.argv = [ xsltproc_exec_path, "-o", json_file_path, xsl_file_path, xml_file_path, ] display.display( "Generating json data in temp file '%s' by executing command '%s'" % (json_file_path, " ".join(sys.argv)), log_only=True, ) time.sleep(5) try: os.system(" ".join(sys.argv)) except SystemExit: pass finally: err = sys.stderr.getvalue() if err and "error" in err.lower(): if not self._keep_tmp_files: shutil.rmtree( os.path.realpath( os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True, ) raise AnsibleError("Error while translating to json: %s" % err) sys.argv = saved_arg sys.stdout = saved_stdout sys.stderr = saved_stderr try: display.vvvv("Reading output json data from temporary file: %s" % 
json_file_path) with open(json_file_path, "r") as fp: raw_content = fp.read() content = json.loads(raw_content) except Exception as e: raise AnsibleError( "Error while reading json document %s with content %s" % (e, raw_content)) finally: if not self._keep_tmp_files: shutil.rmtree( os.path.realpath(os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True, ) return content
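# --- Example (not part of the original source): hedged alternative to the
# --- os.system(" ".join(sys.argv)) plus time.sleep(5) invocation above, using
# --- subprocess.run so xsltproc's exit status and stderr are captured directly.
import subprocess

def run_xsltproc(xsltproc_path, xsl_file, xml_file, out_json):
    """Run xsltproc and raise on a non-zero exit, returning its stderr for diagnostics."""
    completed = subprocess.run(
        [xsltproc_path, "-o", out_json, xsl_file, xml_file],
        capture_output=True, text=True,
    )
    if completed.returncode != 0:
        raise RuntimeError("xsltproc failed (%d): %s"
                           % (completed.returncode, completed.stderr))
    return completed.stderr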
def fetch_file(self, in_path, out_path): super(Connection, self).fetch_file(in_path, out_path) display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self._psrp_host) in_path = self._shell._unquote(in_path) out_path = out_path.replace('\\', '/') # because we are dealing with base64 data we need to get the max size # of the bytes that the base64 size would equal max_b64_size = int(self.runspace.connection.max_payload_size - (self.runspace.connection.max_payload_size / 4 * 3)) buffer_size = max_b64_size - (max_b64_size % 1024) # setup the file stream with read only mode setup_script = '''$ErrorActionPreference = "Stop" $path = "%s" if (Test-Path -Path $path -PathType Leaf) { $fs = New-Object -TypeName System.IO.FileStream -ArgumentList @( $path, [System.IO.FileMode]::Open, [System.IO.FileAccess]::Read, [System.IO.FileShare]::Read ) $buffer_size = %d } elseif (Test-Path -Path $path -PathType Container) { Write-Output -InputObject "[DIR]" } else { Write-Error -Message "$path does not exist" $host.SetShouldExit(1) }''' % (self._shell._escape(in_path), buffer_size) # read the file stream at the offset and return the b64 string read_script = '''$ErrorActionPreference = "Stop" $fs.Seek(%d, [System.IO.SeekOrigin]::Begin) > $null $buffer = New-Object -TypeName byte[] -ArgumentList $buffer_size $bytes_read = $fs.Read($buffer, 0, $buffer_size) if ($bytes_read -gt 0) { $bytes = $buffer[0..($bytes_read - 1)] Write-Output -InputObject ([System.Convert]::ToBase64String($bytes)) }''' # need to run the setup script outside of the local scope so the # file stream stays active between fetch operations rc, stdout, stderr = self._exec_psrp_script(setup_script, use_local_scope=False) if rc != 0: raise AnsibleError( "failed to setup file stream for fetch '%s': %s" % (out_path, to_native(stderr))) elif stdout.strip() == '[DIR]': # in_path was a dir so we need to create the dir locally makedirs_safe(out_path) return b_out_path = to_bytes(out_path, errors='surrogate_or_strict') makedirs_safe(os.path.dirname(b_out_path)) offset = 0 with open(b_out_path, 'wb') as out_file: while True: display.vvvvv("PSRP FETCH %s to %s (offset=%d" % (in_path, out_path, offset), host=self._psrp_host) rc, stdout, stderr = \ self._exec_psrp_script(read_script % offset) if rc != 0: raise AnsibleError("failed to transfer file to '%s': %s" % (out_path, to_native(stderr))) data = base64.b64decode(stdout.strip()) out_file.write(data) if len(data) < buffer_size: break rc, stdout, stderr = self._exec_psrp_script("$fs.Close()") if rc != 0: display.warning("failed to close remote file stream of file " "'%s': %s" % (in_path, to_native(stderr)))
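# --- Example (not part of the original source): base64 expands every 3 raw bytes into 4
# --- output characters, which is why the fetch above sizes its reads against the payload
# --- budget. A general, hedged sketch of deriving a raw chunk size from an encoded-size
# --- budget; it simplifies, rather than reproduces, the arithmetic used above.
import base64

def raw_chunk_size(max_encoded_bytes, align=1024):
    """Largest raw read size whose base64 form stays within max_encoded_bytes."""
    raw = max_encoded_bytes // 4 * 3
    return raw - (raw % align) if raw >= align else raw

# sanity check: the encoded form never exceeds the budget
budget = 150000
chunk = raw_chunk_size(budget)
assert len(base64.b64encode(b'x' * chunk)) <= budget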
def fetch_file(self, in_path, out_path): super(Connection, self).fetch_file(in_path, out_path) in_path = self._shell._unquote(in_path) out_path = out_path.replace('\\', '/') display.vvv('FETCH "%s" TO "%s"' % (in_path, out_path), host=self._winrm_host) buffer_size = 2**19 # 0.5MB chunks makedirs_safe(os.path.dirname(out_path)) out_file = None try: offset = 0 while True: try: script = ''' If (Test-Path -PathType Leaf "%(path)s") { $stream = New-Object IO.FileStream("%(path)s", [System.IO.FileMode]::Open, [System.IO.FileAccess]::Read, [IO.FileShare]::ReadWrite); $stream.Seek(%(offset)d, [System.IO.SeekOrigin]::Begin) | Out-Null; $buffer = New-Object Byte[] %(buffer_size)d; $bytesRead = $stream.Read($buffer, 0, %(buffer_size)d); $bytes = $buffer[0..($bytesRead-1)]; [System.Convert]::ToBase64String($bytes); $stream.Close() | Out-Null; } ElseIf (Test-Path -PathType Container "%(path)s") { Write-Host "[DIR]"; } Else { Write-Error "%(path)s does not exist"; Exit 1; } ''' % dict(buffer_size=buffer_size, path=self._shell._escape(in_path), offset=offset) display.vvvvv('WINRM FETCH "%s" to "%s" (offset=%d)' % (in_path, out_path, offset), host=self._winrm_host) cmd_parts = self._shell._encode_script(script, as_list=True, preserve_rc=False) result = self._winrm_exec(cmd_parts[0], cmd_parts[1:]) if result.status_code != 0: raise IOError(to_native(result.std_err)) if result.std_out.strip() == '[DIR]': data = None else: data = base64.b64decode(result.std_out.strip()) if data is None: makedirs_safe(out_path) break else: if not out_file: # If out_path is a directory and we're expecting a file, bail out now. if os.path.isdir( to_bytes(out_path, errors='surrogate_or_strict')): break out_file = open( to_bytes(out_path, errors='surrogate_or_strict'), 'wb') out_file.write(data) if len(data) < buffer_size: break offset += len(data) except Exception: traceback.print_exc() raise AnsibleError('failed to transfer file to "%s"' % out_path) finally: if out_file: out_file.close()
def _build_command(self, binary, *other_args): """ Takes a binary (ssh, scp, sftp) and optional extra arguments and returns a command line as an array that can be passed to subprocess.Popen. """ self._command = [] ## First, the command name. # If we want to use password authentication, we have to set up a pipe to # write the password to sshpass. if self._play_context.password: if not self._sshpass_available(): raise AnsibleError( "to use the 'ssh' connection type with passwords, you must install the sshpass program" ) self.sshpass_pipe = os.pipe() self._command += ["sshpass", "-d{0}".format(self.sshpass_pipe[0])] self._command += [binary] ## Next, additional arguments based on the configuration. # sftp batch mode allows us to correctly catch failed transfers, but can # be disabled if the client side doesn't support the option. However, # sftp batch mode does not prompt for passwords so it must be disabled # if not using controlpersist and using sshpass if binary == "sftp" and C.DEFAULT_SFTP_BATCH_MODE: if self._play_context.password: self._add_args("disable batch mode for sshpass", ["-o", "BatchMode=no"]) self._command += ["-b", "-"] if self._play_context.verbosity > 3: self._command += ["-vvv"] elif binary == "ssh": # Older versions of ssh (e.g. in RHEL 6) don't accept sftp -q. self._command += ["-q"] # Next, we add [ssh_connection]ssh_args from ansible.cfg. if self._play_context.ssh_args: args = self._split_ssh_args(self._play_context.ssh_args) self._add_args("ansible.cfg set ssh_args", args) # Now we add various arguments controlled by configuration file settings # (e.g. host_key_checking) or inventory variables (ansible_ssh_port) or # a combination thereof. if not C.HOST_KEY_CHECKING: self._add_args("ANSIBLE_HOST_KEY_CHECKING/host_key_checking disabled", ("-o", "StrictHostKeyChecking=no")) if self._play_context.port is not None: self._add_args( "ANSIBLE_REMOTE_PORT/remote_port/ansible_port set", ("-o", "Port={0}".format(self._play_context.port)) ) key = self._play_context.private_key_file if key: self._add_args( "ANSIBLE_PRIVATE_KEY_FILE/private_key_file/ansible_ssh_private_key_file set", ("-o", 'IdentityFile="{0}"'.format(os.path.expanduser(key))), ) if not self._play_context.password: self._add_args( "ansible_password/ansible_ssh_pass not set", ( "-o", "KbdInteractiveAuthentication=no", "-o", "PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey", "-o", "PasswordAuthentication=no", ), ) user = self._play_context.remote_user if user: self._add_args( "ANSIBLE_REMOTE_USER/remote_user/ansible_user/user/-u set", ("-o", "User={0}".format(to_bytes(self._play_context.remote_user))), ) self._add_args("ANSIBLE_TIMEOUT/timeout set", ("-o", "ConnectTimeout={0}".format(self._play_context.timeout))) # Add in any common or binary-specific arguments from the PlayContext # (i.e. inventory or task settings or overrides on the command line). for opt in ["ssh_common_args", binary + "_extra_args"]: attr = getattr(self._play_context, opt, None) if attr is not None: args = self._split_ssh_args(attr) self._add_args("PlayContext set %s" % opt, args) # Check if ControlPersist is enabled and add a ControlPath if one hasn't # already been set. controlpersist, controlpath = self._persistence_controls(self._command) if controlpersist: self._persistent = True if not controlpath: cpdir = unfrackpath("$HOME/.ansible/cp") # The directory must exist and be writable. 
makedirs_safe(cpdir, 0o700) if not os.access(cpdir, os.W_OK): raise AnsibleError("Cannot write to ControlPath %s" % cpdir) args = ("-o", "ControlPath={0}".format(to_bytes(C.ANSIBLE_SSH_CONTROL_PATH % dict(directory=cpdir)))) self._add_args("found only ControlPersist; added ControlPath", args) ## Finally, we add any caller-supplied extras. if other_args: self._command += other_args return self._command
def run(self, terms, variables, **kwargs): res = [] try: json_config = terms[0] except IndexError: raise AnsibleError("path to json file must be specified") try: yang_file = kwargs['yang_file'] except KeyError: raise AnsibleError("value of 'yang_file' must be specified") yang_file = os.path.realpath(os.path.expanduser(yang_file)) if not os.path.isfile(yang_file): # Maybe we are passing a glob? yang_files = glob.glob(yang_file) if not yang_files: # Glob returned no files raise AnsibleError('%s invalid file path' % yang_file) else: yang_files = [yang_file] search_path = kwargs.pop('search_path', '') keep_tmp_files = kwargs.pop('keep_tmp_files', False) abs_search_path = None for path in search_path.split(':'): path = os.path.realpath(os.path.expanduser(path)) if abs_search_path is None: abs_search_path = path else: abs_search_path += ':' + path if path is not '' and not os.path.isdir(path): raise AnsibleError('%s is invalid directory path' % path) search_path = abs_search_path json_config = os.path.realpath(os.path.expanduser(json_config)) try: # validate json with open(json_config) as fp: json.load(fp) except Exception as exc: raise AnsibleError("Failed to load json configuration: %s" % (to_text(exc, errors='surrogate_or_strict'))) root_node = kwargs.get('root', 'config') base_pyang_path = sys.modules['pyang'].__file__ pyang_exec_path = find_file_in_path('pyang') pyang_exec = imp.load_source('pyang', pyang_exec_path) saved_arg = deepcopy(sys.argv) sys.modules['pyang'].__file__ = base_pyang_path saved_stdout = sys.stdout saved_stderr = sys.stderr sys.stdout = sys.stderr = StringIO() plugindir = unfrackpath(JSON2XML_DIR_PATH) makedirs_safe(plugindir) jtox_file_path = os.path.join(JSON2XML_DIR_PATH, '%s.%s' % (str(uuid.uuid4()), 'jtox')) xml_file_path = os.path.join(JSON2XML_DIR_PATH, '%s.%s' % (str(uuid.uuid4()), 'xml')) jtox_file_path = os.path.realpath(os.path.expanduser(jtox_file_path)) xml_file_path = os.path.realpath(os.path.expanduser(xml_file_path)) # fill in the sys args before invoking pyang sys.argv = [ pyang_exec_path, '-f', 'jtox', '-o', jtox_file_path, '-p', search_path, "--lax-quote-checks" ] + yang_files try: pyang_exec.run() except SystemExit: pass except Exception as e: shutil.rmtree(os.path.realpath( os.path.expanduser(JSON2XML_DIR_PATH)), ignore_errors=True) raise AnsibleError( 'Error while generating intermediate (jtox) file: %s' % e) finally: err = sys.stderr.getvalue() if err and 'error' in err.lower(): if not keep_tmp_files: shutil.rmtree(os.path.realpath( os.path.expanduser(JSON2XML_DIR_PATH)), ignore_errors=True) raise AnsibleError( 'Error while generating intermediate (jtox) file: %s' % err) json2xml_exec_path = find_file_in_path('json2xml') json2xml_exec = imp.load_source('json2xml', json2xml_exec_path) # fill in the sys args before invoking json2xml sys.argv = [ json2xml_exec_path, '-t', root_node, '-o', xml_file_path, jtox_file_path, json_config ] try: json2xml_exec.main() with open(xml_file_path, 'r+') as fp: content = fp.read() except SystemExit: pass finally: err = sys.stderr.getvalue() if err and 'error' in err.lower(): if not keep_tmp_files: shutil.rmtree(os.path.realpath( os.path.expanduser(JSON2XML_DIR_PATH)), ignore_errors=True) raise AnsibleError('Error while translating to xml: %s' % err) sys.argv = saved_arg sys.stdout = saved_stdout sys.stderr = saved_stderr try: content = re.sub(r'<\? ?xml .*\? 
?>', '', content) root = etree.fromstring(content) except Exception as e: raise AnsibleError('Error while reading xml document: %s' % e) finally: if not keep_tmp_files: shutil.rmtree(os.path.realpath( os.path.expanduser(JSON2XML_DIR_PATH)), ignore_errors=True) res.append(etree.tostring(root)) return res
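# --- Example (not part of the original source): the lookups above swap out sys.argv,
# --- sys.stdout and sys.stderr inline before calling pyang/json2xml as libraries; a
# --- hedged sketch of the same save-and-restore pattern as a reusable context manager.
import sys
from contextlib import contextmanager
from io import StringIO

@contextmanager
def captured_cli_run(argv):
    """Temporarily replace sys.argv and capture stdout/stderr while a CLI-style main() runs."""
    saved_argv, saved_stdout, saved_stderr = sys.argv, sys.stdout, sys.stderr
    sys.argv = list(argv)
    sys.stdout = sys.stderr = StringIO()
    try:
        yield sys.stdout
    finally:
        sys.argv, sys.stdout, sys.stderr = saved_argv, saved_stdout, saved_stderr

# usage (illustrative):
#     with captured_cli_run([pyang_exec_path, '-f', 'jtox', '-o', out_path]) as buf:
#         pyang_exec.run()
#         output = buf.getvalue()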
def _build_command(self, binary, *other_args): ''' Takes a binary (ssh, scp, sftp) and optional extra arguments and returns a command line as an array that can be passed to subprocess.Popen. ''' b_command = [] # # First, the command to invoke # # If we want to use password authentication, we have to set up a pipe to # write the password to sshpass. if self._play_context.password: if not self._sshpass_available(): raise AnsibleError( "to use the 'ssh' connection type with passwords, you must install the sshpass program" ) self.sshpass_pipe = os.pipe() b_command += [ b'sshpass', b'-d' + to_bytes(self.sshpass_pipe[0], nonstring='simplerepr', errors='surrogate_or_strict') ] b_command += [to_bytes(binary, errors='surrogate_or_strict')] # # Next, additional arguments based on the configuration. # # sftp batch mode allows us to correctly catch failed transfers, but can # be disabled if the client side doesn't support the option. However, # sftp batch mode does not prompt for passwords so it must be disabled # if not using controlpersist and using sshpass if binary == 'sftp' and C.DEFAULT_SFTP_BATCH_MODE: if self._play_context.password: b_args = [b'-o', b'BatchMode=no'] self._add_args(b_command, b_args, u'disable batch mode for sshpass') b_command += [b'-b', b'-'] if self._play_context.verbosity > 3: b_command.append(b'-vvv') # # Next, we add [ssh_connection]ssh_args from ansible.cfg. # if self._play_context.ssh_args: b_args = [ to_bytes(a, errors='surrogate_or_strict') for a in self._split_ssh_args(self._play_context.ssh_args) ] self._add_args(b_command, b_args, u"ansible.cfg set ssh_args") # Now we add various arguments controlled by configuration file settings # (e.g. host_key_checking) or inventory variables (ansible_ssh_port) or # a combination thereof. if not C.HOST_KEY_CHECKING: b_args = (b"-o", b"StrictHostKeyChecking=no") self._add_args( b_command, b_args, u"ANSIBLE_HOST_KEY_CHECKING/host_key_checking disabled") if self._play_context.port is not None: b_args = (b"-o", b"Port=" + to_bytes(self._play_context.port, nonstring='simplerepr', errors='surrogate_or_strict')) self._add_args( b_command, b_args, u"ANSIBLE_REMOTE_PORT/remote_port/ansible_port set") key = self._play_context.private_key_file if key: b_args = (b"-o", b'IdentityFile="' + to_bytes( os.path.expanduser(key), errors='surrogate_or_strict') + b'"') self._add_args( b_command, b_args, u"ANSIBLE_PRIVATE_KEY_FILE/private_key_file/ansible_ssh_private_key_file set" ) if not self._play_context.password: self._add_args(b_command, ( b"-o", b"KbdInteractiveAuthentication=no", b"-o", b"PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey", b"-o", b"PasswordAuthentication=no"), u"ansible_password/ansible_ssh_pass not set") user = self._play_context.remote_user if user: self._add_args( b_command, (b"-o", b"User=" + to_bytes(self._play_context.remote_user, errors='surrogate_or_strict')), u"ANSIBLE_REMOTE_USER/remote_user/ansible_user/user/-u set") self._add_args( b_command, (b"-o", b"ConnectTimeout=" + to_bytes(self._play_context.timeout, errors='surrogate_or_strict', nonstring='simplerepr')), u"ANSIBLE_TIMEOUT/timeout set") # Add in any common or binary-specific arguments from the PlayContext # (i.e. inventory or task settings or overrides on the command line). 
for opt in (u'ssh_common_args', u'{0}_extra_args'.format(binary)): attr = getattr(self._play_context, opt, None) if attr is not None: b_args = [ to_bytes(a, errors='surrogate_or_strict') for a in self._split_ssh_args(attr) ] self._add_args(b_command, b_args, u"PlayContext set %s" % opt) # Check if ControlPersist is enabled and add a ControlPath if one hasn't # already been set. controlpersist, controlpath = self._persistence_controls(b_command) if controlpersist: self._persistent = True if not controlpath: cpdir = unfrackpath(u'$HOME/.ansible/cp') b_cpdir = to_bytes(cpdir, errors='surrogate_or_strict') # The directory must exist and be writable. makedirs_safe(b_cpdir, 0o700) if not os.access(b_cpdir, os.W_OK): raise AnsibleError("Cannot write to ControlPath %s" % to_native(cpdir)) b_args = (b"-o", b"ControlPath=" + to_bytes( C.ANSIBLE_SSH_CONTROL_PATH % dict(directory=cpdir), errors='surrogate_or_strict')) self._add_args( b_command, b_args, u"found only ControlPersist; added ControlPath") # Finally, we add any caller-supplied extras. if other_args: b_command += [to_bytes(a) for a in other_args] return b_command
def run(self, terms, variables, **kwargs): res = [] try: xml_data = terms[0] except IndexError: raise AnsibleError( "Either xml string or path to xml file must be specified") try: yang_file = kwargs['yang_file'] except KeyError: raise AnsibleError("value of 'yang_file' must be specified") yang_file = os.path.realpath(os.path.expanduser(yang_file)) if not os.path.isfile(yang_file): # Maybe we are passing a glob? yang_files = glob.glob(yang_file) if not yang_files: # Glob returned no files raise AnsibleError('%s invalid file path' % yang_file) else: yang_files = [yang_file] search_path = kwargs.pop('search_path', '') keep_tmp_files = kwargs.pop('keep_tmp_files', False) abs_search_path = None for path in search_path.split(':'): path = os.path.realpath(os.path.expanduser(path)) if abs_search_path is None: abs_search_path = path else: abs_search_path += ':' + path if path != '' and not os.path.isdir(path): raise AnsibleError('%s is invalid directory path' % path) search_path = abs_search_path plugindir = unfrackpath(XM2JSONL_DIR_PATH) makedirs_safe(plugindir) if os.path.isfile(xml_data): # input is xml file path xml_file_path = xml_data else: # input is xml string, copy it to file in temporary location xml_file_path = os.path.join(XM2JSONL_DIR_PATH, '%s.%s' % (str(uuid.uuid4()), 'xml')) xml_file_path = os.path.realpath(os.path.expanduser(xml_file_path)) with open(xml_file_path, 'w') as f: if not xml_data.startswith('<?xml version'): xml_data = '<?xml version="1.0" encoding="UTF-8"?>\n' + xml_data data = xml_data f.write(data) xml_file_path = os.path.realpath(os.path.expanduser(xml_file_path)) try: # validate xml etree.parse(xml_file_path) display.vvvv("Parsing xml data from temporary file: %s" % xml_file_path) except Exception as exc: if not keep_tmp_files: shutil.rmtree(os.path.realpath( os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True) raise AnsibleError("Failed to load xml data: %s" % (to_text(exc, errors='surrogate_or_strict'))) base_pyang_path = sys.modules['pyang'].__file__ pyang_exec_path = find_file_in_path('pyang') pyang_exec = imp.load_source('pyang', pyang_exec_path) saved_arg = deepcopy(sys.argv) sys.modules['pyang'].__file__ = base_pyang_path saved_stdout = sys.stdout saved_stderr = sys.stderr sys.stdout = sys.stderr = StringIO() xsl_file_path = os.path.join(XM2JSONL_DIR_PATH, '%s.%s' % (str(uuid.uuid4()), 'xsl')) json_file_path = os.path.join(XM2JSONL_DIR_PATH, '%s.%s' % (str(uuid.uuid4()), 'json')) xls_file_path = os.path.realpath(os.path.expanduser(xsl_file_path)) json_file_path = os.path.realpath(os.path.expanduser(json_file_path)) # fill in the sys args before invoking pyang sys.argv = [ pyang_exec_path, '-f', 'jsonxsl', '-o', xls_file_path, '-p', search_path, "--lax-quote-checks" ] + yang_files display.display("Generating xsl file '%s' by executing command '%s'" % (xls_file_path, ' '.join(sys.argv)), log_only=True) try: pyang_exec.run() except SystemExit: pass except Exception as e: if not keep_tmp_files: shutil.rmtree(os.path.realpath( os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True) raise AnsibleError( 'Error while generating intermediate (xsl) file: %s' % e) finally: err = sys.stderr.getvalue() if err and 'error' in err.lower(): if not keep_tmp_files: shutil.rmtree(os.path.realpath( os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True) raise AnsibleError( 'Error while generating (xsl) intermediate file: %s' % err) xsltproc_exec_path = find_file_in_path('xsltproc') # fill in the sys args before invoking xsltproc sys.argv = [ xsltproc_exec_path, 
'-o', json_file_path, xsl_file_path, xml_file_path ] display.display( "Generating json data in temp file '%s' by executing command '%s'" % (json_file_path, ' '.join(sys.argv)), log_only=True) time.sleep(5) try: os.system(' '.join(sys.argv)) except SystemExit: pass finally: err = sys.stderr.getvalue() if err and 'error' in err.lower(): if not keep_tmp_files: shutil.rmtree(os.path.realpath( os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True) raise AnsibleError('Error while translating to json: %s' % err) sys.argv = saved_arg sys.stdout = saved_stdout sys.stderr = saved_stderr try: display.vvvv("Reading output json data from temporary file: %s" % json_file_path) with open(json_file_path) as fp: content = json.load(fp) except Exception as e: raise AnsibleError('Error while reading json document: %s' % e) finally: if not keep_tmp_files: shutil.rmtree(os.path.realpath( os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True) res.append(content) return res
def get_config(p, section, key, env_var, default, value_type=None, expand_relative_paths=False): ''' return a configuration variable with casting :arg p: A ConfigParser object to look for the configuration in :arg section: A section of the ini config that should be examined for this section. :arg key: The config key to get this config from :arg env_var: An Environment variable to check for the config var. If this is set to None then no environment variable will be used. :arg default: A default value to assign to the config var if nothing else sets it. :kwarg value_type: The type of the value. This can be any of the following strings: :boolean: sets the value to a True or False value :integer: Sets the value to an integer or raises a ValueType error :float: Sets the value to a float or raises a ValueType error :list: Treats the value as a comma separated list. Split the value and return it as a python list. :none: Sets the value to None :path: Expands any environment variables and tilde's in the value. :tmp_path: Create a unique temporary directory inside of the dirctory specified by value and return its path. :pathlist: Treat the value as a typical PATH string. (On POSIX, this means colon separated strings.) Split the value and then expand each part for environment variables and tildes. :kwarg expand_relative_paths: for pathlist and path types, if this is set to True then also change any relative paths into absolute paths. The default is False. ''' value = _get_config(p, section, key, env_var, default) if value_type == 'boolean': value = mk_boolean(value) elif value: if value_type == 'integer': value = int(value) elif value_type == 'float': value = float(value) elif value_type == 'list': if isinstance(value, string_types): value = [x.strip() for x in value.split(',')] elif value_type == 'none': if value == "None": value = None elif value_type == 'path': value = shell_expand(value, expand_relative_paths=expand_relative_paths) elif value_type == 'tmppath': value = shell_expand(value) if not os.path.exists(value): makedirs_safe(value, 0o700) prefix = 'ansible-local-%s' % os.getpid() value = tempfile.mkdtemp(prefix=prefix, dir=value) elif value_type == 'pathlist': if isinstance(value, string_types): value = [shell_expand(x, expand_relative_paths=expand_relative_paths) \ for x in value.split(os.pathsep)] elif isinstance(value, string_types): value = unquote(value) return to_text(value, errors='surrogate_or_strict', nonstring='passthru')
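# --- Example (not part of the original source): hedged sketch of the 'tmppath' handling
# --- above -- make sure the base directory exists with mode 0700, then create a unique
# --- ansible-local-<pid> working directory inside it. The helper name is illustrative.
import os
import tempfile

def make_private_tmpdir(base):
    base = os.path.expanduser(base)
    if not os.path.exists(base):
        os.makedirs(base, mode=0o700)
    return tempfile.mkdtemp(prefix='ansible-local-%s' % os.getpid(), dir=base)

# e.g. make_private_tmpdir('~/.ansible/tmp')
# returns something like '/home/user/.ansible/tmp/ansible-local-1234abcd'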