Example #1
    def put_file(self, in_path, out_path):
        ''' transfer a file from local to remote '''

        super(Connection, self).put_file(in_path, out_path)

        display.vvv(u"PUT {0} TO {1}".format(in_path, out_path),
                    host=self.host)
        if not os.path.exists(to_bytes(in_path, errors='strict')):
            raise AnsibleFileNotFound(
                "file or module does not exist: {0}".format(to_str(in_path)))

        # scp and sftp require square brackets for IPv6 addresses, but
        # accept them for hostnames and IPv4 addresses too.
        host = '[%s]' % self.host

        if C.DEFAULT_SCP_IF_SSH:
            cmd = self._build_command(
                'scp', in_path, u'{0}:{1}'.format(host, pipes.quote(out_path)))
            in_data = None
        else:
            cmd = self._build_command('sftp', to_bytes(host))
            in_data = u"put {0} {1}\n".format(pipes.quote(in_path),
                                              pipes.quote(out_path))

        in_data = to_bytes(in_data, nonstring='passthru')
        (returncode, stdout, stderr) = self._run(cmd, in_data)

        if returncode != 0:
            raise AnsibleError(
                "failed to transfer file to {0}:\n{1}\n{2}".format(
                    to_str(out_path), to_str(stdout), to_str(stderr)))
Example #2
    def _get_server_api_version(self):
        """
        Fetches the Galaxy API current version to ensure
        the API server is up and reachable.
        """
        url = '%s/api/' % self._api_server
        try:
            return_data = open_url(url, validate_certs=self._validate_certs)
        except Exception as e:
            raise AnsibleError(
                "Failed to get data from the API server (%s): %s " %
                (url, to_str(e)))

        try:
            data = json.load(return_data)
        except Exception as e:
            raise AnsibleError(
                "Could not process data from the API server (%s): %s " %
                (url, to_str(e)))

        if 'current_version' not in data:
            raise AnsibleError(
                "missing required 'current_version' from server response (%s)"
                % url)

        return data['current_version']
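
For context, a minimal sketch of the JSON shape this check expects back from GET <api_server>/api/ (the version value here is illustrative, not taken from the code above):

    # illustrative payload; only the 'current_version' key is required by the check above
    data = {"current_version": "v1"}
    assert data["current_version"] == "v1"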
Example #3
    def put_file(self, in_path, out_path):
        """ transfer a file from local to remote """

        super(Connection, self).put_file(in_path, out_path)

        display.vvv(u"PUT {0} TO {1}".format(in_path, out_path), host=self.host)
        if not os.path.exists(to_bytes(in_path, errors="strict")):
            raise AnsibleFileNotFound("file or module does not exist: {0}".format(to_str(in_path)))

        # scp and sftp require square brackets for IPv6 addresses, but
        # accept them for hostnames and IPv4 addresses too.
        host = "[%s]" % self.host

        if C.DEFAULT_SCP_IF_SSH:
            cmd = self._build_command("scp", in_path, u"{0}:{1}".format(host, pipes.quote(out_path)))
            in_data = None
        else:
            cmd = self._build_command("sftp", to_bytes(host))
            in_data = u"put {0} {1}\n".format(pipes.quote(in_path), pipes.quote(out_path))

        in_data = to_bytes(in_data, nonstring="passthru")
        (returncode, stdout, stderr) = self._run(cmd, in_data)

        if returncode != 0:
            raise AnsibleError(
                "failed to transfer file to {0}:\n{1}\n{2}".format(to_str(out_path), to_str(stdout), to_str(stderr))
            )
Example #4
    def complete_cd(self, text, line, begidx, endidx):
        mline = line.partition(' ')[2]
        offs = len(mline) - len(text)

        if self.options.cwd in ('all','*','\\'):
            completions = self.hosts + self.groups
        else:
            completions = [x.name for x in self.inventory.list_hosts(self.options.cwd)]

        return [to_str(s)[offs:] for s in completions if to_str(s).startswith(to_str(mline))]
Example #5
    def __init__(self, message="", obj=None, show_content=True):
        # we import this here to prevent an import loop problem,
        # since the objects code also imports ansible.errors
        from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject

        self._obj = obj
        self._show_content = show_content
        if obj and isinstance(obj, AnsibleBaseYAMLObject):
            extended_error = self._get_extended_error()
            if extended_error:
                self.message = '%s\n\n%s' % (to_str(message), to_str(extended_error))
        else:
            self.message = '%s' % to_str(message)
Example #6
    def put_file(self, in_path, out_path):
        ''' transfer a file from local to local '''

        super(Connection, self).put_file(in_path, out_path)

        display.vvv(u"{0} PUT {1} TO {2}".format(self._play_context.remote_addr, in_path, out_path))
        if not os.path.exists(in_path):
            raise AnsibleFileNotFound("file or module does not exist: {0}".format(to_str(in_path)))
        try:
            shutil.copyfile(in_path, out_path)
        except shutil.Error:
            raise AnsibleError("failed to copy: {0} and {1} are the same".format(to_str(in_path), to_str(out_path)))
        except IOError as e:
            raise AnsibleError("failed to transfer file to {0}: {1}".format(to_str(out_path), to_str(e)))
Example #7
    def __init__(self, message="", obj=None, show_content=True):
        # we import this here to prevent an import loop problem,
        # since the objects code also imports ansible.errors
        from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject

        self._obj = obj
        self._show_content = show_content
        if obj and isinstance(obj, AnsibleBaseYAMLObject):
            extended_error = self._get_extended_error()
            if extended_error:
                self.message = '%s\n\n%s' % (to_str(message),
                                             to_str(extended_error))
        else:
            self.message = '%s' % to_str(message)
Example #8
    def complete_cd(self, text, line, begidx, endidx):
        mline = line.partition(' ')[2]
        offs = len(mline) - len(text)

        if self.options.cwd in ('all', '*', '\\'):
            completions = self.hosts + self.groups
        else:
            completions = [
                x.name for x in self.inventory.list_hosts(self.options.cwd)
            ]

        return [
            to_str(s)[offs:] for s in completions
            if to_str(s).startswith(to_str(mline))
        ]
Example #9
    def run(self, tmp=None, task_vars=None):
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        try:
            source = self._find_needle('vars', self._task.args.get('_raw_params'))
        except AnsibleError as e:
            result['failed'] = True
            result['message'] = to_str(e)
            return result

        (data, show_content) = self._loader._get_file_contents(source)
        data = self._loader.load(data, show_content)
        if data is None:
            data = {}
        if not isinstance(data, dict):
            result['failed'] = True
            result['message'] = "%s must be stored as a dictionary/hash" % source
        else:
            result['ansible_facts'] = data
            result['_ansible_no_log'] = not show_content

        return result
Example #10
 def _winrm_connect(self):
     '''
     Establish a WinRM connection over HTTP/HTTPS.
     '''
     display.vvv("ESTABLISH WINRM CONNECTION FOR USER: %s on PORT %s TO %s" %
         (self._winrm_user, self._winrm_port, self._winrm_host), host=self._winrm_host)
     netloc = '%s:%d' % (self._winrm_host, self._winrm_port)
     endpoint = urlunsplit((self._winrm_scheme, netloc, self._winrm_path, '', ''))
     errors = []
     for transport in self._winrm_transport:
         if transport == 'kerberos' and not HAVE_KERBEROS:
             errors.append('kerberos: the python kerberos library is not installed')
             continue
         display.vvvvv('WINRM CONNECT: transport=%s endpoint=%s' % (transport, endpoint), host=self._winrm_host)
         try:
             protocol = Protocol(endpoint, transport=transport, **self._winrm_kwargs)
             protocol.send_message('')
             return protocol
         except Exception as e:
             err_msg = to_unicode(e).strip()
             if re.search(to_unicode(r'Operation\s+?timed\s+?out'), err_msg, re.I):
                 raise AnsibleError('the connection attempt timed out')
             m = re.search(to_unicode(r'Code\s+?(\d{3})'), err_msg)
             if m:
                 code = int(m.groups()[0])
                 if code == 401:
                     err_msg = 'the username/password specified for this server was incorrect'
                 elif code == 411:
                     return protocol
             errors.append(u'%s: %s' % (transport, err_msg))
             display.vvvvv(u'WINRM CONNECTION ERROR: %s\n%s' % (err_msg, to_unicode(traceback.format_exc())), host=self._winrm_host)
     if errors:
         raise AnsibleError(', '.join(map(to_str, errors)))
     else:
         raise AnsibleError('No transport found for WinRM connection')
Example #11
    def read_csv(self,
                 filename,
                 delimiter,
                 encoding='utf-8',
                 dflt=None,
                 type='dict',
                 groupby=''):

        try:
            f = codecs.open(filename, 'r', encoding=encoding)
            creader = list(csv.reader(f, delimiter=delimiter))
            f.close()

            headers = creader.pop(0)
            rows = []

            for row in creader:
                lista = dict(zip(headers, row))
                lista = {k: v for k, v in lista.items() if v}
                if groupby:
                    if groupby in lista:
                        rows.append(lista)
                else:
                    rows.append(lista)
        except Exception as e:
            raise AnsibleError("csvfile: %s" % to_str(e))

        if type == 'dict':
            return [rows]
        else:
            return rows
Example #12
    def get_host_variables(self, host):
        """ Runs <script> --host <hostname> to determine additional host variables """
        if self.host_vars_from_top is not None:
            try:
                got = self.host_vars_from_top.get(host.name, {})
            except AttributeError as e:
                raise AnsibleError(
                    "Improperly formated host information for %s: %s" %
                    (host.name, to_str(e)))
            return got

        cmd = [self.filename, "--host", host.name]
        try:
            sp = subprocess.Popen(cmd,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE)
        except OSError as e:
            raise AnsibleError("problem running %s (%s)" % (' '.join(cmd), e))
        (out, err) = sp.communicate()
        if out.strip() == '':
            return dict()
        try:
            return json_dict_bytes_to_unicode(self._loader.load(out))
        except ValueError:
            raise AnsibleError(
                "could not parse post variable response: %s, %s" % (cmd, out))
Example #13
    def put_file(self, in_path, out_path):
        super(Connection, self).put_file(in_path, out_path)
        out_path = self._shell._unquote(out_path)
        display.vvv('PUT "%s" TO "%s"' % (in_path, out_path), host=self._winrm_host)
        if not os.path.exists(in_path):
            raise AnsibleFileNotFound('file or module does not exist: "%s"' % in_path)

        script_template = '''
            begin {{
                $path = "{0}"

                $DebugPreference = "Continue"
                $ErrorActionPreference = "Stop"
                Set-StrictMode -Version 2

                $fd = [System.IO.File]::Create($path)

                $sha1 = [System.Security.Cryptography.SHA1CryptoServiceProvider]::Create()

                $bytes = @() #initialize for empty file case
            }}
            process {{
               $bytes = [System.Convert]::FromBase64String($input)
               $sha1.TransformBlock($bytes, 0, $bytes.Length, $bytes, 0) | Out-Null
               $fd.Write($bytes, 0, $bytes.Length)
            }}
            end {{
                $sha1.TransformFinalBlock($bytes, 0, 0) | Out-Null

                $hash = [System.BitConverter]::ToString($sha1.Hash).Replace("-", "").ToLowerInvariant()

                $fd.Close()

                Write-Output "{{""sha1"":""$hash""}}"
            }}
        '''

        # FUTURE: this sucks- why can't the module/shell stuff do this?
        with open(in_path, 'r') as temp_file:
            if temp_file.read(15).lower().startswith('#!powershell') and not out_path.lower().endswith('.ps1'):
                out_path = out_path + '.ps1'

        script = script_template.format(self._shell._escape(out_path))
        cmd_parts = self._shell._encode_script(script, as_list=True, strict_mode=False)

        result = self._winrm_exec(cmd_parts[0], cmd_parts[1:], stdin_iterator=self._put_file_stdin_iterator(in_path, out_path))
        # TODO: improve error handling
        if result.status_code != 0:
            raise AnsibleError(to_str(result.std_err))

        put_output = json.loads(result.std_out)
        remote_sha1 = put_output.get("sha1")

        if not remote_sha1:
            raise AnsibleError("Remote sha1 was not returned")

        local_sha1 = secure_hash(in_path)

        if remote_sha1 != local_sha1:
            raise AnsibleError("Remote sha1 hash {0} does not match local hash {1}".format(remote_sha1, local_sha1))
Example #14
    def get_real_file(self, file_path):
        """
        If the file is vault encrypted, return a path to a temporary decrypted file.
        If the file is not encrypted, the original path is returned.
        Temporary files are cleaned up in the destructor.
        """

        if not file_path or not isinstance(file_path, string_types):
            raise AnsibleParserError("Invalid filename: '%s'" %
                                     to_str(file_path))

        b_file_path = to_bytes(file_path, errors='strict')
        if not self.path_exists(b_file_path) or not self.is_file(b_file_path):
            raise AnsibleFileNotFound(
                "the file_name '%s' does not exist, or is not readable" %
                to_str(file_path))

        if not self._vault:
            self._vault = VaultLib(password="")

        real_path = self.path_dwim(file_path)

        try:
            with open(to_bytes(real_path), 'rb') as f:
                if self._vault.is_encrypted(f):
                    # if the file is encrypted and no password was specified,
                    # the decrypt call would throw an error, but we check first
                    # since the decrypt function doesn't know the file name
                    data = f.read()
                    if not self._vault_password:
                        raise AnsibleParserError(
                            "A vault password must be specified to decrypt %s"
                            % file_path)

                    data = self._vault.decrypt(data, filename=real_path)
                    # Make a temp file
                    real_path = self._create_content_tempfile(data)
                    self._tempfiles.add(real_path)

            return real_path

        except (IOError, OSError) as e:
            raise AnsibleParserError(
                "an error occurred while trying to read the file '%s': %s" %
                (to_str(real_path), to_str(e)))
Example #15
    def run(self, tmp=None, task_vars=None):
        """ Load yml files recursively from a directory.
        """
        self.VALID_FILE_EXTENSIONS = ['yaml', 'yml', 'json']
        if not task_vars:
            task_vars = dict()

        self.show_content = True
        self._set_args()

        results = dict()
        if self.source_dir:
            self._set_dir_defaults()
            self._set_root_dir()
            if path.exists(self.source_dir):
                for root_dir, filenames in self._traverse_dir_depth():
                    failed, err_msg, updated_results = (
                        self._load_files_in_dir(root_dir, filenames)
                    )
                    if not failed:
                        results.update(updated_results)
                    else:
                        break
            else:
                failed = True
                err_msg = (
                    '{0} directory does not exist'.format(self.source_dir)
                )
        else:
            try:
                self.source_file = self._find_needle('vars', self.source_file)
                failed, err_msg, updated_results = (
                    self._load_files(self.source_file)
                )
                if not failed:
                    results.update(updated_results)

            except AnsibleError as e:
                err_msg = to_str(e)
                raise AnsibleError(err_msg)

        if self.return_results_as_name:
            scope = dict()
            scope[self.return_results_as_name] = results
            results = scope

        result = super(ActionModule, self).run(tmp, task_vars)

        if failed:
            result['failed'] = failed
            result['message'] = err_msg

        result['ansible_facts'] = results
        result['_ansible_no_log'] = not self.show_content

        return result
Example #16
    def put_file(self, in_path, out_path):
        super(Connection, self).put_file(in_path, out_path)
        out_path = self._shell._unquote(out_path)
        display.vvv('PUT "%s" TO "%s"' % (in_path, out_path), host=self._winrm_host)
        if not os.path.exists(in_path):
            raise AnsibleFileNotFound('file or module does not exist: "%s"' % in_path)

        script_template = '''
            begin {{
                $path = "{0}"

                $DebugPreference = "Continue"
                $ErrorActionPreference = "Stop"
                Set-StrictMode -Version 2

                $fd = [System.IO.File]::Create($path)

                $sha1 = [System.Security.Cryptography.SHA1CryptoServiceProvider]::Create()

                $bytes = @() #initialize for empty file case
            }}
            process {{
               $bytes = [System.Convert]::FromBase64String($input)
               $sha1.TransformBlock($bytes, 0, $bytes.Length, $bytes, 0) | Out-Null
               $fd.Write($bytes, 0, $bytes.Length)
            }}
            end {{
                $sha1.TransformFinalBlock($bytes, 0, 0) | Out-Null

                $hash = [System.BitConverter]::ToString($sha1.Hash).Replace("-", "").ToLowerInvariant()

                $fd.Close()

                Write-Output "{{""sha1"":""$hash""}}"
            }}
        '''

        script = script_template.format(self._shell._escape(out_path))
        cmd_parts = self._shell._encode_script(script, as_list=True, strict_mode=False)

        result = self._winrm_exec(cmd_parts[0], cmd_parts[1:], stdin_iterator=self._put_file_stdin_iterator(in_path, out_path))
        # TODO: improve error handling
        if result.status_code != 0:
            raise AnsibleError(to_str(result.std_err))

        put_output = json.loads(result.std_out)
        remote_sha1 = put_output.get("sha1")

        if not remote_sha1:
            raise AnsibleError("Remote sha1 was not returned")

        local_sha1 = secure_hash(in_path)

        if remote_sha1 != local_sha1:
            raise AnsibleError("Remote sha1 hash {0} does not match local hash {1}".format(to_str(remote_sha1), to_str(local_sha1)))
Example #17
def makedirs_safe(path, mode=None):
    '''Safe way to create dirs in multiprocess/thread environments'''
    if not os.path.exists(to_bytes(path, errors='strict')):
        try:
            if mode:
                os.makedirs(path, mode)
            else:
                os.makedirs(path)
        except OSError as e:
            if e.errno != EEXIST:
                raise AnsibleError(
                    "Unable to create local directories(%s): %s" %
                    (path, to_str(e)))
Example #18
    def read_csv(self, filename, key, delimiter, encoding="utf-8", dflt=None, col=1):

        try:
            f = open(filename, "r")
            creader = CSVReader(f, delimiter=to_bytes(delimiter), encoding=encoding)

            for row in creader:
                if row[0] == key:
                    return row[int(col)]
        except Exception as e:
            raise AnsibleError("csvfile: %s" % to_str(e))

        return dflt
Example #19
 def put_file(self, in_path, out_path):
     super(Connection, self).put_file(in_path, out_path)
     out_path = self._shell._unquote(out_path)
     self._display.vvv('PUT "%s" TO "%s"' % (in_path, out_path),
                       host=self._winrm_host)
     if not os.path.exists(in_path):
         raise AnsibleFileNotFound('file or module does not exist: "%s"' %
                                   in_path)
     with open(in_path) as in_file:
         in_size = os.path.getsize(in_path)
         script_template = '''
             $s = [System.IO.File]::OpenWrite("%s");
             [void]$s.Seek(%d, [System.IO.SeekOrigin]::Begin);
             $b = [System.Convert]::FromBase64String("%s");
             [void]$s.Write($b, 0, $b.length);
             [void]$s.SetLength(%d);
             [void]$s.Close();
         '''
         # Determine max size of data we can pass per command.
         script = script_template % (self._shell._escape(out_path), in_size,
                                     '', in_size)
         cmd = self._shell._encode_script(script)
         # Encode script with no data, subtract its length from 8190 (max
         # windows command length), divide by 2.67 (UTF16LE base64 command
         # encoding), then by 1.35 again (data base64 encoding).
         buffer_size = int(((8190 - len(cmd)) / 2.67) / 1.35)
         for offset in xrange(0, in_size or 1, buffer_size):
             try:
                 out_data = in_file.read(buffer_size)
                 if offset == 0:
                     if out_data.lower().startswith(
                             '#!powershell'
                     ) and not out_path.lower().endswith('.ps1'):
                         out_path = out_path + '.ps1'
                 b64_data = base64.b64encode(out_data)
                 script = script_template % (self._shell._escape(out_path),
                                             offset, b64_data, in_size)
                 self._display.vvvvv(
                     'WINRM PUT "%s" to "%s" (offset=%d size=%d)' %
                     (in_path, out_path, offset, len(out_data)),
                     host=self._winrm_host)
                 cmd_parts = self._shell._encode_script(script,
                                                        as_list=True)
                 result = self._winrm_exec(cmd_parts[0], cmd_parts[1:])
                 if result.status_code != 0:
                     raise IOError(to_str(result.std_err))
             except Exception:
                 traceback.print_exc()
                 raise AnsibleError('failed to transfer file to "%s"' %
                                    out_path)
Example #20
def selinux_context(path):
    context = [None, None, None, None]
    if HAVE_SELINUX and selinux.is_selinux_enabled():
        try:
            # note: the selinux module uses byte strings on python2 and text
            # strings on python3
            ret = selinux.lgetfilecon_raw(to_str(path))
        except OSError:
            return context
        if ret[0] != -1:
            # Limit split to 4 because the selevel, the last in the list,
            # may contain ':' characters
            context = ret[1].split(':', 3)
    return context
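
As an illustration of the return shape (the label below is made up, not drawn from the code above): the context list is simply the colon-separated SELinux label split into at most four parts, so the following mirrors what is done with the value returned by lgetfilecon_raw:

    # illustrative only: splitting a typical SELinux label into the 4-element context list
    'unconfined_u:object_r:user_home_t:s0'.split(':', 3)
    # -> ['unconfined_u', 'object_r', 'user_home_t', 's0']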
Example #21
    def __init__(self, loader, groups=None, filename=C.DEFAULT_HOST_LIST):
        if groups is None:
            groups = dict()

        self._loader = loader
        self.groups = groups

        # Support inventory scripts that are not prefixed with some
        # path information but happen to be in the current working
        # directory when '.' is not in PATH.
        self.filename = os.path.abspath(filename)
        cmd = [self.filename, "--list"]
        try:
            sp = subprocess.Popen(cmd,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE)
        except OSError as e:
            raise AnsibleError("problem running %s (%s)" % (' '.join(cmd), e))
        (stdout, stderr) = sp.communicate()

        if sp.returncode != 0:
            raise AnsibleError(
                "Inventory script (%s) had an execution error: %s " %
                (filename, stderr))

        # make sure script output is unicode so that json loader will output
        # unicode strings itself
        try:
            self.data = to_unicode(stdout, errors="strict")
        except Exception as e:
            raise AnsibleError(
                "inventory data from {0} contained characters that cannot be interpreted as UTF-8: {1}"
                .format(to_str(self.filename), to_str(e)))

        # see comment about _meta below
        self.host_vars_from_top = None
        self._parse(stderr)
Example #22
    def _generate_retry_inventory(self, retry_path, replay_hosts):
        '''
        Called when a playbook run fails. It generates an inventory which allows
        re-running on ONLY the failed hosts.  This may duplicate some variable
        information in group_vars/host_vars but that is ok, and expected.
        '''
        try:
            makedirs_safe(os.path.dirname(retry_path))
            with open(retry_path, 'w') as fd:
                for x in replay_hosts:
                    fd.write("%s\n" % x)
        except Exception as e:
            display.warning("Could not create retry file '%s'.\n\t%s" % (retry_path, to_str(e)))
            return False

        return True
Example #23
    def run(self, tmp=None, task_vars=None):
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        src        = self._task.args.get('src', None)
        remote_src = boolean(self._task.args.get('remote_src', 'no'))
        remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user

        if src is None:
            result['failed'] = True
            result['msg'] = "src is required"
            return result
        elif remote_src:
            # everything is remote, so we just execute the module
            # without changing any of the module arguments
            result.update(self._execute_module(task_vars=task_vars))
            return result

        try:
            src = self._find_needle('files', src)
        except AnsibleError as e:
            result['failed'] = True
            result['msg'] = to_str(e)
            return result

        # create the remote tmp dir if needed, and put the source file there
        if tmp is None or "-tmp-" not in tmp:
            tmp = self._make_tmp_path(remote_user)
            self._cleanup_remote_tmp = True

        tmp_src = self._connection._shell.join_path(tmp, os.path.basename(src))
        self._transfer_file(src, tmp_src)

        self._fixup_perms(tmp, remote_user, recursive=True)

        new_module_args = self._task.args.copy()
        new_module_args.update(
            dict(
                src=tmp_src,
            )
        )

        result.update(self._execute_module('patch', module_args=new_module_args, task_vars=task_vars))
        self._remove_tmp_path(tmp)
        return result
Example #24
    def run(self, tmp=None, task_vars=None):
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        src = self._task.args.get('src', None)
        remote_src = boolean(self._task.args.get('remote_src', 'no'))
        remote_user = task_vars.get(
            'ansible_ssh_user') or self._play_context.remote_user

        if src is None:
            result['failed'] = True
            result['msg'] = "src is required"
            return result
        elif remote_src:
            # everything is remote, so we just execute the module
            # without changing any of the module arguments
            result.update(self._execute_module(task_vars=task_vars))
            return result

        try:
            src = self._find_needle('files', src)
        except AnsibleError as e:
            result['failed'] = True
            result['msg'] = to_str(e)
            return result

        # create the remote tmp dir if needed, and put the source file there
        if tmp is None or "-tmp-" not in tmp:
            tmp = self._make_tmp_path(remote_user)
            self._cleanup_remote_tmp = True

        tmp_src = self._connection._shell.join_path(tmp, os.path.basename(src))
        self._transfer_file(src, tmp_src)

        self._fixup_perms(tmp, remote_user, recursive=True)

        new_module_args = self._task.args.copy()
        new_module_args.update(dict(src=tmp_src, ))

        result.update(
            self._execute_module('patch',
                                 module_args=new_module_args,
                                 task_vars=task_vars))
        self._remove_tmp_path(tmp)
        return result
Example #25
    def _find_needle(self, dirname, needle):
        '''
            find a needle in a haystack of paths, optionally using 'dirname' as a subdir.
            This will build the ordered list of paths to search and pass them to dwim
            to get back the first existing file found.
        '''

        path_stack = self._task.get_search_path()

        result = self._loader.path_dwim_relative_stack(path_stack, dirname,
                                                       needle)

        if result is None:
            raise AnsibleError("Unable to find '%s' in expected paths." %
                               to_str(needle))

        return result
Example #26
    def all(self, *args, **kwargs):
        ''' instantiates all plugins with the same arguments '''

        class_only = kwargs.pop('class_only', False)
        all_matches = []

        for i in self._get_paths():
            all_matches.extend(glob.glob(os.path.join(i, "*.py")))

        for path in sorted(all_matches,
                           key=lambda match: os.path.basename(match)):
            name, _ = os.path.splitext(path)
            if '__init__' in name:
                continue

            if path not in self._module_cache:
                self._module_cache[path] = self._load_module_source(name, path)

            try:
                obj = getattr(self._module_cache[path], self.class_name)
            except AttributeError as e:
                display.warning(
                    "Skipping plugin (%s) as it seems to be invalid: %s" %
                    (path, to_str(e)))
                continue

            if self.base_class:
                # The import path is hardcoded and should be the right place,
                # so we are not expecting an ImportError.
                module = __import__(self.package, fromlist=[self.base_class])
                # Check whether this obj has the required base class.
                try:
                    plugin_class = getattr(module, self.base_class)
                except AttributeError:
                    continue
                if not issubclass(obj, plugin_class):
                    continue

            if not class_only:
                obj = obj(*args, **kwargs)

            # set extra info on the module, in case we want it later
            setattr(obj, '_original_path', path)
            yield obj
Example #27
def makedirs_safe(path, mode=None):
    '''Safe way to create dirs in multiprocess/thread environments.

    :arg path: A byte or text string representing a directory to be created
    :kwarg mode: If given, the mode to set the directory to
    :raises AnsibleError: If the directory cannot be created and does not already exist.
    :raises UnicodeDecodeError: if the path is not decodable in the utf-8 encoding.
    '''

    rpath = unfrackpath(path)
    b_rpath = to_bytes(rpath)
    if not os.path.exists(b_rpath):
        try:
            if mode:
                os.makedirs(b_rpath, mode)
            else:
                os.makedirs(b_rpath)
        except OSError as e:
            if e.errno != EEXIST:
                raise AnsibleError("Unable to create local directories(%s): %s" % (to_str(rpath), to_str(e)))
Example #28
    def read_csv(self,
                 filename,
                 key,
                 delimiter,
                 encoding='utf-8',
                 dflt=None,
                 col=1):

        try:
            f = open(filename, 'r')
            creader = CSVReader(f,
                                delimiter=to_bytes(delimiter),
                                encoding=encoding)

            for row in creader:
                if row[0] == key:
                    return row[int(col)]
        except Exception as e:
            raise AnsibleError("csvfile: %s" % to_str(e))

        return dflt
Example #29
    def run(self, tmp=None, task_vars=None):

        varname = self._task.args.get('name')
        source = self._task.args.get('file')
        if not source:
            source = self._task.args.get('_raw_params')
            if source is None:
                raise AnsibleError("No filename was found for the included vars. " + \
                                   "Use `- include_vars: <filename>` or the `file:` option " + \
                                   "to specify the vars filename.", self._task._ds)

        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        try:
            source = self._find_needle('vars', source)
        except AnsibleError as e:
            result['failed'] = True
            result['message'] = to_str(e)
            return result

        (data, show_content) = self._loader._get_file_contents(source)
        data = self._loader.load(data, show_content)
        if data is None:
            data = {}
        if not isinstance(data, dict):
            result['failed'] = True
            result['message'] = "%s must be stored as a dictionary/hash" % source
        else:
            if varname:
                scope = {}
                scope[varname] = data
                data = scope
            result['ansible_facts'] = data
            result['_ansible_no_log'] = not show_content

        return result
Example #30
    def run(self, tmp=None, task_vars=None):

        varname = self._task.args.get('name')
        source = self._task.args.get('file')
        if not source:
            source = self._task.args.get('_raw_params')
            if source is None:
                raise AnsibleError("No filename was found for the included vars. " + \
                                   "Use `- include_vars: <filename>` or the `file:` option " + \
                                   "to specify the vars filename.", self._task._ds)

        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        try:
            source = self._find_needle('vars', source)
        except AnsibleError as e:
            result['failed'] = True
            result['message'] = to_str(e)
            return result

        (data, show_content) = self._loader._get_file_contents(source)
        data = self._loader.load(data, show_content)
        if data is None:
            data = {}
        if not isinstance(data, dict):
            result['failed'] = True
            result['message'] = "%s must be stored as a dictionary/hash" % source
        else:
            if varname:
                scope = {}
                scope[varname] = data
                data = scope
            result['ansible_facts'] = data
            result['_ansible_no_log'] = not show_content

        return result
Example #31
def _validate_mutable_mappings(a, b):
    """
    Internal convenience function to ensure arguments are MutableMappings

    This checks that all arguments are MutableMappings or raises an error

    :raises AnsibleError: if one of the arguments is not a MutableMapping
    """

    # If this becomes generally needed, change the signature to operate on
    # a variable number of arguments instead.

    if not (isinstance(a, MutableMapping) and isinstance(b, MutableMapping)):
        myvars = []
        for x in [a, b]:
            try:
                myvars.append(dumps(x))
            except:
                myvars.append(to_str(x))
        raise AnsibleError("failed to combine variables, expected dicts but got a '{0}' and a '{1}': \n{2}\n{3}".format(
            a.__class__.__name__, b.__class__.__name__, myvars[0], myvars[1])
        )
Example #32
def _validate_mutable_mappings(a, b):
    """
    Internal convenience function to ensure arguments are MutableMappings

    This checks that all arguments are MutableMappings or raises an error

    :raises AnsibleError: if one of the arguments is not a MutableMapping
    """

    # If this becomes generally needed, change the signature to operate on
    # a variable number of arguments instead.

    if not (isinstance(a, MutableMapping) and isinstance(b, MutableMapping)):
        myvars = []
        for x in [a, b]:
            try:
                myvars.append(dumps(x))
            except:
                myvars.append(to_str(x))
        raise AnsibleError(
            "failed to combine variables, expected dicts but got a '{0}' and a '{1}': \n{2}\n{3}"
            .format(a.__class__.__name__, b.__class__.__name__, myvars[0],
                    myvars[1]))
Example #33
 def fetch_file(self, in_path, out_path):
     super(Connection, self).fetch_file(in_path, out_path)
     in_path = self._shell._unquote(in_path)
     out_path = out_path.replace('\\', '/')
     self._display.vvv('FETCH "%s" TO "%s"' % (in_path, out_path),
                       host=self._winrm_host)
     buffer_size = 2**19  # 0.5MB chunks
     makedirs_safe(os.path.dirname(out_path))
     out_file = None
     try:
         offset = 0
         while True:
             try:
                 script = '''
                     If (Test-Path -PathType Leaf "%(path)s")
                     {
                         $stream = [System.IO.File]::OpenRead("%(path)s");
                         $stream.Seek(%(offset)d, [System.IO.SeekOrigin]::Begin) | Out-Null;
                         $buffer = New-Object Byte[] %(buffer_size)d;
                         $bytesRead = $stream.Read($buffer, 0, %(buffer_size)d);
                         $bytes = $buffer[0..($bytesRead-1)];
                         [System.Convert]::ToBase64String($bytes);
                         $stream.Close() | Out-Null;
                     }
                     ElseIf (Test-Path -PathType Container "%(path)s")
                     {
                         Write-Host "[DIR]";
                     }
                     Else
                     {
                         Write-Error "%(path)s does not exist";
                         Exit 1;
                     }
                 ''' % dict(buffer_size=buffer_size,
                            path=self._shell._escape(in_path),
                            offset=offset)
                 self._display.vvvvv(
                     'WINRM FETCH "%s" to "%s" (offset=%d)' %
                     (in_path, out_path, offset),
                     host=self._winrm_host)
                 cmd_parts = self._shell._encode_script(script,
                                                        as_list=True)
                 result = self._winrm_exec(cmd_parts[0], cmd_parts[1:])
                 if result.status_code != 0:
                     raise IOError(to_str(result.std_err))
                 if result.std_out.strip() == '[DIR]':
                     data = None
                 else:
                     data = base64.b64decode(result.std_out.strip())
                 if data is None:
                     makedirs_safe(out_path)
                     break
                 else:
                     if not out_file:
                         # If out_path is a directory and we're expecting a file, bail out now.
                         if os.path.isdir(out_path):
                             break
                         out_file = open(out_path, 'wb')
                     out_file.write(data)
                     if len(data) < buffer_size:
                         break
                     offset += len(data)
             except Exception:
                 traceback.print_exc()
                 raise AnsibleError('failed to transfer file to "%s"' %
                                    out_path)
     finally:
         if out_file:
             out_file.close()
Example #34
    def run(self, tmp=None, task_vars=None):
        ''' handler for file transfer operations '''
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        source  = self._task.args.get('src', None)
        content = self._task.args.get('content', None)
        dest    = self._task.args.get('dest', None)
        raw     = boolean(self._task.args.get('raw', 'no'))
        force   = boolean(self._task.args.get('force', 'yes'))
        faf     = self._task.first_available_file
        remote_src = boolean(self._task.args.get('remote_src', False))
        follow  = boolean(self._task.args.get('follow', False))

        if (source is None and content is None and faf is None) or dest is None:
            result['failed'] = True
            result['msg'] = "src (or content) and dest are required"
            return result
        elif (source is not None or faf is not None) and content is not None:
            result['failed'] = True
            result['msg'] = "src and content are mutually exclusive"
            return result
        elif content is not None and dest is not None and dest.endswith("/"):
            result['failed'] = True
            result['msg'] = "dest must be a file if content is defined"
            return result

        # Check if the source ends with a "/"
        source_trailing_slash = False
        if source:
            source_trailing_slash = self._connection._shell.path_has_trailing_slash(source)

        # Define content_tempfile in case we set it after finding content populated.
        content_tempfile = None

        # If content is defined make a temp file and write the content into it.
        if content is not None:
            try:
                # If content comes to us as a dict it should be decoded json.
                # We need to encode it back into a string to write it out.
                if isinstance(content, dict) or isinstance(content, list):
                    content_tempfile = self._create_content_tempfile(json.dumps(content))
                else:
                    content_tempfile = self._create_content_tempfile(content)
                source = content_tempfile
            except Exception as err:
                result['failed'] = True
                result['msg'] = "could not write content temp file: %s" % to_str(err)
                return result

        # if we have first_available_file in our vars
        # look up the files and use the first one we find as src
        elif faf:
            source = self._get_first_available_file(faf, task_vars.get('_original_file', None))
        elif remote_src:
            result.update(self._execute_module(module_name='copy', module_args=self._task.args, task_vars=task_vars, delete_remote_tmp=False))
            return result
        else: # find in expected paths
            try:
                source = self._find_needle('files', source)
            except AnsibleError as e:
                result['failed'] = True
                result['msg'] = to_str(e)
                return result

        # A list of source file tuples (full_path, relative_path) which will try to copy to the destination
        source_files = []

        # If source is a directory populate our list else source is a file and translate it to a tuple.
        if os.path.isdir(to_bytes(source, errors='strict')):
            # Get the amount of spaces to remove to get the relative path.
            if source_trailing_slash:
                sz = len(source)
            else:
                sz = len(source.rsplit('/', 1)[0]) + 1

            # Walk the directory and append the file tuples to source_files.
            for base_path, sub_folders, files in os.walk(source):
                for file in files:
                    full_path = os.path.join(base_path, file)
                    rel_path = full_path[sz:]
                    if rel_path.startswith('/'):
                        rel_path = rel_path[1:]
                    source_files.append((full_path, rel_path))

            # If it's recursive copy, destination is always a dir,
            # explicitly mark it so (note - copy module relies on this).
            if not self._connection._shell.path_has_trailing_slash(dest):
                dest = self._connection._shell.join_path(dest, '')
        else:
            source_files.append((source, os.path.basename(source)))

        changed = False
        module_return = dict(changed=False)

        # A register for if we executed a module.
        # Used to cut down on command calls when not recursive.
        module_executed = False

        # Tell _execute_module to delete the file if there is one file.
        delete_remote_tmp = (len(source_files) == 1)

        # If this is a recursive action create a tmp path that we can share as the _exec_module create is too late.
        remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user
        if not delete_remote_tmp:
            if tmp is None or "-tmp-" not in tmp:
                tmp = self._make_tmp_path(remote_user)
                self._cleanup_remote_tmp = True

        # expand any user home dir specifier
        dest = self._remote_expand_user(dest)

        diffs = []
        for source_full, source_rel in source_files:

            source_full = self._loader.get_real_file(source_full)

            # Generate a hash of the local file.
            local_checksum = checksum(source_full)

            # If local_checksum is not defined we can't find the file so we should fail out.
            if local_checksum is None:
                result['failed'] = True
                result['msg'] = "could not find src=%s" % source_full
                self._remove_tmp_path(tmp)
                return result

            # This is kind of optimization - if user told us destination is
            # dir, do path manipulation right away, otherwise we still check
            # for dest being a dir via remote call below.
            if self._connection._shell.path_has_trailing_slash(dest):
                dest_file = self._connection._shell.join_path(dest, source_rel)
            else:
                dest_file = self._connection._shell.join_path(dest)

            # Attempt to get remote file info
            dest_status = self._execute_remote_stat(dest_file, all_vars=task_vars, follow=follow, tmp=tmp)

            if dest_status['exists'] and dest_status['isdir']:
                # The dest is a directory.
                if content is not None:
                    # If source was defined as content remove the temporary file and fail out.
                    self._remove_tempfile_if_content_defined(content, content_tempfile)
                    self._remove_tmp_path(tmp)
                    result['failed'] = True
                    result['msg'] = "can not use content with a dir as dest"
                    return result
                else:
                    # Append the relative source location to the destination and get remote stats again
                    dest_file = self._connection._shell.join_path(dest, source_rel)
                    dest_status = self._execute_remote_stat(dest_file, all_vars=task_vars, follow=follow, tmp=tmp)

            if dest_status['exists'] and not force:
                # remote file already exists and force is not set, so skip to the next file.
                continue

            if local_checksum != dest_status['checksum']:
                # The checksums don't match and we will change or error out.
                changed = True

                # Create a tmp path if missing only if this is not recursive.
                # If this is recursive we already have a tmp path.
                if delete_remote_tmp:
                    if tmp is None or "-tmp-" not in tmp:
                        tmp = self._make_tmp_path(remote_user)
                        self._cleanup_remote_tmp = True

                if self._play_context.diff and not raw:
                    diffs.append(self._get_diff_data(dest_file, source_full, task_vars))

                if self._play_context.check_mode:
                    self._remove_tempfile_if_content_defined(content, content_tempfile)
                    changed = True
                    module_return = dict(changed=True)
                    continue

                # Define a remote directory that we will copy the file to.
                tmp_src = self._connection._shell.join_path(tmp, 'source')

                if not raw:
                    self._transfer_file(source_full, tmp_src)
                else:
                    self._transfer_file(source_full, dest_file)

                # We have copied the file remotely and no longer require our content_tempfile
                self._remove_tempfile_if_content_defined(content, content_tempfile)
                self._loader.cleanup_tmp_file(source_full)

                # fix file permissions when the copy is done as a different user
                self._fixup_perms(tmp, remote_user, recursive=True)

                if raw:
                    # Continue to next iteration if raw is defined.
                    continue

                # Run the copy module

                # src and dest here come after original and override them
                # we pass dest only to make sure it includes trailing slash in case of recursive copy
                new_module_args = self._task.args.copy()
                new_module_args.update(
                    dict(
                        src=tmp_src,
                        dest=dest,
                        original_basename=source_rel,
                    )
                )
                if 'content' in new_module_args:
                    del new_module_args['content']

                module_return = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, tmp=tmp, delete_remote_tmp=delete_remote_tmp)
                module_executed = True

            else:
                # no need to transfer the file, already correct hash, but still need to call
                # the file module in case we want to change attributes
                self._remove_tempfile_if_content_defined(content, content_tempfile)
                self._loader.cleanup_tmp_file(source_full)

                if raw:
                    # Continue to next iteration if raw is defined.
                    self._remove_tmp_path(tmp)
                    continue

                # Build temporary module_args.
                new_module_args = self._task.args.copy()
                new_module_args.update(
                    dict(
                        src=source_rel,
                        dest=dest,
                        original_basename=source_rel
                    )
                )

                # Execute the file module.
                module_return = self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, tmp=tmp, delete_remote_tmp=delete_remote_tmp)
                module_executed = True

            if not module_return.get('checksum'):
                module_return['checksum'] = local_checksum
            if module_return.get('failed'):
                result.update(module_return)
                if not delete_remote_tmp:
                    self._remove_tmp_path(tmp)
                return result
            if module_return.get('changed'):
                changed = True

            # the file module returns the file path as 'path', but
            # the copy module uses 'dest', so add it if it's not there
            if 'path' in module_return and 'dest' not in module_return:
                module_return['dest'] = module_return['path']

        # Delete tmp path if we were recursive or if we did not execute a module.
        if not delete_remote_tmp or (delete_remote_tmp and not module_executed):
            self._remove_tmp_path(tmp)

        if module_executed and len(source_files) == 1:
            result.update(module_return)
        else:
            result.update(dict(dest=dest, src=source, changed=changed))

        if diffs:
            result['diff'] = diffs

        return result
Example #35
    def _parse(self, err):

        all_hosts = {}

        # not passing from_remote because data from CMDB is trusted
        try:
            self.raw = self._loader.load(self.data)
        except Exception as e:
            sys.stderr.write(err + "\n")
            raise AnsibleError("failed to parse executable inventory script results from {0}: {1}".format(to_str(self.filename), to_str(e)))

        if not isinstance(self.raw, Mapping):
            sys.stderr.write(err + "\n")
            raise AnsibleError("failed to parse executable inventory script results from {0}: data needs to be formatted as a json dict".format(to_str(self.filename)))

        group = None
        for (group_name, data) in self.raw.items():

            # in Ansible 1.3 and later, a "_meta" subelement may contain
            # a variable "hostvars" which contains a hash for each host
            # if this "hostvars" exists at all then do not call --host for each
            # host.  This is for efficiency and scripts should still return data
            # if called with --host for backwards compat with 1.2 and earlier.

            if group_name == '_meta':
                if 'hostvars' in data:
                    self.host_vars_from_top = data['hostvars']
                    continue

            if group_name not in self.groups:
                group = self.groups[group_name] = Group(group_name)

            group = self.groups[group_name]
            host = None

            if not isinstance(data, dict):
                data = {'hosts': data}
            # if it has none of those subkeys, it is the simplified syntax: a host entry with vars
            elif not any(k in data for k in ('hosts','vars','children')):
                data = {'hosts': [group_name], 'vars': data}

            if 'hosts' in data:
                if not isinstance(data['hosts'], list):
                    raise AnsibleError("You defined a group \"%s\" with bad "
                        "data for the host list:\n %s" % (group_name, data))

                for hostname in data['hosts']:
                    if hostname not in all_hosts:
                        all_hosts[hostname] = Host(hostname)
                    host = all_hosts[hostname]
                    group.add_host(host)

            if 'vars' in data:
                if not isinstance(data['vars'], dict):
                    raise AnsibleError("You defined a group \"%s\" with bad "
                        "data for variables:\n %s" % (group_name, data))

                for k, v in iteritems(data['vars']):
                    group.set_variable(k, v)

        # Separate loop to ensure all groups are defined
        for (group_name, data) in self.raw.items():
            if group_name == '_meta':
                continue
            if isinstance(data, dict) and 'children' in data:
                for child_name in data['children']:
                    if child_name in self.groups:
                        self.groups[group_name].add_child_group(self.groups[child_name])

        # Finally, add all top-level groups as children of 'all'.
        # We exclude ungrouped here because it was already added as a child of
        # 'all' at the time it was created.

        for group in self.groups.values():
            if group.depth == 0 and group.name not in ('all', 'ungrouped'):
                self.groups['all'].add_child_group(group)
Example #36
    def run(self, tmp=None, task_vars=None):
        ''' handler for file transfer operations '''
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        if self._play_context.check_mode:
            result['skipped'] = True
            result['msg'] = 'check mode not supported for this module'
            return result

        remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user
        if not tmp:
            tmp = self._make_tmp_path(remote_user)
            self._cleanup_remote_tmp = True

        creates = self._task.args.get('creates')
        if creates:
            # do not run the command if the line contains creates=filename
            # and the filename already exists. This allows idempotence
            # of command executions.
            if self._remote_file_exists(creates):
                self._remove_tmp_path(tmp)
                return dict(skipped=True, msg=("skipped, since %s exists" % creates))

        removes = self._task.args.get('removes')
        if removes:
            # do not run the command if the line contains removes=filename
            # and the filename does not exist. This allows idempotence
            # of command executions.
            if not self._remote_file_exists(removes):
                self._remove_tmp_path(tmp)
                return dict(skipped=True, msg=("skipped, since %s does not exist" % removes))

        # the script name is the first item in the raw params, so we split it
        # out now so we know the file name we need to transfer to the remote,
        # and everything else is an argument to the script which we need later
        # to append to the remote command
        parts  = self._task.args.get('_raw_params', '').strip().split()
        source = parts[0]
        args   = ' '.join(parts[1:])

        try:
            source = self._loader.get_real_file(self._find_needle('files', source))
        except AnsibleError as e:
            return dict(failed=True, msg=to_str(e))

        # transfer the file to a remote tmp location
        tmp_src = self._connection._shell.join_path(tmp, os.path.basename(source))
        self._transfer_file(source, tmp_src)

        # set file permissions, more permissive when the copy is done as a different user
        self._fixup_perms((tmp, tmp_src), remote_user, execute=True)

        # add preparation steps to one ssh roundtrip executing the script
        env_string = self._compute_environment_string()
        script_cmd = ' '.join([env_string, tmp_src, args])

        result.update(self._low_level_execute_command(cmd=script_cmd, sudoable=True))

        # clean up after
        self._remove_tmp_path(tmp)

        result['changed'] = True

        return result
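As a rough illustration of the _raw_params handling above (the task arguments here are made up), the first whitespace-separated token is the script to look up under 'files' and transfer, and everything after it is appended verbatim to the remote command:

# Hypothetical task args as the action plugin would see them.
args = {'_raw_params': 'deploy.sh --env prod --fast', 'creates': '/opt/app/.deployed'}

parts  = args.get('_raw_params', '').strip().split()
source = parts[0]                   # 'deploy.sh' -> found via _find_needle('files', ...) and transferred
script_args = ' '.join(parts[1:])   # '--env prod --fast' -> appended to the remote script command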
Exemple #37
0
    def run(self, tmp=None, task_vars=None):
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        if self._play_context.check_mode:
            result['skipped'] = True
            result['msg'] = "skipped, this module does not support check_mode."
            return result

        src        = self._task.args.get('src', None)
        dest       = self._task.args.get('dest', None)
        delimiter  = self._task.args.get('delimiter', None)
        remote_src = self._task.args.get('remote_src', 'yes')
        regexp     = self._task.args.get('regexp', None)
        follow     = self._task.args.get('follow', False)
        ignore_hidden = self._task.args.get('ignore_hidden', False)

        if src is None or dest is None:
            result['failed'] = True
            result['msg'] = "src and dest are required"
            return result

        remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user
        if not tmp:
            tmp = self._make_tmp_path(remote_user)
            self._cleanup_remote_tmp = True

        if boolean(remote_src):
            result.update(self._execute_module(tmp=tmp, task_vars=task_vars, delete_remote_tmp=False))
            self._remove_tmp_path(tmp)
            return result
        else:
            try:
                src = self._find_needle('files', src)
            except AnsibleError as e:
                result['failed'] = True
                result['msg'] = to_str(e)
                return result

        if not os.path.isdir(src):
            result['failed'] = True
            result['msg'] = "Source (%s) is not a directory" % src
            return result

        _re = None
        if regexp is not None:
            _re = re.compile(regexp)

        # Does all work assembling the file
        path = self._assemble_from_fragments(src, delimiter, _re, ignore_hidden)

        path_checksum = checksum_s(path)
        dest = self._remote_expand_user(dest)
        dest_stat = self._execute_remote_stat(dest, all_vars=task_vars, follow=follow, tmp=tmp)

        diff = {}

        # setup args for running modules
        new_module_args = self._task.args.copy()

        # clean assemble specific options
        for opt in ['remote_src', 'regexp', 'delimiter', 'ignore_hidden']:
            if opt in new_module_args:
                del new_module_args[opt]

        new_module_args.update(
            dict(
                dest=dest,
                original_basename=os.path.basename(src),
            )
        )

        if path_checksum != dest_stat['checksum']:

            if self._play_context.diff:
                diff = self._get_diff_data(dest, path, task_vars)

            remote_path = self._connection._shell.join_path(tmp, 'src')
            xfered = self._transfer_file(path, remote_path)

            # fix file permissions when the copy is done as a different user
            self._fixup_perms(tmp, remote_user, recursive=True)

            new_module_args.update( dict( src=xfered,))

            res = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, tmp=tmp, delete_remote_tmp=False)
            if diff:
                res['diff'] = diff
            result.update(res)
        else:
            result.update(self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, tmp=tmp, delete_remote_tmp=False))

        self._remove_tmp_path(tmp)

        return result
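_assemble_from_fragments is not shown in this example; a standalone sketch of the behaviour the call above relies on (fragment files concatenated in sorted order into a temporary file, optionally filtered by a compiled regexp, optionally skipping hidden files, with an optional delimiter between pieces) might look like the following. This is an approximation under those assumptions, not the actual helper.

import os
import tempfile

def assemble_fragments(src_dir, delimiter=None, compiled_re=None, ignore_hidden=False):
    '''Concatenate the fragment files found in src_dir, in sorted order.'''
    tmp_fd, tmp_path = tempfile.mkstemp()
    with os.fdopen(tmp_fd, 'wb') as out:
        first = True
        for name in sorted(os.listdir(src_dir)):
            if ignore_hidden and name.startswith('.'):
                continue
            if compiled_re is not None and not compiled_re.search(name):
                continue
            fragment = os.path.join(src_dir, name)
            if not os.path.isfile(fragment):
                continue
            if delimiter is not None and not first:
                out.write(delimiter.encode('utf-8') + b'\n')
            with open(fragment, 'rb') as f:
                out.write(f.read())
            first = False
    return tmp_path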
Exemple #38
0
    def run(self, tmp=None, task_vars=None):
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        if self._play_context.check_mode:
            result['skipped'] = True
            result['msg'] = "skipped, this module does not support check_mode."
            return result

        src = self._task.args.get('src', None)
        dest = self._task.args.get('dest', None)
        delimiter = self._task.args.get('delimiter', None)
        remote_src = self._task.args.get('remote_src', 'yes')
        regexp = self._task.args.get('regexp', None)
        follow = self._task.args.get('follow', False)
        ignore_hidden = self._task.args.get('ignore_hidden', False)

        if src is None or dest is None:
            result['failed'] = True
            result['msg'] = "src and dest are required"
            return result

        remote_user = task_vars.get(
            'ansible_ssh_user') or self._play_context.remote_user
        if not tmp:
            tmp = self._make_tmp_path(remote_user)
            self._cleanup_remote_tmp = True

        if boolean(remote_src):
            result.update(
                self._execute_module(tmp=tmp,
                                     task_vars=task_vars,
                                     delete_remote_tmp=False))
            self._remove_tmp_path(tmp)
            return result
        else:
            try:
                src = self._find_needle('files', src)
            except AnsibleError as e:
                result['failed'] = True
                result['msg'] = to_str(e)
                return result

        if not os.path.isdir(src):
            result['failed'] = True
            result['msg'] = "Source (%s) is not a directory" % src
            return result

        _re = None
        if regexp is not None:
            _re = re.compile(regexp)

        # Does all work assembling the file
        path = self._assemble_from_fragments(src, delimiter, _re,
                                             ignore_hidden)

        path_checksum = checksum_s(path)
        dest = self._remote_expand_user(dest)
        dest_stat = self._execute_remote_stat(dest,
                                              all_vars=task_vars,
                                              follow=follow,
                                              tmp=tmp)

        diff = {}

        # setup args for running modules
        new_module_args = self._task.args.copy()

        # clean assemble specific options
        for opt in ['remote_src', 'regexp', 'delimiter', 'ignore_hidden']:
            if opt in new_module_args:
                del new_module_args[opt]

        new_module_args.update(
            dict(
                dest=dest,
                original_basename=os.path.basename(src),
            ))

        if path_checksum != dest_stat['checksum']:

            if self._play_context.diff:
                diff = self._get_diff_data(dest, path, task_vars)

            remote_path = self._connection._shell.join_path(tmp, 'src')
            xfered = self._transfer_file(path, remote_path)

            # fix file permissions when the copy is done as a different user
            self._fixup_perms(tmp, remote_user, recursive=True)

            new_module_args.update(dict(src=xfered, ))

            res = self._execute_module(module_name='copy',
                                       module_args=new_module_args,
                                       task_vars=task_vars,
                                       tmp=tmp,
                                       delete_remote_tmp=False)
            if diff:
                res['diff'] = diff
            result.update(res)
        else:
            result.update(
                self._execute_module(module_name='file',
                                     module_args=new_module_args,
                                     task_vars=task_vars,
                                     tmp=tmp,
                                     delete_remote_tmp=False))

        self._remove_tmp_path(tmp)

        return result
Exemple #39
0
    def get_host_variables(self, host):
        """ Runs <script> --host <hostname> to determine additional host variables """
        if self.host_vars_from_top is not None:
            try:
                got = self.host_vars_from_top.get(host.name, {})
            except AttributeError as e:
                raise AnsibleError("Improperly formated host information for %s: %s" % (host.name,to_str(e)))
            return got

        cmd = [self.filename, "--host", host.name]
        try:
            sp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        except OSError as e:
            raise AnsibleError("problem running %s (%s)" % (' '.join(cmd), e))
        (out, err) = sp.communicate()
        if out.strip() == '':
            return dict()
        try:
            return json_dict_bytes_to_unicode(self._loader.load(out))
        except ValueError:
            raise AnsibleError("could not parse post variable response: %s, %s" % (cmd, out))
Exemple #40
0
    def run(self, tmp=None, task_vars=None):
        ''' handler for unarchive operations '''
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        source  = self._task.args.get('src', None)
        dest    = self._task.args.get('dest', None)
        copy    = boolean(self._task.args.get('copy', True))
        creates = self._task.args.get('creates', None)

        if source is None or dest is None:
            result['failed'] = True
            result['msg'] = "src (or content) and dest are required"
            return result

        remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user
        if not tmp:
            tmp = self._make_tmp_path(remote_user)
            self._cleanup_remote_tmp = True

        if creates:
            # do not run the command if the line contains creates=filename
            # and the filename already exists. This allows idempotence
            # of command executions.
            result = self._execute_module(module_name='stat', module_args=dict(path=creates), task_vars=task_vars)
            stat = result.get('stat', None)
            if stat and stat.get('exists', False):
                result['skipped'] = True
                result['msg'] = "skipped, since %s exists" % creates
                self._remove_tmp_path(tmp)
                return result

        dest = self._remote_expand_user(dest) # CCTODO: Fix path for Windows hosts.
        source = os.path.expanduser(source)

        if copy:
            try:
                source = self._loader.get_real_file(self._find_needle('files', source))
            except AnsibleError as e:
                result['failed'] = True
                result['msg'] = to_str(e)
                self._remove_tmp_path(tmp)
                return result

        remote_checksum = self._remote_checksum(dest, all_vars=task_vars, follow=True)
        if remote_checksum == '4':
            result['failed'] = True
            result['msg'] = "python isn't present on the system.  Unable to compute checksum"
            self._remove_tmp_path(tmp)
            return result
        elif remote_checksum != '3':
            result['failed'] = True
            result['msg'] = "dest '%s' must be an existing dir" % dest
            self._remove_tmp_path(tmp)
            return result

        if copy:
            # transfer the file to a remote tmp location
            tmp_src = self._connection._shell.join_path(tmp, 'source')
            self._transfer_file(source, tmp_src)

        # handle diff mode client side
        # handle check mode client side

        if copy:
            # fix file permissions when the copy is done as a different user
            self._fixup_perms(tmp, remote_user, recursive=True)
            # Build temporary module_args.
            new_module_args = self._task.args.copy()
            new_module_args.update(
                dict(
                    src=tmp_src,
                    original_basename=os.path.basename(source),
                ),
            )

        else:
            new_module_args = self._task.args.copy()
            new_module_args.update(
                dict(
                    original_basename=os.path.basename(source),
                ),
            )

        # execute the unarchive module now, with the updated args
        result.update(self._execute_module(module_args=new_module_args, task_vars=task_vars))
        self._remove_tmp_path(tmp)
        return result
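The single-character values compared against remote_checksum above are sentinel codes rather than real checksums. Based only on how the handlers in this collection branch on them, a rough (and possibly incomplete) reading is:

# Assumed meanings of the sentinel codes, inferred from the checks in these examples.
REMOTE_CHECKSUM_HINTS = {
    '1': 'remote path does not exist',
    '3': 'remote path is a directory',
    '4': 'python not available on the remote host; checksum could not be computed',
}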
Exemple #41
0
    def __init__(self, loader, groups=None, filename=C.DEFAULT_HOST_LIST):
        if groups is None:
            groups = dict()

        self._loader = loader
        self.groups = groups

        # Support inventory scripts that are not prefixed with some
        # path information but happen to be in the current working
        # directory when '.' is not in PATH.
        self.filename = os.path.abspath(filename)
        cmd = [ self.filename, "--list" ]
        try:
            sp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        except OSError as e:
            raise AnsibleError("problem running %s (%s)" % (' '.join(cmd), e))
        (stdout, stderr) = sp.communicate()

        if sp.returncode != 0:
            raise AnsibleError("Inventory script (%s) had an execution error: %s " % (filename,stderr))

        # make sure script output is unicode so that json loader will output
        # unicode strings itself
        try:
            self.data = to_unicode(stdout, errors="strict")
        except Exception as e:
            raise AnsibleError("inventory data from {0} contained characters that cannot be interpreted as UTF-8: {1}".format(to_str(self.filename), to_str(e)))

        # see comment about _meta below
        self.host_vars_from_top = None
        self._parse(stderr)
Exemple #42
0
    def preprocess_data(self, ds):
        '''
        tasks are especially complex arguments so need pre-processing.
        keep it short.
        '''

        assert isinstance(ds, dict)

        # the new, cleaned datastructure, which will have legacy
        # items reduced to a standard structure suitable for the
        # attributes of the task class
        new_ds = AnsibleMapping()
        if isinstance(ds, AnsibleBaseYAMLObject):
            new_ds.ansible_pos = ds.ansible_pos

        # use the args parsing class to determine the action, args,
        # and the delegate_to value from the various possible forms
        # supported as legacy
        args_parser = ModuleArgsParser(task_ds=ds)
        try:
            (action, args, delegate_to) = args_parser.parse()
        except AnsibleParserError as e:
            raise AnsibleParserError(to_str(e), obj=ds)

        # the command/shell/script modules used to support the `cmd` arg,
        # which corresponds to what we now call _raw_params, so move that
        # value over to _raw_params (assuming it is empty)
        if action in ('command', 'shell', 'script'):
            if 'cmd' in args:
                if args.get('_raw_params', '') != '':
                    raise AnsibleError("The 'cmd' argument cannot be used when other raw parameters are specified."
                            " Please put everything in one or the other place.", obj=ds)
                args['_raw_params'] = args.pop('cmd')

        new_ds['action']      = action
        new_ds['args']        = args
        new_ds['delegate_to'] = delegate_to

        # we handle any 'vars' specified in the ds here, as we may
        # be adding things to them below (special handling for includes).
        # When that deprecated feature is removed, this can be too.
        if 'vars' in ds:
            # _load_vars is defined in Base, and is used to load a dictionary
            # or list of dictionaries in a standard way
            new_ds['vars'] = self._load_vars(None, ds.pop('vars'))
        else:
            new_ds['vars'] = dict()

        for (k,v) in iteritems(ds):
            if k in ('action', 'local_action', 'args', 'delegate_to') or k == action or k == 'shell':
                # we don't want to re-assign these values, which were
                # determined by the ModuleArgsParser() above
                continue
            elif k.replace("with_", "") in lookup_loader:
                self._preprocess_loop(ds, new_ds, k, v)
            else:
                # pre-2.0 syntax allowed variables for include statements at the
                # top level of the task, so we move those into the 'vars' dictionary
                # here, and show a deprecation message as we will remove this at
                # some point in the future.
                if action == 'include' and k not in self._get_base_attributes() and k not in self.DEPRECATED_ATTRIBUTES:
                    display.deprecated("Specifying include variables at the top-level of the task is deprecated."
                            " Please see:\nhttp://docs.ansible.com/ansible/playbooks_roles.html#task-include-files-and-encouraging-reuse\n\n"
                            " for currently supported syntax regarding included files and variables")
                    new_ds['vars'][k] = v
                else:
                    new_ds[k] = v

        return super(Task, self).preprocess_data(new_ds)
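As a concrete, hypothetical illustration of the deprecated include handling at the end of preprocess_data: extra top-level keys on an include task are folded into its vars rather than treated as task attributes.

# Hypothetical task datastructure using the pre-2.0 include-with-vars syntax.
ds = {'include': 'webservers.yml', 'http_port': 8080}

# After preprocess_data, roughly:
#   new_ds['action'] == 'include'
#   new_ds['vars']   == {'http_port': 8080}   # moved here, with a deprecation warning
#   'http_port' is no longer a top-level task attribute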
Exemple #43
0
    def preprocess_data(self, ds):
        '''
        tasks are especially complex arguments so need pre-processing.
        keep it short.
        '''

        assert isinstance(ds, dict)

        # the new, cleaned datastructure, which will have legacy
        # items reduced to a standard structure suitable for the
        # attributes of the task class
        new_ds = AnsibleMapping()
        if isinstance(ds, AnsibleBaseYAMLObject):
            new_ds.ansible_pos = ds.ansible_pos

        # use the args parsing class to determine the action, args,
        # and the delegate_to value from the various possible forms
        # supported as legacy
        args_parser = ModuleArgsParser(task_ds=ds)
        try:
            (action, args, delegate_to) = args_parser.parse()
        except AnsibleParserError as e:
            raise AnsibleParserError(to_str(e), obj=ds)

        # the command/shell/script modules used to support the `cmd` arg,
        # which corresponds to what we now call _raw_params, so move that
        # value over to _raw_params (assuming it is empty)
        if action in ('command', 'shell', 'script'):
            if 'cmd' in args:
                if args.get('_raw_params', '') != '':
                    raise AnsibleError(
                        "The 'cmd' argument cannot be used when other raw parameters are specified."
                        " Please put everything in one or the other place.",
                        obj=ds)
                args['_raw_params'] = args.pop('cmd')

        new_ds['action'] = action
        new_ds['args'] = args
        new_ds['delegate_to'] = delegate_to

        # we handle any 'vars' specified in the ds here, as we may
        # be adding things to them below (special handling for includes).
        # When that deprecated feature is removed, this can be too.
        if 'vars' in ds:
            # _load_vars is defined in Base, and is used to load a dictionary
            # or list of dictionaries in a standard way
            new_ds['vars'] = self._load_vars(None, ds.pop('vars'))
        else:
            new_ds['vars'] = dict()

        for (k, v) in iteritems(ds):
            if k in ('action', 'local_action', 'args',
                     'delegate_to') or k == action or k == 'shell':
                # we don't want to re-assign these values, which were
                # determined by the ModuleArgsParser() above
                continue
            elif k.replace("with_", "") in lookup_loader:
                self._preprocess_loop(ds, new_ds, k, v)
            else:
                # pre-2.0 syntax allowed variables for include statements at the
                # top level of the task, so we move those into the 'vars' dictionary
                # here, and show a deprecation message as we will remove this at
                # some point in the future.
                if action == 'include' and k not in self._get_base_attributes(
                ) and k not in self.DEPRECATED_ATTRIBUTES:
                    display.deprecated(
                        "Specifying include variables at the top-level of the task is deprecated."
                        " Please see:\nhttp://docs.ansible.com/ansible/playbooks_roles.html#task-include-files-and-encouraging-reuse\n\n"
                        " for currently supported syntax regarding included files and variables"
                    )
                    new_ds['vars'][k] = v
                else:
                    new_ds[k] = v

        return super(Task, self).preprocess_data(new_ds)
Exemple #44
0
    def _parse(self, err):

        all_hosts = {}

        # not passing from_remote because data from CMDB is trusted
        try:
            self.raw = self._loader.load(self.data)
        except Exception as e:
            sys.stderr.write(err + "\n")
            raise AnsibleError("failed to parse executable inventory script results from {0}: {1}".format(to_str(self.filename), to_str(e)))

        if not isinstance(self.raw, Mapping):
            sys.stderr.write(err + "\n")
            raise AnsibleError("failed to parse executable inventory script results from {0}: data needs to be formatted as a json dict".format(to_str(self.filename)))

        self.raw  = json_dict_bytes_to_unicode(self.raw)

        group = None
        for (group_name, data) in self.raw.items():

            # in Ansible 1.3 and later, a "_meta" subelement may contain
            # a variable "hostvars" which contains a hash for each host
            # if this "hostvars" exists at all then do not call --host for each
            # host.  This is for efficiency and scripts should still return data
            # if called with --host for backwards compat with 1.2 and earlier.

            if group_name == '_meta':
                if 'hostvars' in data:
                    self.host_vars_from_top = data['hostvars']
                    continue

            if group_name not in self.groups:
                group = self.groups[group_name] = Group(group_name)

            group = self.groups[group_name]
            host = None

            if not isinstance(data, dict):
                data = {'hosts': data}
            # if none of those subkeys are present, this is the simplified syntax: a host with vars
            elif not any(k in data for k in ('hosts','vars','children')):
                data = {'hosts': [group_name], 'vars': data}

            if 'hosts' in data:
                if not isinstance(data['hosts'], list):
                    raise AnsibleError("You defined a group \"%s\" with bad "
                        "data for the host list:\n %s" % (group_name, data))

                for hostname in data['hosts']:
                    if hostname not in all_hosts:
                        all_hosts[hostname] = Host(hostname)
                    host = all_hosts[hostname]
                    group.add_host(host)

            if 'vars' in data:
                if not isinstance(data['vars'], dict):
                    raise AnsibleError("You defined a group \"%s\" with bad "
                        "data for variables:\n %s" % (group_name, data))

                for k, v in iteritems(data['vars']):
                    group.set_variable(k, v)

        # Separate loop to ensure all groups are defined
        for (group_name, data) in self.raw.items():
            if group_name == '_meta':
                continue
            if isinstance(data, dict) and 'children' in data:
                for child_name in data['children']:
                    if child_name in self.groups:
                        self.groups[group_name].add_child_group(self.groups[child_name])

        # Finally, add all top-level groups as children of 'all'.
        # We exclude ungrouped here because it was already added as a child of
        # 'all' at the time it was created.

        for group in self.groups.values():
            if group.depth == 0 and group.name not in ('all', 'ungrouped'):
                self.groups['all'].add_child_group(group)
Exemple #45
0
    def _get_server_api_version(self):
        """
        Fetches the Galaxy API current version to ensure
        the API server is up and reachable.
        """
        url = '%s/api/' % self._api_server
        try:
            return_data = open_url(url, validate_certs=self._validate_certs)
        except Exception as e:
            raise AnsibleError("Failed to get data from the API server (%s): %s " % (url, to_str(e)))

        try:
            data = json.load(return_data)
        except Exception as e:
            raise AnsibleError("Could not process data from the API server (%s): %s " % (url, to_str(e)))

        if not 'current_version' in data:
            raise AnsibleError("missing required 'current_version' from server response (%s)" % url)

        return data['current_version']
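For context, the only field this check relies on is current_version. A minimal payload the code above would accept (every other field is invented) looks like this:

# Hypothetical /api/ response body; only 'current_version' is required by the check above.
api_response = {
    "current_version": "v1",
    "description": "GALAXY REST API",   # illustrative, not required
}
assert 'current_version' in api_response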
Exemple #46
0
    def fetch_file(self, in_path, out_path):
        super(Connection, self).fetch_file(in_path, out_path)
        in_path = self._shell._unquote(in_path)
        out_path = out_path.replace('\\', '/')
        display.vvv('FETCH "%s" TO "%s"' % (in_path, out_path), host=self._winrm_host)
        buffer_size = 2**19 # 0.5MB chunks
        makedirs_safe(os.path.dirname(out_path))
        out_file = None
        try:
            offset = 0
            while True:
                try:
                    script = '''
                        If (Test-Path -PathType Leaf "%(path)s")
                        {
                            $stream = [System.IO.File]::OpenRead("%(path)s");
                            $stream.Seek(%(offset)d, [System.IO.SeekOrigin]::Begin) | Out-Null;
                            $buffer = New-Object Byte[] %(buffer_size)d;
                            $bytesRead = $stream.Read($buffer, 0, %(buffer_size)d);
                            $bytes = $buffer[0..($bytesRead-1)];
                            [System.Convert]::ToBase64String($bytes);
                            $stream.Close() | Out-Null;
                        }
                        ElseIf (Test-Path -PathType Container "%(path)s")
                        {
                            Write-Host "[DIR]";
                        }
                        Else
                        {
                            Write-Error "%(path)s does not exist";
                            Exit 1;
                        }
                    ''' % dict(buffer_size=buffer_size, path=self._shell._escape(in_path), offset=offset)
                    display.vvvvv('WINRM FETCH "%s" to "%s" (offset=%d)' % (in_path, out_path, offset), host=self._winrm_host)
                    cmd_parts = self._shell._encode_script(script, as_list=True)
                    result = self._winrm_exec(cmd_parts[0], cmd_parts[1:])
                    if result.status_code != 0:
                        raise IOError(to_str(result.std_err))
                    if result.std_out.strip() == '[DIR]':
                        data = None
                    else:
                        data = base64.b64decode(result.std_out.strip())
                    if data is None:
                        makedirs_safe(out_path)
                        break
                    else:
                        if not out_file:
                            # If out_path is a directory and we're expecting a file, bail out now.
                            if os.path.isdir(out_path):
                                break
                            out_file = open(out_path, 'wb')
                        out_file.write(data)
                        if len(data) < buffer_size:
                            break
                        offset += len(data)
                except Exception:
                    traceback.print_exc()
                    raise AnsibleError('failed to transfer file to "%s"' % out_path)
        finally:
            if out_file:
                out_file.close()
Exemple #47
0
    def _do_template(self, data, preserve_trailing_newlines=True, escape_backslashes=True, fail_on_undefined=None, overrides=None):

        # For preserving the number of input newlines in the output (used
        # later in this method)
        data_newlines = _count_newlines_from_end(data)

        if fail_on_undefined is None:
            fail_on_undefined = self._fail_on_undefined_errors

        try:
            # allows template header overrides to change jinja2 options.
            if overrides is None:
                myenv = self.environment.overlay()
            else:
                myenv = self.environment.overlay(overrides)

            # Get jinja env overrides from template
            if data.startswith(JINJA2_OVERRIDE):
                eol = data.find('\n')
                line = data[len(JINJA2_OVERRIDE):eol]
                data = data[eol+1:]
                for pair in line.split(','):
                    (key,val) = pair.split(':')
                    key = key.strip()
                    setattr(myenv, key, ast.literal_eval(val.strip()))

            #FIXME: add tests
            myenv.filters.update(self._get_filters())
            myenv.tests.update(self._get_tests())

            if escape_backslashes:
                # Allow users to specify backslashes in playbooks as "\\"
                # instead of as "\\\\".
                data = _escape_backslashes(data, myenv)

            try:
                t = myenv.from_string(data)
            except TemplateSyntaxError as e:
                raise AnsibleError("template error while templating string: %s. String: %s" % (to_str(e), to_str(data)))
            except Exception as e:
                if 'recursion' in to_str(e):
                    raise AnsibleError("recursive loop detected in template string: %s" % to_str(data))
                else:
                    return data

            t.globals['lookup']   = self._lookup
            t.globals['finalize'] = self._finalize

            jvars = AnsibleJ2Vars(self, t.globals)

            new_context = t.new_context(jvars, shared=True)
            rf = t.root_render_func(new_context)

            try:
                res = j2_concat(rf)
            except TypeError as te:
                if 'StrictUndefined' in to_str(te):
                    errmsg  = "Unable to look up a name or access an attribute in template string (%s).\n" % to_str(data)
                    errmsg += "Make sure your variable name does not contain invalid characters like '-': %s" % to_str(te)
                    raise AnsibleUndefinedVariable(errmsg)
                else:
                    debug("failing because of a type error, template data is: %s" % to_str(data))
                    raise AnsibleError("Unexpected templating type error occurred on (%s): %s" % (to_str(data),to_str(te)))

            if preserve_trailing_newlines:
                # The low level calls above do not preserve the newline
                # characters at the end of the input data, so we calculate
                # the difference in newlines and append them to the
                # resulting output for parity.
                #
                # jinja2 added a keep_trailing_newline option in 2.7 when
                # creating an Environment.  That would let us make this code
                # better (remove a single newline if
                # preserve_trailing_newlines is False).  Once we can depend on
                # that version being present, modify our code to set that when
                # initializing self.environment and remove a single trailing
                # newline here if preserve_trailing_newlines is False.
                res_newlines = _count_newlines_from_end(res)
                if data_newlines > res_newlines:
                    res += '\n' * (data_newlines - res_newlines)

            return res
        except (UndefinedError, AnsibleUndefinedVariable) as e:
            if fail_on_undefined:
                raise AnsibleUndefinedVariable(e)
            else:
                #TODO: return warning about undefined var
                return data
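The JINJA2_OVERRIDE branch above lets the first line of a template adjust the Jinja2 environment. Assuming the marker is the usual '#jinja2:' prefix, a template that would take that path looks like the sketch below; each comma-separated key:value pair is split on ':' and applied to the overlay environment through ast.literal_eval.

# Hypothetical template exercising the data.startswith(JINJA2_OVERRIDE) branch.
template_data = (
    "#jinja2: variable_start_string:'[%', variable_end_string:'%]'\n"
    "Listening on [% ansible_host %]:[% http_port %]\n"
)
# The override line is stripped off, then roughly:
#   setattr(myenv, 'variable_start_string', '[%')
#   setattr(myenv, 'variable_end_string', '%]')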
Exemple #48
0
    def run(self, tmp=None, task_vars=None):
        ''' handler for template operations '''
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        source = self._task.args.get('src', None)
        dest = self._task.args.get('dest', None)
        faf = self._task.first_available_file
        force = boolean(self._task.args.get('force', True))
        state = self._task.args.get('state', None)

        if state is not None:
            result['failed'] = True
            result['msg'] = "'state' cannot be specified on a template"
        elif (source is None and faf is not None) or dest is None:
            result['failed'] = True
            result['msg'] = "src and dest are required"
        elif faf:
            source = self._get_first_available_file(
                faf, task_vars.get('_original_file', None), 'templates')
            if source is None:
                result['failed'] = True
                result['msg'] = "could not find src in first_available_file list"
        else:
            try:
                source = self._find_needle('templates', source)
            except AnsibleError as e:
                result['failed'] = True
                result['msg'] = to_str(e)

        if 'failed' in result:
            return result

        # Expand any user home dir specification
        dest = self._remote_expand_user(dest)

        directory_prepended = False
        if dest.endswith(os.sep):
            directory_prepended = True
            base = os.path.basename(source)
            dest = os.path.join(dest, base)

        # template the source data locally & get ready to transfer
        try:
            with open(source, 'r') as f:
                template_data = to_unicode(f.read())

            try:
                template_uid = pwd.getpwuid(os.stat(source).st_uid).pw_name
            except:
                template_uid = os.stat(source).st_uid

            temp_vars = task_vars.copy()
            temp_vars['template_host'] = os.uname()[1]
            temp_vars['template_path'] = source
            temp_vars['template_mtime'] = datetime.datetime.fromtimestamp(
                os.path.getmtime(source))
            temp_vars['template_uid'] = template_uid
            temp_vars['template_fullpath'] = os.path.abspath(source)
            temp_vars['template_run_date'] = datetime.datetime.now()

            managed_default = C.DEFAULT_MANAGED_STR
            managed_str = managed_default.format(
                host=temp_vars['template_host'],
                uid=temp_vars['template_uid'],
                file=to_bytes(temp_vars['template_path']))
            temp_vars['ansible_managed'] = time.strftime(
                managed_str, time.localtime(os.path.getmtime(source)))

            # Create a new searchpath list to assign to the templar environment's file
            # loader, so that it knows about the other paths to find template files
            searchpath = [self._loader._basedir, os.path.dirname(source)]
            if self._task._role is not None:
                if C.DEFAULT_ROLES_PATH:
                    searchpath[:0] = C.DEFAULT_ROLES_PATH
                searchpath.insert(1, self._task._role._role_path)

            self._templar.environment.loader.searchpath = searchpath

            old_vars = self._templar._available_variables
            self._templar.set_available_variables(temp_vars)
            resultant = self._templar.template(template_data,
                                               preserve_trailing_newlines=True,
                                               escape_backslashes=False,
                                               convert_data=False)
            self._templar.set_available_variables(old_vars)
        except Exception as e:
            result['failed'] = True
            result['msg'] = type(e).__name__ + ": " + str(e)
            return result

        remote_user = task_vars.get(
            'ansible_ssh_user') or self._play_context.remote_user
        if not tmp:
            tmp = self._make_tmp_path(remote_user)
            self._cleanup_remote_tmp = True

        local_checksum = checksum_s(resultant)
        remote_checksum = self.get_checksum(dest,
                                            task_vars,
                                            not directory_prepended,
                                            source=source,
                                            tmp=tmp)
        if isinstance(remote_checksum, dict):
            # Error from remote_checksum is a dict.  Valid return is a str
            result.update(remote_checksum)
            return result

        diff = {}
        new_module_args = self._task.args.copy()

        if (remote_checksum == '1') or (force
                                        and local_checksum != remote_checksum):

            result['changed'] = True
            # if showing diffs, we need to get the remote value
            if self._play_context.diff:
                diff = self._get_diff_data(dest,
                                           resultant,
                                           task_vars,
                                           source_file=False)

            if not self._play_context.check_mode:  # do actual work through copy
                xfered = self._transfer_data(
                    self._connection._shell.join_path(tmp, 'source'),
                    resultant)

                # fix file permissions when the copy is done as a different user
                self._fixup_perms(tmp, remote_user, recursive=True)

                # run the copy module
                new_module_args.update(
                    dict(
                        src=xfered,
                        dest=dest,
                        original_basename=os.path.basename(source),
                        follow=True,
                    ), )
                result.update(
                    self._execute_module(module_name='copy',
                                         module_args=new_module_args,
                                         task_vars=task_vars,
                                         tmp=tmp,
                                         delete_remote_tmp=False))

            if result.get('changed', False) and self._play_context.diff:
                result['diff'] = diff

        else:
            # when running the file module based on the template data, we do
            # not want the source filename (the name of the template) to be used,
            # since this would mess up links, so we clear the src param and tell
            # the module to follow links.  When doing that, we have to set
            # original_basename to the template just in case the dest is
            # a directory.
            new_module_args.update(
                dict(
                    src=None,
                    original_basename=os.path.basename(source),
                    follow=True,
                ), )
            result.update(
                self._execute_module(module_name='file',
                                     module_args=new_module_args,
                                     task_vars=task_vars,
                                     tmp=tmp,
                                     delete_remote_tmp=False))

        self._remove_tmp_path(tmp)

        return result
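The ansible_managed value above is expanded in two passes: str.format fills the {host}/{uid}/{file} placeholders, then time.strftime expands any %-style date codes against the template's mtime. A sketch with a hypothetical managed string (the real one comes from C.DEFAULT_MANAGED_STR):

import time

# Hypothetical managed string; the real template is C.DEFAULT_MANAGED_STR.
managed_default = "Ansible managed: {file} modified on %Y-%m-%d by {uid} on {host}"

# Pass 1: str.format fills the brace placeholders.
managed_str = managed_default.format(host='build01', uid='deploy', file='nginx.conf.j2')

# Pass 2: strftime expands the %-codes (the real code uses the template's mtime).
ansible_managed = time.strftime(managed_str, time.localtime())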
Exemple #49
0
def makedirs_safe(path, mode=None):
    '''Safe way to create dirs in multiprocess/thread environments'''

    rpath = unfrackpath(path)
    if not os.path.exists(rpath):
        try:
            if mode:
                os.makedirs(rpath, mode)
            else:
                os.makedirs(rpath)
        except OSError as e:
            if e.errno != EEXIST:
                raise AnsibleError("Unable to create local directories(%s): %s" % (rpath, to_str(e)))
Exemple #50
0
    def run(self, tmp=None, task_vars=None):
        ''' handler for unarchive operations '''
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        source  = self._task.args.get('src', None)
        dest    = self._task.args.get('dest', None)
        remote_src = boolean(self._task.args.get('remote_src', False))
        creates = self._task.args.get('creates', None)

        # "copy" is deprecated in favor of "remote_src".
        if 'copy' in self._task.args:
            # They are mutually exclusive.
            if 'remote_src' in self._task.args:
                result['failed'] = True
                result['msg'] = "parameters are mutually exclusive: ('copy', 'remote_src')"
                return result
            # We will take the information from copy and store it in
            # the remote_src var to use later in this file.
            remote_src = not boolean(self._task.args.get('copy'))

        if source is None or dest is None:
            result['failed'] = True
            result['msg'] = "src (or content) and dest are required"
            return result

        remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user
        if not tmp:
            tmp = self._make_tmp_path(remote_user)
            self._cleanup_remote_tmp = True

        if creates:
            # do not run the command if the line contains creates=filename
            # and the filename already exists. This allows idempotence
            # of command executions.
            result = self._execute_module(module_name='stat', module_args=dict(path=creates), task_vars=task_vars)
            stat = result.get('stat', None)
            if stat and stat.get('exists', False):
                result['skipped'] = True
                result['msg'] = "skipped, since %s exists" % creates
                self._remove_tmp_path(tmp)
                return result

        dest = self._remote_expand_user(dest) # CCTODO: Fix path for Windows hosts.
        source = os.path.expanduser(source)

        if not remote_src:
            try:
                source = self._loader.get_real_file(self._find_needle('files', source))
            except AnsibleError as e:
                result['failed'] = True
                result['msg'] = to_str(e)
                self._remove_tmp_path(tmp)
                return result

        remote_checksum = self._remote_checksum(dest, all_vars=task_vars, follow=True)
        if remote_checksum == '4':
            result['failed'] = True
            result['msg'] = "python isn't present on the system.  Unable to compute checksum"
            self._remove_tmp_path(tmp)
            return result
        elif remote_checksum != '3':
            result['failed'] = True
            result['msg'] = "dest '%s' must be an existing dir" % dest
            self._remove_tmp_path(tmp)
            return result

        if not remote_src:
            # transfer the file to a remote tmp location
            tmp_src = self._connection._shell.join_path(tmp, 'source')
            self._transfer_file(source, tmp_src)

        # handle diff mode client side
        # handle check mode client side

        if not remote_src:
            # fix file permissions when the copy is done as a different user
            self._fixup_perms((tmp, tmp_src), remote_user)
            # Build temporary module_args.
            new_module_args = self._task.args.copy()
            new_module_args.update(
                dict(
                    src=tmp_src,
                    original_basename=os.path.basename(source),
                ),
            )

        else:
            new_module_args = self._task.args.copy()
            new_module_args.update(
                dict(
                    original_basename=os.path.basename(source),
                ),
            )

        # execute the unarchive module now, with the updated args
        result.update(self._execute_module(module_args=new_module_args, task_vars=task_vars))
        self._remove_tmp_path(tmp)
        return result
Exemple #51
0
    def run(self, tmp=None, task_vars=None):
        ''' handler for template operations '''
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        source = self._task.args.get('src', None)
        dest   = self._task.args.get('dest', None)
        faf    = self._task.first_available_file
        force  = boolean(self._task.args.get('force', True))
        state  = self._task.args.get('state', None)

        if state is not None:
            result['failed'] = True
            result['msg'] = "'state' cannot be specified on a template"
        elif (source is None and faf is not None) or dest is None:
            result['failed'] = True
            result['msg'] = "src and dest are required"
        elif faf:
            source = self._get_first_available_file(faf, task_vars.get('_original_file', None), 'templates')
            if source is None:
                result['failed'] = True
                result['msg'] = "could not find src in first_available_file list"
        else:
            try:
                source = self._find_needle('templates', source)
            except AnsibleError as e:
                result['failed'] = True
                result['msg'] = to_str(e)

        if 'failed' in result:
            return result

        # Expand any user home dir specification
        dest = self._remote_expand_user(dest)

        directory_prepended = False
        if dest.endswith(os.sep):
            directory_prepended = True
            base = os.path.basename(source)
            dest = os.path.join(dest, base)

        # template the source data locally & get ready to transfer
        try:
            with open(source, 'r') as f:
                template_data = to_unicode(f.read())

            try:
                template_uid = pwd.getpwuid(os.stat(source).st_uid).pw_name
            except:
                template_uid = os.stat(source).st_uid

            temp_vars = task_vars.copy()
            temp_vars['template_host']     = os.uname()[1]
            temp_vars['template_path']     = source
            temp_vars['template_mtime']    = datetime.datetime.fromtimestamp(os.path.getmtime(source))
            temp_vars['template_uid']      = template_uid
            temp_vars['template_fullpath'] = os.path.abspath(source)
            temp_vars['template_run_date'] = datetime.datetime.now()

            managed_default = C.DEFAULT_MANAGED_STR
            managed_str = managed_default.format(
                host = temp_vars['template_host'],
                uid  = temp_vars['template_uid'],
                file = to_bytes(temp_vars['template_path'])
            )
            temp_vars['ansible_managed'] = time.strftime(
                managed_str,
                time.localtime(os.path.getmtime(source))
            )

            # Create a new searchpath list to assign to the templar environment's file
            # loader, so that it knows about the other paths to find template files
            searchpath = [self._loader._basedir, os.path.dirname(source)]
            if self._task._role is not None:
                if C.DEFAULT_ROLES_PATH:
                    searchpath[:0] = C.DEFAULT_ROLES_PATH
                searchpath.insert(1, self._task._role._role_path)

            self._templar.environment.loader.searchpath = searchpath

            old_vars = self._templar._available_variables
            self._templar.set_available_variables(temp_vars)
            resultant = self._templar.template(template_data, preserve_trailing_newlines=True, escape_backslashes=False, convert_data=False)
            self._templar.set_available_variables(old_vars)
        except Exception as e:
            result['failed'] = True
            result['msg'] = type(e).__name__ + ": " + str(e)
            return result

        remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user
        if not tmp:
            tmp = self._make_tmp_path(remote_user)
            self._cleanup_remote_tmp = True

        local_checksum = checksum_s(resultant)
        remote_checksum = self.get_checksum(dest, task_vars, not directory_prepended, source=source, tmp=tmp)
        if isinstance(remote_checksum, dict):
            # Error from remote_checksum is a dict.  Valid return is a str
            result.update(remote_checksum)
            return result

        diff = {}
        new_module_args = self._task.args.copy()

        if (remote_checksum == '1') or (force and local_checksum != remote_checksum):

            result['changed'] = True
            # if showing diffs, we need to get the remote value
            if self._play_context.diff:
                diff = self._get_diff_data(dest, resultant, task_vars, source_file=False)

            if not self._play_context.check_mode: # do actual work through copy
                xfered = self._transfer_data(self._connection._shell.join_path(tmp, 'source'), resultant)

                # fix file permissions when the copy is done as a different user
                self._fixup_perms((tmp, xfered), remote_user)

                # run the copy module
                new_module_args.update(
                   dict(
                       src=xfered,
                       dest=dest,
                       original_basename=os.path.basename(source),
                       follow=True,
                    ),
                )
                result.update(self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, tmp=tmp, delete_remote_tmp=False))

            if result.get('changed', False) and self._play_context.diff:
                result['diff'] = diff

        else:
            # when running the file module based on the template data, we do
            # not want the source filename (the name of the template) to be used,
            # since this would mess up links, so we clear the src param and tell
            # the module to follow links.  When doing that, we have to set
            # original_basename to the template just in case the dest is
            # a directory.
            new_module_args.update(
                dict(
                    src=None,
                    original_basename=os.path.basename(source),
                    follow=True,
                ),
            )
            result.update(self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, tmp=tmp, delete_remote_tmp=False))

        self._remove_tmp_path(tmp)

        return result