Example #1
0
    def _buffered_exec_command(self, cmd, stdin=subprocess.PIPE):
        """Start *cmd* inside the jail and return the Popen object.

        Only needed to implement put_file()/get_file() without reading the
        whole file into memory.  Unlike exec_command() it does not wait for
        the process, so it loses niceties like an immediate exit code.
        """
        jail_cmd = [self.jexec_cmd]
        env_prefix = ''

        remote_user = self._play_context.remote_user
        if remote_user is not None:
            jail_cmd += ['-U', remote_user]
            # jexec -U switches the user but does not update the jail
            # environment, so point HOME at the right place explicitly
            env_prefix = 'HOME=~' + remote_user + ' '

        jail_cmd += [self.jail, self._play_context.executable, '-c', env_prefix + cmd]

        display.vvv("EXEC %s" % (jail_cmd,), host=self.jail)
        jail_cmd = [to_bytes(arg, errors='surrogate_or_strict') for arg in jail_cmd]
        return subprocess.Popen(jail_cmd, shell=False, stdin=stdin,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
Example #2
0
    def put_file(self, in_path, out_path):
        """Copy a local file into the jail by piping it through dd."""
        super(Connection, self).put_file(in_path, out_path)
        display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.jail)

        out_path = shlex_quote(self._prefix_login_path(out_path))
        try:
            with open(to_bytes(in_path, errors='surrogate_or_strict'), 'rb') as in_file:
                # dd needs count=0 for an empty source file
                count = '' if os.fstat(in_file.fileno()).st_size else ' count=0'
                try:
                    proc = self._buffered_exec_command('dd of=%s bs=%s%s' % (out_path, BUFSIZE, count), stdin=in_file)
                except OSError:
                    raise AnsibleError("jail connection requires dd command in the jail")
                try:
                    stdout, stderr = proc.communicate()
                except Exception:
                    traceback.print_exc()
                    raise AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
                if proc.returncode != 0:
                    raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, to_native(stdout), to_native(stderr)))
        except IOError:
            raise AnsibleError("file or module does not exist at: %s" % in_path)
Example #3
0
    def write(self, backup_file=None):
        """
        Persist the rendered crontab: to a backup file, to the managed
        cron.d file, or (for user crontabs) via the crontab command.
        """
        if backup_file:
            target = open(backup_file, 'wb')
        elif self.cron_file:
            target = open(self.b_cron_file, 'wb')
        else:
            tmp_fd, tmp_path = tempfile.mkstemp(prefix='crontab')
            os.chmod(tmp_path, int('0644', 8))
            target = os.fdopen(tmp_fd, 'wb')

        target.write(to_bytes(self.render()))
        target.close()

        # backups are only written out; nothing to install
        if backup_file:
            return

        # Add the entire crontab back to the user crontab
        if not self.cron_file:
            # quoting shell args for now but really this should be two non-shell calls.  FIXME
            (rc, out, err) = self.module.run_command(self._write_execute(tmp_path),
                                                     use_unsafe_shell=True)
            os.unlink(tmp_path)

            if rc != 0:
                self.module.fail_json(msg=err)

        # restore the expected SELinux context on managed cron.d files
        if self.module.selinux_enabled() and self.cron_file:
            self.module.set_default_selinux_context(self.cron_file, False)
Example #4
0
def check_output2(*popenargs, **kwargs):
    """Run a command capturing stdout and stderr, returning stdout bytes.

    Raises CalledProcessError on a non-zero exit code or when the output
    contains gpg's 'encryption failed: Unusable public key' message.
    Accepts an 'input' kwarg whose value is fed to the child's stdin.
    """
    for name in ('stdout', 'stderr'):
        if name in kwargs:
            raise ValueError('%s argument not allowed, it will be overridden.' % name)

    b_inputdata = None
    if 'input' in kwargs:
        if 'stdin' in kwargs:
            raise ValueError('stdin and input arguments may not both be used.')
        b_inputdata = to_bytes(kwargs.pop('input'), errors='surrogate_or_strict')
        kwargs['stdin'] = subprocess.PIPE

    proc = subprocess.Popen(*popenargs, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
    try:
        b_out, b_err = proc.communicate(b_inputdata)
    except Exception:
        # make sure the child does not outlive a communicate() failure
        proc.kill()
        proc.wait()
        raise

    retcode = proc.poll()
    failed = (retcode != 0
              or b'encryption failed: Unusable public key' in b_out
              or b'encryption failed: Unusable public key' in b_err)
    if failed:
        cmd = kwargs.get("args")
        if cmd is None:
            cmd = popenargs[0]
        raise subprocess.CalledProcessError(
            retcode,
            cmd,
            to_native(b_out + b_err, errors='surrogate_or_strict')
        )
    return b_out
    def get_data(self, path):
        """Return the raw bytes of the resource at absolute *path*, or None.

        As a concession to collection subpackages not requiring __init__.py,
        a request for a missing __init__.py whose parent directory exists
        yields empty content instead of None.

        :raises ValueError: on an empty or relative path
        """
        if not path:
            raise ValueError('a path must be specified')

        # TODO: ensure we're being asked for a path below something we own
        # TODO: try to handle redirects internally?

        if not path[0] == '/':
            # relative to current package, search package paths if possible (this may not be necessary)
            # candidate_paths = [os.path.join(ssp, path) for ssp in self._subpackage_search_paths]
            raise ValueError('relative resource paths not supported')
        else:
            candidate_paths = [path]

        for p in candidate_paths:
            b_path = to_bytes(p)
            if os.path.isfile(b_path):
                with open(b_path, 'rb') as fd:
                    return fd.read()
            # HACK: if caller asks for __init__.py and the parent dir exists, return empty content (this keeps consistency
            # with "collection subpackages don't require __init__.py" working everywhere with get_data
            elif b_path.endswith(b'__init__.py') and os.path.isdir(os.path.dirname(b_path)):
                # return bytes, matching the fd.read() branch above
                # (the loader get_data() protocol returns bytes)
                return b''

        return None
    def _send_content(
        self,
        request_type: str,
        content: Dict[str, Any],
    ):
        """Serialize *content* as a typed, sequenced message and append it to
        the output buffer with a Content-Length header."""
        content['type'] = request_type
        content['seq'] = self._seq_num

        b_body = to_bytes(json.dumps(content, separators=(',', ':')),
                          errors='surrogate_or_strict')
        b_header = b'Content-Length: %s' % to_bytes(len(b_body))
        # header, blank line, then the JSON payload
        self._out_buffer += b_header + b'\r\n\r\n' + b_body
Example #7
0
 def _run(self, args, stdin=None, expected_rc=0):
     """Invoke the CLI with *args*, returning (stdout, stderr) as text.

     Raises LPassException when the exit code differs from *expected_rc*.
     """
     proc = Popen([self.cli_path] + args, stdout=PIPE, stderr=PIPE, stdin=PIPE)
     b_out, b_err = proc.communicate(to_bytes(stdin))
     if proc.wait() != expected_rc:
         raise LPassException(b_err)
     return (to_text(b_out, errors='surrogate_or_strict'),
             to_text(b_err, errors='surrogate_or_strict'))
Example #8
0
 def read_shelve(self, shelve_filename, key):
     """
     Read the value of "key" from a shelve file.

     Returns None when the key is absent.
     """
     d = shelve.open(to_bytes(shelve_filename))
     try:
         return d.get(key, None)
     finally:
         # close the underlying dbm file even if the read raises
         d.close()
def set_module_args(args):
    """Inject *args* as the module's parameters by serializing them into
    basic._ANSIBLE_ARGS, filling in the internal keys tests rarely set."""
    args.setdefault('_ansible_remote_tmp', '/tmp')
    args.setdefault('_ansible_keep_remote_files', False)

    serialized = json.dumps({'ANSIBLE_MODULE_ARGS': args})
    basic._ANSIBLE_ARGS = to_bytes(serialized)
Example #10
0
 def get_raw(self, item_id, vault=None):
     """Fetch an item by id, optionally scoped to *vault*."""
     args = ["get", "item", item_id]
     if vault is not None:
         args.append('--vault={0}'.format(vault))
     if not self.logged_in:
         # reuse the session token obtained at signin time
         args.append(to_bytes('--session=') + self.token)
     rc, output, dummy = self._run(args)
     return output
 def _get_ocsp_must_staple(self):
     """Return (present, critical) for the CSR's OCSP Must Staple extension,
     or (None, False) when it is absent."""
     extensions = self.csr.get_extensions()
     matches = [
         ext for ext in extensions
         if to_bytes(ext.get_short_name()) == OPENSSL_MUST_STAPLE_NAME
         and to_bytes(ext) == OPENSSL_MUST_STAPLE_VALUE
     ]
     if OpenSSL.SSL.OPENSSL_VERSION_NUMBER < 0x10100000:
         # Older libssl has no short name for OCSP Must Staple, so fall
         # back to matching the raw DER value of unknown extensions
         matches += [
             ext for ext in extensions
             if ext.get_short_name() == b'UNDEF'
             and ext.get_data() == b'\x30\x03\x02\x01\x05'
         ]
     if not matches:
         return None, False
     return True, bool(matches[0].get_critical())
    def get(self):
        """Query CyberArk AIM via clipasswordsdk and return a single-element
        list holding a dict of the requested output fields.

        'passprops.<name>' fields are collected under a nested 'passprops'
        dict; everything else is a top-level key.

        :raises AnsibleError: when the SDK reports an error or is not installed
        """
        result_dict = {}

        try:
            all_parms = [
                CLIPASSWORDSDK_CMD,
                'GetPassword',
                '-p', 'AppDescs.AppID=%s' % self.appid,
                '-p', 'Query=%s' % self.query,
                '-o', self.output,
                '-d', self.b_delimiter]
            all_parms.extend(self.extra_parms)

            b_credential = b""
            b_all_params = [to_bytes(v) for v in all_parms]
            tmp_output, tmp_error = Popen(b_all_params, stdout=PIPE, stderr=PIPE, stdin=PIPE).communicate()

            if tmp_output:
                b_credential = to_bytes(tmp_output)

            if tmp_error:
                raise AnsibleError("ERROR => %s " % (tmp_error))

            # strip a single trailing newline from the SDK output
            if b_credential and b_credential.endswith(b'\n'):
                b_credential = b_credential[:-1]

            output_names = self.output.split(",")
            output_values = b_credential.split(self.b_delimiter)

            # pair each requested field name with its delimited value; index
            # (rather than zip) so a short value list still raises IndexError
            for i, output_name in enumerate(output_names):
                if output_name.startswith("passprops."):
                    # len("passprops.") == 10; nest under the passprops dict
                    prop_name = output_name[10:]
                    result_dict.setdefault("passprops", {})[prop_name] = to_native(output_values[i])
                else:
                    result_dict[output_name] = to_native(output_values[i])

        except subprocess.CalledProcessError as e:
            raise AnsibleError(e.output)
        except OSError as e:
            raise AnsibleError("ERROR - AIM not installed or clipasswordsdk not in standard location. ERROR=(%s) => %s " % (to_text(e.errno), e.strerror))

        return [result_dict]
Example #13
0
    def check_password_prompt(self, b_output):
        """Return True when *b_output* begins with one of the configured
        (or default) Kerberos password prompts."""
        l10n_prompts = self.get_option('prompt_l10n') or [
            "Kerberos password for .*@.*:"
        ]
        b_pattern = b"|".join(to_bytes(p) for p in l10n_prompts)
        return bool(re.match(b_pattern, b_output))
Example #14
0
    def check_password_prompt(self, b_output):
        ''' checks if the expected password prompt exists in b_output '''

        # FIXME: more accurate would be: 'doas (%s@' % remote_user
        # however become plugins don't have that information currently

        # Apply the fallback BEFORE converting: running the comprehension
        # first would raise TypeError when the option is unset (None)
        # instead of using the defaults.
        prompts = self.get_option('prompt_l10n') or [br'doas \(', br'Password:']
        b_prompt = b"|".join(to_bytes(p) for p in prompts)

        return bool(re.match(b_prompt, b_output))
 def _check_ocspMustStaple(extensions):
     # Verify the OCSP Must Staple extension matches the expected state.
     # NOTE(review): 'self' is not a parameter here — it must be captured
     # from an enclosing method's scope (nested helper); confirm in the
     # full file.
     oms_ext = [
         ext for ext in extensions
         if to_bytes(ext.get_short_name()) == OPENSSL_MUST_STAPLE_NAME
         and to_bytes(ext) == OPENSSL_MUST_STAPLE_VALUE
     ]
     if OpenSSL.SSL.OPENSSL_VERSION_NUMBER < 0x10100000:
         # Older versions of libssl don't know about OCSP Must Staple
         oms_ext.extend([
             ext for ext in extensions
             if ext.get_short_name() == b'UNDEF'
             and ext.get_data() == b'\x30\x03\x02\x01\x05'
         ])
     if self.ocspMustStaple:
         # extension must be present and its critical flag must match
         return len(oms_ext) > 0 and oms_ext[0].get_critical(
         ) == self.ocspMustStaple_critical
     else:
         # extension must be absent
         return len(oms_ext) == 0
def pyopenssl_normalize_name(name, short=False):
    """Resolve *name* to OpenSSL's canonical long name (when known), then
    map it through the project's normalization table (short or long)."""
    nid = OpenSSL._util.lib.OBJ_txt2nid(to_bytes(name))
    if nid != 0:
        # known object: replace with OpenSSL's long name for it
        b_name = OpenSSL._util.lib.OBJ_nid2ln(nid)
        name = to_text(OpenSSL._util.ffi.string(b_name))
    table = NORMALIZE_NAMES_SHORT if short else NORMALIZE_NAMES
    return table.get(name, name)
Example #17
0
    def parse(self, inventory, loader, path, cache=True):
        """Parse the VirtualBox inventory source at *path*.

        VM data comes either from the inventory cache or from running
        'VBoxManage list -l vms|runningvms'; results are fed to
        _populate_from_source and written back to the cache on a miss.
        """
        try:
            self._vbox_path = get_bin_path(self.VBOX)
        except ValueError as e:
            # VBoxManage binary not found on PATH
            raise AnsibleParserError(e)

        super(InventoryModule, self).parse(inventory, loader, path)

        cache_key = self.get_cache_key(path)

        config_data = self._read_config_data(path)

        # set _options from config data
        self._consume_options(config_data)

        source_data = None
        if cache:
            # caller permits caching; honor the plugin's own 'cache' option
            cache = self.get_option('cache')

        update_cache = False
        if cache:
            try:
                source_data = self._cache[cache_key]
            except KeyError:
                # cache miss: fetch fresh data and refresh the cache below
                update_cache = True

        if not source_data:
            # optional file holding the VM settings password (encrypted VMs)
            b_pwfile = to_bytes(self.get_option('settings_password_file'),
                                errors='surrogate_or_strict',
                                nonstring='passthru')
            running = self.get_option('running_only')

            # start getting data
            cmd = [self._vbox_path, b'list', b'-l']
            if running:
                cmd.append(b'runningvms')
            else:
                cmd.append(b'vms')

            if b_pwfile and os.path.exists(b_pwfile):
                cmd.append(b'--settingspwfile')
                cmd.append(b_pwfile)

            try:
                p = Popen(cmd, stdout=PIPE)
            except Exception as e:
                raise AnsibleParserError(to_native(e))

            # read VBoxManage output line by line for the populate step
            source_data = p.stdout.read().splitlines()

        using_current_cache = cache and not update_cache
        cacheable_results = self._populate_from_source(source_data,
                                                       using_current_cache)

        if update_cache:
            self._cache[cache_key] = cacheable_results
Example #18
0
def main():
    """Module entry point: run kinit for the supplied username/password and
    report its stdout/stderr/rc plus the detected Kerberos version."""
    module_args = dict(
        username=dict(type='str', required=True),
        password=dict(type='str', required=True, no_log=True),
    )
    module = AnsibleModule(
        argument_spec=module_args,
        required_together=[('username', 'password')],
    )

    # Debugging purposes, get the Kerberos version. On platforms like OpenSUSE this may not be on the PATH.
    try:
        process = subprocess.Popen(['krb5-config', '--version'],
                                   stdout=subprocess.PIPE)
        stdout, stderr = process.communicate()
        version = to_text(stdout)
    except OSError as e:
        # only swallow "command not found"; any other OSError is a real failure
        if e.errno != errno.ENOENT:
            raise
        version = 'Unknown (no krb5-config)'

    # Heimdal has a few quirks that we want to paper over in this module
    #     1. KRB5_TRACE does not work in any released version (<=7.7), we need to use a custom krb5.config to enable it
    #     2. When reading the password it reads from the pty not stdin by default causing an issue with subprocess. We
    #        can control that behaviour with '--password-file=STDIN'
    # NOTE(review): Darwin/FreeBSD are assumed to ship Heimdal — confirm this
    # heuristic still holds for the platforms this module targets.
    is_heimdal = os.uname()[0] in ['Darwin', 'FreeBSD']

    kinit_args = ['kinit']
    config = {}
    if is_heimdal:
        kinit_args.append('--password-file=STDIN')
        config['logging'] = {'krb5': 'FILE:/dev/stdout'}
    kinit_args.append(
        to_text(module.params['username'], errors='surrogate_or_strict'))

    with krb5_conf(module, config):
        # Weirdly setting KRB5_CONFIG in the modules environment block does not work unless we pass it in explicitly.
        # Take a copy of the existing environment to make sure the process has the same env vars as ours. Also set
        # KRB5_TRACE to output and debug logs helping to identify problems when calling kinit with MIT.
        kinit_env = os.environ.copy()
        kinit_env['KRB5_TRACE'] = '/dev/stdout'

        process = subprocess.Popen(kinit_args,
                                   stdin=subprocess.PIPE,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   env=kinit_env)
        # feed the password on stdin, newline-terminated
        stdout, stderr = process.communicate(
            to_bytes(module.params['password'], errors='surrogate_or_strict') +
            b'\n')
        rc = process.returncode

    module.exit_json(changed=True,
                     stdout=to_text(stdout),
                     stderr=to_text(stderr),
                     rc=rc,
                     version=version)
Example #19
0
def _get_collection_name_from_path(path):
    """
    Return the containing collection name for a given path, or None if the path is not below a configured collection, or
    the collection cannot be loaded (eg, the collection is masked by another of the same name higher in the configured
    collection roots).
    :param path: path to evaluate for collection containment
    :return: collection name or None
    """

    # compare full paths, since the imported package path will be an abspath
    parts = to_native(os.path.abspath(to_bytes(path))).split('/')

    # exactly one ansible_collections component must appear
    if parts.count('ansible_collections') != 1:
        return None

    ac_pos = parts.index('ansible_collections')

    # it must be followed by at least namespace and collection directories
    if len(parts) < ac_pos + 3:
        return None

    candidate = '.'.join(parts[ac_pos + 1:ac_pos + 3])

    try:
        # importable? then compare its real location against ours
        pkg = import_module('ansible_collections.' + candidate)
        pkg_dir = to_native(os.path.dirname(to_bytes(pkg.__file__)))
    except ImportError:
        return None

    # the original prefix up to and including the collection dir must equal
    # the imported package dir; otherwise this is an unconfigured root
    # (masked by a same-named collection higher in the roots)
    original_prefix = os.path.join('/', *parts[0:ac_pos + 3])
    if original_prefix != to_native(os.path.abspath(to_bytes(pkg_dir))):
        return None

    return candidate
Example #20
0
    def put_file(self, in_path, out_path):
        """ Transfer a file from local to docker container """
        self._set_conn_data()
        super(Connection, self).put_file(in_path, out_path)
        display.vvv("PUT %s TO %s" % (in_path, out_path),
                    host=self.get_option('remote_addr'))

        out_path = self._prefix_login_path(out_path)
        b_in_path = to_bytes(in_path, errors='surrogate_or_strict')
        if not os.path.exists(b_in_path):
            raise AnsibleFileNotFound("file or module does not exist: %s" %
                                      to_native(in_path))

        out_path = shlex_quote(out_path)
        # Older docker doesn't have native support for copying files into
        # running containers, so we use docker exec to implement this
        # Although docker version 1.8 and later provide support, the
        # owner and group of the files are always set to root
        with open(b_in_path, 'rb') as in_file:
            # dd needs count=0 when the source file is empty
            count = ' count=0' if not os.fstat(in_file.fileno()).st_size else ''
            exec_args = self._build_exec_cmd([
                self._play_context.executable, "-c",
                "dd of=%s bs=%s%s" % (out_path, BUFSIZE, count)
            ])
            b_exec_args = [to_bytes(a, errors='surrogate_or_strict') for a in exec_args]
            try:
                proc = subprocess.Popen(b_exec_args,
                                        stdin=in_file,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE)
            except OSError:
                raise AnsibleError(
                    "docker connection requires dd command in the container to put files"
                )
            stdout, stderr = proc.communicate()

            if proc.returncode != 0:
                raise AnsibleError(
                    "failed to transfer file %s to %s:\n%s\n%s" %
                    (to_native(in_path), to_native(out_path),
                     to_native(stdout), to_native(stderr)))
    def _module_file_from_path(leaf_name, path):
        """Resolve *leaf_name* under *path* to (module_path, has_code, package_path)."""
        package_path = os.path.join(to_native(path), to_native(leaf_name))

        # if the submodule is a package, assemble valid submodule paths, but stop looking for a module
        if os.path.isdir(to_bytes(package_path)):
            # prefer a real package init; otherwise mark it synthetic (no code)
            module_path = os.path.join(package_path, '__init__.py')
            if os.path.isfile(to_bytes(module_path)):
                return module_path, True, package_path
            return os.path.join(package_path, '__synthetic__'), False, package_path

        # plain module: the .py file must exist
        module_path = package_path + '.py'
        if not os.path.isfile(to_bytes(module_path)):
            raise ImportError('{0} not found at {1}'.format(leaf_name, path))
        return module_path, True, None
Example #22
0
    def get_zone_path(self):
        """Return the zone's root filesystem path (zonepath + '/root').

        Parses 'zoneadm -z <zone> list -p', whose output looks like:
        -:cswbuild:installed:/zones/cswbuild:479f3c4b-...:native:shared
        where the 4th colon-separated field is the zonepath.
        """
        process = subprocess.Popen([self.zoneadm_cmd, '-z', to_bytes(self.zone), 'list', '-p'],
                                   stdin=subprocess.PIPE,
                                   stdout=subprocess.PIPE, stderr=subprocess.PIPE)

        # communicate() reaps the child (no zombie) and yields bytes on
        # Python 3 -- decode before using str operations, since splitting
        # bytes with a str separator raises TypeError.
        stdout, dummy = process.communicate()
        path = stdout.decode('utf-8', errors='surrogateescape').splitlines()[0].split(':')[3]
        return path + '/root'
Example #23
0
def _get_collection_path(collection_name):
    """Return the filesystem directory of an installed collection.

    :param collection_name: a 'namespace.collection' string
    :raises ValueError: on a malformed name or an unimportable collection
    """
    collection_name = to_native(collection_name)
    looks_valid = (collection_name
                   and isinstance(collection_name, string_types)
                   and len(collection_name.split('.')) == 2)
    if not looks_valid:
        raise ValueError('collection_name must be a non-empty string of the form namespace.collection')
    try:
        collection_pkg = import_module('ansible_collections.' + collection_name)
    except ImportError:
        raise ValueError('unable to locate collection {0}'.format(collection_name))

    return to_native(os.path.dirname(to_bytes(collection_pkg.__file__)))
 def _check_keyUsage_(extensions, extName, expected, critical):
     """Check a usage-style extension against the expected usages and
     critical flag; True when present/absent state and contents match."""
     usages_ext = [ext for ext in extensions if ext.get_short_name() == extName]

     if not usages_ext:
         # absent: OK only when nothing was expected
         return not expected
     if not expected:
         # present, but nothing was expected
         return False

     # compare as sets of OpenSSL NIDs so order and spelling don't matter
     current = set(
         OpenSSL._util.lib.OBJ_txt2nid(to_bytes(usage.strip()))
         for usage in str(usages_ext[0]).split(',')
     )
     wanted = set(
         OpenSSL._util.lib.OBJ_txt2nid(to_bytes(usage))
         for usage in expected
     )
     return current == wanted and usages_ext[0].get_critical() == critical
Example #25
0
    def load_module(self, fullname):
        """Load a collection package module and attach its routing metadata
        (meta/runtime.yml, or ansible_builtin_runtime.yml for the synthetic
        ansible.builtin collection) as module._collection_meta."""
        if not _meta_yml_to_dict:
            raise ValueError(
                'ansible.utils.collection_loader._meta_yml_to_dict is not set')

        module = super(_AnsibleCollectionPkgLoader, self).load_module(fullname)

        module._collection_meta = {}
        # TODO: load collection metadata, cache in __loader__ state

        # _split_name holds the dotted parts; [1:3] is 'namespace.collection'
        collection_name = '.'.join(self._split_name[1:3])

        if collection_name == 'ansible.builtin':
            # ansible.builtin is a synthetic collection, get its routing config from the Ansible distro
            ansible_pkg_path = os.path.dirname(
                import_module('ansible').__file__)
            metadata_path = os.path.join(ansible_pkg_path,
                                         'config/ansible_builtin_runtime.yml')
            with open(to_bytes(metadata_path), 'rb') as fd:
                raw_routing = fd.read()
        else:
            b_routing_meta_path = to_bytes(
                os.path.join(module.__path__[0], 'meta/runtime.yml'))
            if os.path.isfile(b_routing_meta_path):
                with open(b_routing_meta_path, 'rb') as fd:
                    raw_routing = fd.read()
            else:
                # a missing runtime.yml is fine; metadata stays empty
                raw_routing = ''
        try:
            if raw_routing:
                routing_dict = _meta_yml_to_dict(
                    raw_routing, (collection_name, 'runtime.yml'))
                module._collection_meta = self._canonicalize_meta(routing_dict)
        except Exception as ex:
            raise ValueError('error parsing collection metadata: {0}'.format(
                to_native(ex)))

        # notify registered listeners that this collection has been loaded
        AnsibleCollectionConfig.on_collection_load.fire(
            collection_name=collection_name,
            collection_path=os.path.dirname(module.__file__))

        return module
    def get_validation_data(self, client, identifier_type, identifier):
        """Build the validation payload for this challenge, or None when the
        challenge does not apply to the identifier (or is unknown)."""
        token = re.sub(r"[^A-Za-z0-9_\-]", "_", self.token)
        key_authorization = create_key_authorization(client, token)

        if self.type == 'http-01':
            # https://tools.ietf.org/html/rfc8555#section-8.3
            return {
                'resource': '.well-known/acme-challenge/{token}'.format(token=token),
                'resource_value': key_authorization,
            }

        if self.type == 'dns-01':
            if identifier_type != 'dns':
                return None
            # https://tools.ietf.org/html/rfc8555#section-8.4
            resource = '_acme-challenge'
            value = nopad_b64(hashlib.sha256(to_bytes(key_authorization)).digest())
            if identifier.startswith('*.'):
                # wildcard: the challenge label replaces the '*' label
                record = resource + identifier[1:]
            else:
                record = '{0}.{1}'.format(resource, identifier)
            return {
                'resource': resource,
                'resource_value': value,
                'record': record,
            }

        if self.type == 'tls-alpn-01':
            # https://www.rfc-editor.org/rfc/rfc8737.html#section-3
            if identifier_type == 'ip':
                # IPv4/IPv6 address: use reverse mapping (RFC1034, RFC3596)
                resource = compat_ipaddress.ip_address(identifier).reverse_pointer
                if not resource.endswith('.'):
                    resource += '.'
            else:
                resource = identifier
            value = base64.b64encode(hashlib.sha256(to_bytes(key_authorization)).digest())
            return {
                'resource': resource,
                'resource_original': combine_identifier(identifier_type, identifier),
                'resource_value': value,
            }

        # challenge type we don't know how to prepare
        return None
Example #27
0
    def __init__(self, module, user=None, cron_file=None):
        """Initialize crontab state, resolve the crontab binary and load
        the existing entries via read()."""
        self.module = module
        self.user = user
        self.root = (os.getuid() == 0)
        self.lines = None
        self.ansible = "#Ansible: "
        self.n_existing = ''
        self.cron_cmd = self.module.get_bin_path('crontab', required=True)

        if not cron_file:
            self.cron_file = None
        elif os.path.isabs(cron_file):
            self.cron_file = cron_file
            self.b_cron_file = to_bytes(cron_file, errors='surrogate_or_strict')
        else:
            # bare filenames are placed under /etc/cron.d
            self.cron_file = os.path.join('/etc/cron.d', cron_file)
            self.b_cron_file = os.path.join(b'/etc/cron.d', to_bytes(cron_file, errors='surrogate_or_strict'))

        self.read()
 def load(self, path):
     '''
     Load lists of PEM certificates from a file or a directory.
     '''
     b_path = to_bytes(path, errors='surrogate_or_strict')
     if not os.path.isdir(b_path):
         self._load_file(b_path)
         return
     # walk the directory tree (following symlinks), loading every file
     for directory, dummy, files in os.walk(b_path, followlinks=True):
         for name in files:
             self._load_file(os.path.join(directory, name))
Example #29
0
    def _load_attrs(self):
        """ Normalize module 'attributes' so every value is a list of bytes. """
        def to_byte_list(value):
            # scalar values become single-element lists
            if isinstance(value, list):
                return [to_bytes(v) for v in value]
            return [to_bytes(value)]

        return dict(
            (name, to_byte_list(value))
            for name, value in self.module.params['attributes'].items()
        )
Example #30
0
        def yaml_to_dict(yaml, content_id):
            """
            Return a Python dict version of the provided YAML.
            Conversion is done in a subprocess since the current Python interpreter does not have access to PyYAML.
            """
            # NOTE: yaml_to_dict_cache, external_python, yaml_to_json_path and
            # object_hook come from the enclosing scope (not visible here).
            if content_id in yaml_to_dict_cache:
                return yaml_to_dict_cache[content_id]

            try:
                # run the yaml->json converter under the external interpreter
                cmd = [external_python, yaml_to_json_path]
                proc = subprocess.Popen([to_bytes(c) for c in cmd], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                stdout_bytes, stderr_bytes = proc.communicate(to_bytes(yaml))

                if proc.returncode != 0:
                    raise Exception('command %s failed with return code %d: %s' % ([to_native(c) for c in cmd], proc.returncode, to_native(stderr_bytes)))

                # cache before returning so repeated conversions are free
                data = yaml_to_dict_cache[content_id] = json.loads(to_text(stdout_bytes), object_hook=object_hook)

                return data
            except Exception as ex:
                raise Exception('internal importer error - failed to parse yaml: %s' % to_native(ex))