def load_module(self, fullname):
    """
    Load the collection package and attach its routing metadata.

    Reads the collection's routing config (meta/runtime.yml inside the
    collection, or the distro-bundled assible_builtin_runtime.yml for the
    synthetic assible.builtin collection) and stores the canonicalized
    result on the loaded module as ``_collection_meta``.

    :param fullname: fully-qualified Python package name for the collection
    :return: the loaded module object
    :raises ValueError: if the _meta_yml_to_dict hook is unset, or the
        routing metadata fails to parse
    """
    if not _meta_yml_to_dict:
        raise ValueError('assible.utils.collection_loader._meta_yml_to_dict is not set')

    module = super(_AssibleCollectionPkgLoader, self).load_module(fullname)

    # default to empty metadata; replaced below when routing config is found
    module._collection_meta = {}
    # TODO: load collection metadata, cache in __loader__ state

    # package name looks like assible_collections.<ns>.<coll>; keep <ns>.<coll>
    collection_name = '.'.join(self._split_name[1:3])

    if collection_name == 'assible.builtin':
        # assible.builtin is a synthetic collection, get its routing config from the Assible distro
        assible_pkg_path = os.path.dirname(import_module('assible').__file__)
        metadata_path = os.path.join(assible_pkg_path, 'config/assible_builtin_runtime.yml')
        with open(to_bytes(metadata_path), 'rb') as fd:
            raw_routing = fd.read()
    else:
        b_routing_meta_path = to_bytes(os.path.join(module.__path__[0], 'meta/runtime.yml'))
        if os.path.isfile(b_routing_meta_path):
            with open(b_routing_meta_path, 'rb') as fd:
                raw_routing = fd.read()
        else:
            # no routing file is fine; a falsy value skips parsing below
            raw_routing = ''
    try:
        if raw_routing:
            routing_dict = _meta_yml_to_dict(raw_routing, (collection_name, 'runtime.yml'))
            module._collection_meta = self._canonicalize_meta(routing_dict)
    except Exception as ex:
        raise ValueError('error parsing collection metadata: {0}'.format(to_native(ex)))

    # notify any registered listeners that this collection has been loaded
    AssibleCollectionConfig.on_collection_load.fire(collection_name=collection_name, collection_path=os.path.dirname(module.__file__))

    return module
def yaml_to_dict(yaml, content_id):
    """
    Return a Python dict version of the provided YAML.

    Conversion is done in a subprocess since the current Python interpreter
    does not have access to PyYAML. Results are memoized per content_id.
    """
    try:
        return yaml_to_dict_cache[content_id]
    except KeyError:
        pass

    try:
        cmd = [external_python, yaml_to_json_path]
        b_cmd = [to_bytes(c) for c in cmd]
        proc = subprocess.Popen(b_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout_bytes, stderr_bytes = proc.communicate(to_bytes(yaml))

        if proc.returncode != 0:
            raise Exception(
                'command %s failed with return code %d: %s' % ([to_native(c) for c in cmd], proc.returncode, to_native(stderr_bytes)))

        parsed = json.loads(to_text(stdout_bytes), object_hook=object_hook)
        yaml_to_dict_cache[content_id] = parsed
        return parsed
    except Exception as ex:
        raise Exception('internal importer error - failed to parse yaml: %s' % to_native(ex))
def __init__(self, paths=None, scan_sys_paths=True):
    """
    Record the configured collection root paths for later scanning.

    :param paths: a single path or list of paths to search for collections
    :param scan_sys_paths: also append sys.path entries containing an
        assible_collections package
    """
    # TODO: accept metadata loader override
    self._assible_pkg_path = to_native(os.path.dirname(to_bytes(sys.modules['assible'].__file__)))

    if isinstance(paths, string_types):
        configured = [paths]
    elif paths is None:
        configured = []
    else:
        configured = paths

    # expand any placeholders in configured paths
    configured = [os.path.expanduser(to_native(p, errors='surrogate_or_strict')) for p in configured]

    if scan_sys_paths:
        # append all sys.path entries with an assible_collections package
        for candidate in sys.path:
            if candidate in configured:
                continue
            b_coll_pkg = to_bytes(os.path.join(candidate, 'assible_collections'), errors='surrogate_or_strict')
            if os.path.isdir(b_coll_pkg):
                configured.append(candidate)

    self._n_configured_paths = configured
    self._n_cached_collection_paths = None
    self._n_cached_collection_qualified_paths = None

    self._n_playbook_paths = []
def _get_collection_role_path(role_name, collection_list=None):
    """
    Resolve a (possibly fully-qualified) role name to its on-disk location.

    :param role_name: role name, either bare or a fully-qualified collection ref
    :param collection_list: collections to search when role_name is unqualified
    :return: (resource, path, collection_name) tuple, or None if unresolved
    """
    acr = AssibleCollectionRef.try_parse_fqcr(role_name, 'role')

    if acr:
        # looks like a valid qualified collection ref; skip the collection_list
        collection_list = [acr.collection]
        subdirs = acr.subdirs
        resource = acr.resource
    elif collection_list:
        # treat as unqualified, loop through the collection search list to try and resolve
        resource = role_name
        subdirs = ''
    else:
        # not a FQ role and no collection search list spec'd, nothing to do
        return None

    for candidate_collection in collection_list:
        try:
            acr = AssibleCollectionRef(collection_name=candidate_collection, subdirs=subdirs, resource=resource, ref_type='role')
            # FIXME: error handling/logging; need to catch any import failures and move along
            pkg = import_module(acr.n_python_package_name)

            if pkg is not None:
                # the package is now loaded, get the collection's package and ask where it lives
                path = os.path.dirname(to_bytes(sys.modules[acr.n_python_package_name].__file__, errors='surrogate_or_strict'))
                return resource, to_text(path, errors='surrogate_or_strict'), candidate_collection
        except IOError:
            continue
        except Exception:
            # FIXME: pick out typical import errors first, then error logging
            continue

    return None
def get_data(self, path):
    """
    Return the contents of the resource at ``path`` as bytes.

    :param path: absolute path of the resource to read
    :return: file contents as bytes; b'' for a missing package __init__.py
        whose parent directory exists; None if nothing was found
    :raises ValueError: if path is empty or relative
    """
    if not path:
        raise ValueError('a path must be specified')

    # TODO: ensure we're being asked for a path below something we own
    # TODO: try to handle redirects internally?

    if not path[0] == '/':
        # relative to current package, search package paths if possible (this may not be necessary)
        # candidate_paths = [os.path.join(ssp, path) for ssp in self._subpackage_search_paths]
        raise ValueError('relative resource paths not supported')
    else:
        candidate_paths = [path]

    for p in candidate_paths:
        b_path = to_bytes(p)
        if os.path.isfile(b_path):
            with open(b_path, 'rb') as fd:
                return fd.read()
        # HACK: if caller asks for __init__.py and the parent dir exists, return empty bytes (this keeps consistency
        # with "collection subpackages don't require __init__.py" working everywhere with get_data
        elif b_path.endswith(b'__init__.py') and os.path.isdir(os.path.dirname(b_path)):
            # BUGFIX: was '' (text); get_data is a bytes-returning API and every
            # other return path here yields bytes, so return b'' for consistency
            return b''

    return None
def write(self, backup_file=None):
    """
    Write the crontab to the system. Saves all information.

    :param backup_file: when given, write the rendered crontab to this path
        and return without installing it
    """
    path = None
    if backup_file:
        fileh = open(backup_file, 'wb')
    elif self.cron_file:
        fileh = open(self.b_cron_file, 'wb')
    else:
        filed, path = tempfile.mkstemp(prefix='crontab')
        os.chmod(path, int('0644', 8))
        fileh = os.fdopen(filed, 'wb')

    # BUGFIX: use a context manager so the handle is closed even if write() raises
    with fileh:
        fileh.write(to_bytes(self.render()))

    # return if making a backup
    if backup_file:
        return

    # Add the entire crontab back to the user crontab
    if not self.cron_file:
        # quoting shell args for now but really this should be two non-shell calls.  FIXME
        (rc, out, err) = self.module.run_command(self._write_execute(path), use_unsafe_shell=True)
        os.unlink(path)

        if rc != 0:
            self.module.fail_json(msg=err)

    # set SELinux permissions
    if self.module.selinux_enabled() and self.cron_file:
        self.module.set_default_selinux_context(self.cron_file, False)
def main():
    """
    Assible module entry point: obtain a Kerberos TGT via kinit for the
    supplied username/password, exiting with kinit's rc and captured output.
    """
    module_args = dict(
        username=dict(type='str', required=True),
        password=dict(type='str', required=True, no_log=True),
    )
    module = AssibleModule(
        argument_spec=module_args,
        required_together=[('username', 'password')],
    )

    # Debugging purposes, get the Kerberos version. On platforms like OpenSUSE this may not be on the PATH.
    try:
        process = subprocess.Popen(['krb5-config', '--version'], stdout=subprocess.PIPE)
        stdout, stderr = process.communicate()
        version = to_text(stdout)
    except OSError as e:
        # ENOENT just means krb5-config is absent; anything else is unexpected
        if e.errno != errno.ENOENT:
            raise
        version = 'Unknown (no krb5-config)'

    # Heimdal has a few quirks that we want to paper over in this module
    # 1. KRB5_TRACE does not work in any released version (<=7.7), we need to use a custom krb5.config to enable it
    # 2. When reading the password it reads from the pty not stdin by default causing an issue with subprocess. We
    # can control that behaviour with '--password-file=STDIN'
    # NOTE(review): the OS-name check assumes macOS/FreeBSD always ship Heimdal — confirm
    is_heimdal = os.uname()[0] in ['Darwin', 'FreeBSD']

    kinit_args = ['kinit']
    config = {}
    if is_heimdal:
        kinit_args.append('--password-file=STDIN')
        config['logging'] = {'krb5': 'FILE:/dev/stdout'}
    kinit_args.append(to_text(module.params['username'], errors='surrogate_or_strict'))

    with krb5_conf(module, config):
        # Weirdly setting KRB5_CONFIG in the modules environment block does not work unless we pass it in explicitly.
        # Take a copy of the existing environment to make sure the process has the same env vars as ours. Also set
        # KRB5_TRACE to output and debug logs helping to identify problems when calling kinit with MIT.
        kinit_env = os.environ.copy()
        kinit_env['KRB5_TRACE'] = '/dev/stdout'

        # password is fed on stdin; trailing newline terminates the prompt read
        process = subprocess.Popen(kinit_args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=kinit_env)
        stdout, stderr = process.communicate(to_bytes(module.params['password'], errors='surrogate_or_strict') + b'\n')
        rc = process.returncode

    module.exit_json(changed=True, stdout=to_text(stdout), stderr=to_text(stderr), rc=rc, version=version)
def _module_file_from_path(leaf_name, path):
    """
    Locate the file backing a (sub)module name under the given path.

    :return: (module_path, has_code, package_path); has_code is False for a
        package directory lacking __init__.py (a '__synthetic__' placeholder
        path is returned instead), package_path is None for a plain module
    :raises ImportError: when no matching module file exists
    """
    package_path = os.path.join(to_native(path), to_native(leaf_name))

    # if the submodule is a package, assemble valid submodule paths, but stop looking for a module
    if os.path.isdir(to_bytes(package_path)):
        # is there a package init?
        init_path = os.path.join(package_path, '__init__.py')
        if os.path.isfile(to_bytes(init_path)):
            return init_path, True, package_path
        # no init; hand back a synthetic marker path and flag the lack of code
        return os.path.join(package_path, '__synthetic__'), False, package_path

    module_path = package_path + '.py'
    if not os.path.isfile(to_bytes(module_path)):
        raise ImportError('{0} not found at {1}'.format(leaf_name, path))
    return module_path, True, None
def __init__(self, module, user=None, cron_file=None):
    """Initialize crontab state for a user or a system cron.d file, then load it."""
    self.module = module
    self.user = user
    self.root = (os.getuid() == 0)
    self.lines = None
    self.assible = "#Assible: "
    self.n_existing = ''
    self.cron_cmd = self.module.get_bin_path('crontab', required=True)

    if not cron_file:
        self.cron_file = None
    elif os.path.isabs(cron_file):
        self.cron_file = cron_file
        self.b_cron_file = to_bytes(cron_file, errors='surrogate_or_strict')
    else:
        # relative names live under /etc/cron.d
        self.cron_file = os.path.join('/etc/cron.d', cron_file)
        self.b_cron_file = os.path.join(b'/etc/cron.d', to_bytes(cron_file, errors='surrogate_or_strict'))

    self.read()
def _get_collection_name_from_path(path):
    """
    Return the containing collection name for a given path, or None if the path is not below a configured
    collection, or the collection cannot be loaded (eg, the collection is masked by another of the same name
    higher in the configured collection roots).
    :param path: path to evaluate for collection containment
    :return: collection name or None
    """

    # FIXME: mess with realpath canonicalization or not?
    parts = to_native(path).split('/')

    if parts.count('assible_collections') != 1:
        return None

    ac_pos = parts.index('assible_collections')

    # make sure it's followed by at least a namespace and collection name
    if len(parts) < ac_pos + 3:
        return None

    candidate = '.'.join(parts[ac_pos + 1:ac_pos + 3])

    try:
        # we've got a name for it, now see if the path prefix matches what the loader sees
        imported_pkg_path = to_native(os.path.dirname(to_bytes(import_module('assible_collections.' + candidate).__file__)))
    except ImportError:
        return None

    # reassemble the original path prefix up the collection name; it should match what we just imported,
    # otherwise this is probably a collection root that's not configured
    if os.path.join('/', *parts[0:ac_pos + 3]) != imported_pkg_path:
        return None

    return candidate
def test_to_bytes_unsafe():
    """Unsafe text converts to unsafe bytes with the expected value."""
    result = to_bytes(AssibleUnsafeText(u'foo'))
    assert isinstance(result, AssibleUnsafeBytes)
    assert result == AssibleUnsafeBytes(b'foo')
def test_to_bytes(in_string, encoding, expected):
    """test happy path of encoding to bytes"""
    encoded = to_bytes(in_string, encoding)
    assert encoded == expected
def _get_subpackage_search_paths(self, candidate_paths):
    # filter candidate paths for existence (NB: silently ignoring package init code and same-named modules)
    existing_dirs = []
    for candidate in candidate_paths:
        if os.path.isdir(to_bytes(candidate)):
            existing_dirs.append(candidate)
    return existing_dirs