def from_yaml(data):
    if isinstance(data, string_types):
        # The ``text_type`` call here strips any custom
        # string wrapper class, so that CSafeLoader can
        # read the data
        return yaml_load(text_type(to_text(data, errors='surrogate_or_strict')))
    return data
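# A minimal, standalone sketch of the behaviour above, assuming ``yaml_load``
# wraps PyYAML's safe loader (preferring the C-accelerated CSafeLoader when
# libyaml is installed). ``from_yaml_sketch`` is a hypothetical name used only
# for this illustration; it is not part of the original API.
import yaml

try:
    from yaml import CSafeLoader as _SketchLoader  # C-accelerated, if available
except ImportError:
    from yaml import SafeLoader as _SketchLoader


def from_yaml_sketch(data):
    # Parse YAML when given text; pass already-parsed objects through unchanged.
    if isinstance(data, str):
        # str() strips any custom text subclass, analogous to the text_type()
        # call in the original helper.
        return yaml.load(str(data), Loader=_SketchLoader)
    return data


assert from_yaml_sketch("key: [1, 2, 3]") == {'key': [1, 2, 3]}
assert from_yaml_sketch({'already': 'parsed'}) == {'already': 'parsed'}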
def _get_meta_from_src_dir(
        b_path,  # type: bytes
):  # type: (...) -> Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]
    galaxy_yml = os.path.join(b_path, _GALAXY_YAML)
    if not os.path.isfile(galaxy_yml):
        raise LookupError(
            "The collection galaxy.yml path '{path!s}' does not exist.".
            format(path=to_native(galaxy_yml))
        )

    with open(galaxy_yml, 'rb') as manifest_file_obj:
        try:
            manifest = yaml_load(manifest_file_obj)
        except yaml.error.YAMLError as yaml_err:
            raise_from(
                AnsibleError(
                    "Failed to parse the galaxy.yml at '{path!s}' with "
                    'the following error:\n{err_txt!s}'.
                    format(
                        path=to_native(galaxy_yml),
                        err_txt=to_native(yaml_err),
                    ),
                ),
                yaml_err,
            )

    return _normalize_galaxy_yml_manifest(manifest, galaxy_yml)
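# A hedged illustration of what the parsed manifest can look like: the field
# names below are typical galaxy.yml collection metadata and are shown only as
# an example, not as the authoritative schema. Plain PyYAML stands in for
# ``yaml_load`` here.
import yaml

_EXAMPLE_GALAXY_YML = """
namespace: my_namespace
name: my_collection
version: 1.0.0
readme: README.md
authors:
  - Example Author <author@example.com>
"""

_example_manifest = yaml.safe_load(_EXAMPLE_GALAXY_YML)
assert _example_manifest['namespace'] == 'my_namespace'
assert _example_manifest['version'] == '1.0.0'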
def requirements(self):
    """
    Returns role requirements
    """
    if self._requirements is None:
        self._requirements = []
        for meta_requirements in self.META_REQUIREMENTS:
            meta_path = os.path.join(self.path, meta_requirements)
            if os.path.isfile(meta_path):
                try:
                    # a context manager keeps the file closed even when open() or the
                    # YAML parse fails (the original try/finally could reference an
                    # unbound ``f`` if open() itself raised)
                    with open(meta_path, 'r') as f:
                        self._requirements = yaml_load(f)
                except Exception:
                    display.vvvvv("Unable to load requirements for %s" % self.name)

                break

    if not isinstance(self._requirements, MutableSequence):
        raise AnsibleParserError(
            f"Expected role dependencies to be a list. Role {self} has meta/requirements.yml {self._requirements}"
        )

    return self._requirements
def _read_config_yaml_file(self, yml_file):
    # TODO: handle relative paths as relative to the directory containing the current playbook instead of CWD
    # Currently this is only used with absolute paths to the `ansible/config` directory
    yml_file = to_bytes(yml_file)
    if os.path.exists(yml_file):
        with open(yml_file, 'rb') as config_def:
            return yaml_load(config_def) or {}
    raise AnsibleError(
        "Missing base YAML definition file (bad install?): %s" % to_native(yml_file))
def _meta_yml_to_dict(yaml_string_data, content_id):
    """
    Converts string YAML dictionary to a Python dictionary. This function may be monkeypatched
    to another implementation by some tools (eg the import sanity test).
    :param yaml_string_data: a bytes-ish YAML dictionary
    :param content_id: a unique ID representing the content to allow other implementations to cache the output
    :return: a Python dictionary representing the YAML dictionary content
    """
    # NB: content_id is passed in, but not used by this implementation
    routing_dict = yaml_load(yaml_string_data)
    if not routing_dict:
        routing_dict = {}
    if not isinstance(routing_dict, Mapping):
        raise ValueError('collection metadata must be an instance of Python Mapping')
    return routing_dict
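# As the docstring notes, _meta_yml_to_dict may be monkeypatched, and
# ``content_id`` exists so replacements can cache parsed output. The sketch
# below is one hypothetical such replacement (``_cached_meta_yml_to_dict`` and
# ``_ROUTING_CACHE`` are illustrative names, not part of the original code).
import yaml

_ROUTING_CACHE = {}  # content_id -> parsed routing dict


def _cached_meta_yml_to_dict(yaml_string_data, content_id):
    if content_id not in _ROUTING_CACHE:
        parsed = yaml.safe_load(yaml_string_data) or {}
        if not isinstance(parsed, dict):
            raise ValueError('collection metadata must be an instance of Python Mapping')
        _ROUTING_CACHE[content_id] = parsed
    return _ROUTING_CACHE[content_id]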
def install_info(self):
    """
    Returns role install info
    """
    if self._install_info is None:
        info_path = os.path.join(self.path, self.META_INSTALL)
        if os.path.isfile(info_path):
            try:
                # a context manager keeps the file closed even when open() fails,
                # which the original try/finally did not guarantee
                with open(info_path, 'r') as f:
                    self._install_info = yaml_load(f)
            except Exception:
                display.vvvvv("Unable to load Galaxy install info for %s" % self.name)
                return False
    return self._install_info
def metadata(self):
    """
    Returns role metadata
    """
    if self._metadata is None:
        for path in self.paths:
            for meta_main in self.META_MAIN:
                meta_path = os.path.join(path, meta_main)
                if os.path.isfile(meta_path):
                    try:
                        with open(meta_path, 'r') as f:
                            self._metadata = yaml_load(f)
                    except Exception:
                        display.vvvvv("Unable to load metadata for %s" % self.name)
                        return False

                    break

    return self._metadata
def _read(self):
    action = 'Opened'
    if not os.path.isfile(self.b_file):
        # token file not found, create and chmod u+rw
        open(self.b_file, 'w').close()
        os.chmod(self.b_file, S_IRUSR | S_IWUSR)  # owner has +rw
        action = 'Created'

    with open(self.b_file, 'r') as f:
        config = yaml_load(f)

    display.vvv('%s %s' % (action, to_text(self.b_file)))

    if config and not isinstance(config, dict):
        display.vvv('Galaxy token file %s malformed, unable to read it' % to_text(self.b_file))
        return {}

    return config or {}
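# For context, the token file read above is itself YAML. The snippet below is a
# hedged guess at its shape (a single ``token`` key); it only demonstrates the
# round-trip with plain PyYAML, not the exact on-disk format guaranteed by Galaxy.
import yaml

_example_token_file = "token: abcdef1234567890\n"
assert yaml.safe_load(_example_token_file) == {'token': 'abcdef1234567890'}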
def _git_repo_info(repo_path):
    """ returns a string containing git branch, commit id and commit date """
    result = None
    if os.path.exists(repo_path):
        # Check if the .git is a file. If it is a file, it means that we are in a submodule structure.
        if os.path.isfile(repo_path):
            try:
                with open(repo_path) as f:
                    gitdir = yaml_load(f).get('gitdir')
                # The .git file may contain an absolute path.
                if os.path.isabs(gitdir):
                    repo_path = gitdir
                else:
                    repo_path = os.path.join(repo_path[:-4], gitdir)
            except (IOError, AttributeError):
                return ''
        with open(os.path.join(repo_path, "HEAD")) as f:
            line = f.readline().rstrip("\n")
            if line.startswith("ref:"):
                branch_path = os.path.join(repo_path, line[5:])
            else:
                branch_path = None

        if branch_path and os.path.exists(branch_path):
            branch = '/'.join(line.split('/')[2:])
            with open(branch_path) as f:
                commit = f.readline()[:10]
        else:
            # detached HEAD
            commit = line[:10]
            branch = 'detached HEAD'
            branch_path = os.path.join(repo_path, "HEAD")

        date = time.localtime(os.stat(branch_path).st_mtime)
        if time.daylight == 0:
            offset = time.timezone
        else:
            offset = time.altzone

        result = "({0} {1}) last updated {2} (GMT {3:+04d})".format(
            branch, commit, time.strftime("%Y/%m/%d %H:%M:%S", date), int(offset / -36))
    else:
        result = ''

    return result
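# Why a YAML loader works on ``.git`` here: in a submodule (or worktree)
# checkout, ``.git`` is not a directory but a one-line pointer file such as
# ``gitdir: ../.git/modules/mysubmodule``, which a YAML parser happens to read
# as a one-key mapping. A minimal illustration with plain PyYAML:
import yaml

_dot_git_file_contents = "gitdir: ../.git/modules/mysubmodule\n"
assert yaml.safe_load(_dot_git_file_contents) == {'gitdir': '../.git/modules/mysubmodule'}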
def requirements(self):
    """
    Returns role requirements
    """
    if self._requirements is None:
        self._requirements = []
        for meta_requirements in self.META_REQUIREMENTS:
            meta_path = os.path.join(self.path, meta_requirements)
            if os.path.isfile(meta_path):
                try:
                    # a context manager avoids referencing ``f`` before assignment
                    # when open() itself raises
                    with open(meta_path, 'r') as f:
                        self._requirements = yaml_load(f)
                except Exception:
                    display.vvvvv("Unable to load requirements for %s" % self.name)

                break

    return self._requirements
def _get_meta_from_src_dir(
        b_path,  # type: bytes
        require_build_metadata=True,  # type: bool
):  # type: (...) -> dict[str, t.Union[str, list[str], dict[str, str], None, t.Type[Sentinel]]]
    galaxy_yml = os.path.join(b_path, _GALAXY_YAML)
    if not os.path.isfile(galaxy_yml):
        raise LookupError(
            "The collection galaxy.yml path '{path!s}' does not exist.".format(
                path=to_native(galaxy_yml),
            ),
        )

    with open(galaxy_yml, 'rb') as manifest_file_obj:
        try:
            manifest = yaml_load(manifest_file_obj)
        except yaml.error.YAMLError as yaml_err:
            raise_from(
                AnsibleError(
                    "Failed to parse the galaxy.yml at '{path!s}' with "
                    'the following error:\n{err_txt!s}'.format(
                        path=to_native(galaxy_yml),
                        err_txt=to_native(yaml_err),
                    ),
                ),
                yaml_err,
            )

    if not isinstance(manifest, dict):
        if require_build_metadata:
            raise AnsibleError(f"The collection galaxy.yml at '{to_native(galaxy_yml)}' is incorrectly formatted.")
        # Valid build metadata is not required by ansible-galaxy list. Raise ValueError to fall back to implicit metadata.
        display.warning(f"The collection galaxy.yml at '{to_native(galaxy_yml)}' is incorrectly formatted.")
        raise ValueError(f"The collection galaxy.yml at '{to_native(galaxy_yml)}' is incorrectly formatted.")

    return _normalize_galaxy_yml_manifest(manifest, galaxy_yml, require_build_metadata)
def get_collections_galaxy_meta_info():
    meta_path = os.path.join(os.path.dirname(__file__), 'data', 'collections_galaxy_meta.yml')
    with open(to_bytes(meta_path, errors='surrogate_or_strict'), 'rb') as galaxy_obj:
        return yaml_load(galaxy_obj)
def install(self):
    if self.scm:
        # create tar file from scm url
        tmp_file = RoleRequirement.scm_archive_role(keep_scm_meta=context.CLIARGS['keep_scm_meta'], **self.spec)
    elif self.src:
        if os.path.isfile(self.src):
            tmp_file = self.src
        elif '://' in self.src:
            role_data = self.src
            tmp_file = self.fetch(role_data)
        else:
            role_data = self.api.lookup_role_by_name(self.src)
            if not role_data:
                raise AnsibleError("- sorry, %s was not found on %s." % (self.src, self.api.api_server))

            if role_data.get('role_type') == 'APP':
                # Container Role
                display.warning("%s is a Container App role, and should only be installed using Ansible "
                                "Container" % self.name)

            role_versions = self.api.fetch_role_related('versions', role_data['id'])
            if not self.version:
                # convert the version names to LooseVersion objects
                # and sort them to get the latest version. If there
                # are no versions in the list, we'll grab the head
                # of the master branch
                if len(role_versions) > 0:
                    loose_versions = [LooseVersion(a.get('name', None)) for a in role_versions]
                    try:
                        loose_versions.sort()
                    except TypeError:
                        raise AnsibleError(
                            'Unable to compare role versions (%s) to determine the most recent version due to incompatible version formats. '
                            'Please contact the role author to resolve versioning conflicts, or specify an explicit role version to '
                            'install.' % ', '.join([v.vstring for v in loose_versions]))
                    self.version = to_text(loose_versions[-1])
                elif role_data.get('github_branch', None):
                    self.version = role_data['github_branch']
                else:
                    self.version = 'master'
            elif self.version != 'master':
                if role_versions and to_text(self.version) not in [a.get('name', None) for a in role_versions]:
                    raise AnsibleError("- the specified version (%s) of %s was not found in the list of available versions (%s)." %
                                       (self.version, self.name, role_versions))

            # check if there's a source link/url for our role_version
            for role_version in role_versions:
                if role_version['name'] == self.version and 'source' in role_version:
                    self.src = role_version['source']
                if role_version['name'] == self.version and 'download_url' in role_version:
                    self.download_url = role_version['download_url']

            tmp_file = self.fetch(role_data)
    else:
        raise AnsibleError("No valid role data found")

    if tmp_file:

        display.debug("installing from %s" % tmp_file)

        if not tarfile.is_tarfile(tmp_file):
            raise AnsibleError("the downloaded file does not appear to be a valid tar archive.")
        else:
            role_tar_file = tarfile.open(tmp_file, "r")
            # verify the role's meta file
            meta_file = None
            members = role_tar_file.getmembers()
            # next find the metadata file
            for member in members:
                for meta_main in self.META_MAIN:
                    if meta_main in member.name:
                        # Look for parent of meta/main.yml
                        # Due to possibility of sub roles each containing meta/main.yml
                        # look for shortest length parent
                        meta_parent_dir = os.path.dirname(os.path.dirname(member.name))
                        if not meta_file:
                            archive_parent_dir = meta_parent_dir
                            meta_file = member
                        else:
                            if len(meta_parent_dir) < len(archive_parent_dir):
                                archive_parent_dir = meta_parent_dir
                                meta_file = member
            if not meta_file:
                raise AnsibleError("this role does not appear to have a meta/main.yml file.")
            else:
                try:
                    self._metadata = yaml_load(role_tar_file.extractfile(meta_file))
                except Exception:
                    raise AnsibleError("this role does not appear to have a valid meta/main.yml file.")

            paths = self.paths
            if self.path != paths[0]:
                # path can be passed through __init__
                # FIXME should this be done in __init__?
                paths[:0] = self.path
            paths_len = len(paths)
            for idx, path in enumerate(paths):
                self.path = path
                display.display("- extracting %s to %s" % (self.name, self.path))
                try:
                    if os.path.exists(self.path):
                        if not os.path.isdir(self.path):
                            raise AnsibleError("the specified roles path exists and is not a directory.")
                        elif not context.CLIARGS.get("force", False):
                            raise AnsibleError("the specified role %s appears to already exist. Use --force to replace it." % self.name)
                        else:
                            # using --force, remove the old path
                            if not self.remove():
                                raise AnsibleError("%s doesn't appear to contain a role.\n  please remove this directory manually if you really "
                                                   "want to put the role here." % self.path)
                    else:
                        os.makedirs(self.path)

                    # We strip off any higher-level directories for all of the files
                    # contained within the tar file here. The default is 'github_repo-target'.
                    # Gerrit instances, on the other hand, do not have a parent directory at all.
                    for member in members:
                        # we only extract files, and remove any relative path
                        # bits that might be in the file for security purposes
                        # and drop any containing directory, as mentioned above
                        if member.isreg() or member.issym():
                            n_member_name = to_native(member.name)
                            n_archive_parent_dir = to_native(archive_parent_dir)
                            n_parts = n_member_name.replace(n_archive_parent_dir, "", 1).split(os.sep)
                            n_final_parts = []
                            for n_part in n_parts:
                                # TODO if the condition triggers it produces a broken installation.
                                # It will create the parent directory as an empty file and will
                                # explode if the directory contains valid files.
                                # Leaving this as is since the whole module needs a rewrite.
                                if n_part != '..' and not n_part.startswith('~') and '$' not in n_part:
                                    n_final_parts.append(n_part)
                            member.name = os.path.join(*n_final_parts)
                            role_tar_file.extract(member, to_native(self.path))

                    # write out the install info file for later use
                    self._write_galaxy_install_info()
                    break
                except OSError as e:
                    if e.errno == errno.EACCES and idx < paths_len - 1:
                        continue
                    raise AnsibleError("Could not update files in %s: %s" % (self.path, to_native(e)))

            # return the parsed yaml metadata
            display.display("- %s was installed successfully" % str(self))
            if not (self.src and os.path.isfile(self.src)):
                try:
                    os.unlink(tmp_file)
                except (OSError, IOError) as e:
                    display.warning(u"Unable to remove tmp file (%s): %s" % (tmp_file, to_text(e)))
            return True

    return False