def select_version(current: str, versions: Sequence[str]) -> str:
    """Chooses the most recent, acceptable version out of **versions**

    - must be newer than current (as defined by conda VersionOrder)
    - may only be a pre-release if current is pre-release
      (as defined by parse_version)
    - may only be "Legacy" (=strange) if current is Legacy
      (as defined by parse_version)
    """
    base = parse_version(current)
    base_is_legacy = isinstance(base, LegacyVersion)
    best = current
    best_order = VersionOrder(current)

    for candidate in versions:
        # ignore versions with local (FIXME)
        if "-" in candidate:
            continue
        parsed = parse_version(candidate)
        # allow prerelease only if current is prerelease
        if parsed.is_prerelease and not base.is_prerelease:
            continue
        # allow legacy only if current is legacy
        if isinstance(parsed, LegacyVersion) and not base_is_legacy:
            continue
        # using conda version order here as that's what will be
        # used by the package manager
        candidate_order = VersionOrder(candidate)
        if candidate_order > best_order:
            best, best_order = candidate, candidate_order

    return best
async def apply(self, recipe: Recipe): logger.debug("Checking for updates to %s - %s", recipe, recipe.version) # scan for available versions versions = await self.get_version_map(recipe) # (too slow) conflicts = self.check_version_pin_conflict(recipe, versions) # select apropriate most current version latest = self.select_version(recipe.version, versions.keys()) # add data for respective versions to recipe and recipe.orig recipe.version_data = versions[latest] if recipe.orig.version in versions: recipe.orig.version_data = versions[recipe.orig.version] else: recipe.orig.version_data = {} # check if the recipe is up to date if VersionOrder(latest) == VersionOrder(recipe.version): if not recipe.on_branch: raise self.UpToDate(recipe) # Update `url:`s without Jinja expressions (plain text) for fname in versions[latest]: recipe.replace(fname, versions[latest][fname]['link'], within=["source"]) # Update the version number itself. This will also usually update # `url:`s expressed with `{{version}}` tags. if not recipe.replace(recipe.version, latest, within=["package"]): # allow changes between dash/dot/underscore if recipe.replace(recipe.version, latest, within=["package"], with_fuzz=True): logger.warning("Recipe %s: replaced version with fuzz", recipe) recipe.reset_buildnumber() recipe.render() # Verify that the rendered recipe has the right version number if VersionOrder(recipe.version) != VersionOrder(latest): raise self.UpdateVersionFailure(recipe, recipe.orig.version, latest) # Verify that every url was modified for src, osrc in zip(ensure_list(recipe.meta['source']), ensure_list(recipe.orig.meta['source'])): for url, ourl in zip(ensure_list(src['url']), ensure_list(osrc['url'])): if url == ourl: raise self.UrlNotVersioned(recipe) return recipe
def _version_order( v: Union[str, float], ordering: Optional[List[str]] = None ) -> Union[int, VersionOrder, float]: if ordering is not None: return ordering.index(v) else: try: return VersionOrder(v) except: return v
def _version_order( v: Union[str, float], ordering: Optional[List[str]] = None ) -> Union[int, VersionOrder, float]: if ordering is not None: return ordering.index(v) else: if isinstance(v, str): v = v.replace(" ", ".").replace("*", "1") try: return VersionOrder(v) except: return v
def combine(old_data, new_data):
    """Merge two channeldata entries for the same package into one dict.

    When *old_data* is None, *new_data* is taken as-is.  Otherwise the
    entry with the newer version (per conda's VersionOrder, defaulting
    to "0") wins for descriptive fields; boolean fields are OR-ed,
    subdirs are unioned, timestamps maxed, and run_exports accumulated
    keyed by version.
    """
    if old_data is None:
        data = new_data
    else:
        data = {}
        # "newer" compares by conda VersionOrder; missing versions
        # default to "0"
        newer = VersionOrder(old_data.get("version", "0")) < VersionOrder(
            new_data.get("version", "0"))
        # boolean flags: true if either side has them set
        for field in CHANNELDATA_BINARY_FIELDS:
            data[field] = any(
                (new_data.get(field, False), old_data.get(field, False)))
        # collection-valued fields: prefer the newer side if non-empty,
        # else keep the old value (default empty dict)
        for field in ("keywords", "identifiers", "tags"):
            if newer and new_data.get(field):
                data[field] = new_data[field]
            else:
                data[field] = old_data.get(field, {})
        # optional fields: newer side wins when present, else fall back
        # to the old side when present
        for field in CHANNELDATA_OPTIONAL_FIELDS:
            if newer and field in new_data:
                data[field] = new_data[field]
            elif field in old_data:
                data[field] = old_data[field]
        # accumulate run_exports per version on top of the old mapping
        run_exports = old_data.get("run_exports", {})
        if "run_exports" in new_data:
            if new_data["run_exports"]:
                run_exports[new_data["version"]] = new_data["run_exports"]
        # NOTE(review): run_exports is carried over even when new_data
        # lacks the key — confirm this matches the intended merge policy
        data["run_exports"] = run_exports
        data["timestamp"] = max(old_data.get("timestamp", 0),
                                new_data.get("timestamp", 0))
        data["subdirs"] = sorted(
            list(
                set(new_data.get("subdirs", [])) |
                set(old_data.get("subdirs", []))))
        # keep keys in deterministic (sorted) order for stable output
        data = dict(sorted(data.items(), key=lambda item: item[0]))
    return data
def compare_recipe_to_pypi(recipe):
    """
    If it looks like a PyPI package, returns a tuple of (name,
    current_bioconda_version, latest_version_on_PyPI, needs_update).

    If it doesn't look like a PyPI package, then return None.

    If it looks like a PyPI package but the PyPI info can't be found
    (e.g., "python-wget") then a tuple is returned but with a value of
    None for the latest version on PyPI and None for needs_update.
    """
    meta = utils.load_meta(os.path.join(recipe, 'meta.yaml')).meta
    current = meta['package']['version']
    name = meta['package']['name']
    try:
        source_url = meta['source']['url']
    except KeyError:
        return None
    if 'pypi' not in source_url:
        return None
    # Use the canonical HTTPS endpoint; the legacy http://pypi.python.org
    # host is deprecated and only works via redirects.  A timeout keeps a
    # stalled PyPI connection from hanging the whole run.
    pypi = requests.get('https://pypi.org/pypi/' + name + '/json',
                        timeout=60)
    if pypi.status_code == 200:
        contents = pypi.json()
        latest = contents['info']['version']
        # compare with conda's VersionOrder, the same ordering the
        # package manager uses
        needs_update = VersionOrder(latest) > VersionOrder(current)
        return (name, current, latest, needs_update)
    # We could do something like strip a leading `python-` off the name
    # (e.g., for python-wget) but this won't work in all cases and there
    # aren't that many of them anyway. So we just report that nothing was
    # found.
    return (name, current, None, None)
def generate_readme(recipe_basedir, output_dir, folder, repodata, renderer):
    """Generates README.rst for the recipe in folder

    Args:
      recipe_basedir: Base directory containing the recipe folders
      output_dir: Directory under which the README.rst is written
      folder: Toplevel folder name in recipes directory
      repodata: RepoData object
      renderer: Renderer object

    Returns:
      Single-element list with the path of the written README.rst, or an
      empty list if no usable meta.yaml was found or parsing failed.
    """
    output_file = op.join(output_dir, folder, 'README.rst')

    # Select meta yaml: prefer folder/meta.yaml, otherwise the first
    # subfolder that contains one
    meta_fname = op.join(recipe_basedir, folder, 'meta.yaml')
    if not op.exists(meta_fname):
        for item in os.listdir(op.join(recipe_basedir, folder)):
            dname = op.join(recipe_basedir, folder, item)
            if op.isdir(dname):
                fname = op.join(dname, 'meta.yaml')
                if op.exists(fname):
                    meta_fname = fname
                    break
        else:
            # for-else: no subfolder had a meta.yaml
            logger.error("No 'meta.yaml' found in %s", folder)
            return []
    # path relative to recipe_basedir (currently unused below)
    meta_relpath = meta_fname[len(recipe_basedir) + 1:]

    # Read the meta.yaml file(s)
    try:
        recipe = Recipe.from_file(recipe_basedir, meta_fname)
    except RecipeError as e:
        logger.error("Unable to process %s: %s", meta_fname, e)
        return []

    # Format the README: collect version/build/dependency info for each
    # package produced by the recipe
    packages = []
    for package in sorted(list(set(recipe.package_names))):
        versions_in_channel = set(
            repodata.get_package_data(['version', 'build_number'],
                                      channels='bioconda', name=package))
        # newest (version, build_number) first
        sorted_versions = sorted(versions_in_channel,
                                 key=lambda x: (VersionOrder(x[0]), x[1]),
                                 reverse=True)
        if sorted_versions:
            # depends of the newest build; split "name constraint" pairs
            depends = [
                depstring.split(' ', 1) if ' ' in depstring
                else (depstring, '')
                for depstring in repodata.get_package_data(
                    'depends',
                    name=package,
                    version=sorted_versions[0][0],
                    build_number=sorted_versions[0][1],
                )[0]
            ]
        else:
            depends = []
        packages.append({
            'name': package,
            'versions': ['-'.join(str(w) for w in v)
                         for v in sorted_versions],
            'depends': depends,
        })

    template_options = {
        'name': recipe.name,
        'about': recipe.get('about', None),
        'extra': recipe.get('extra', None),
        'recipe': recipe,
        'packages': packages,
    }

    renderer.render_to_file(output_file, 'readme.rst_t', template_options)
    return [output_file]
def get_version(p):
    """Return the conda VersionOrder of the recipe located at path *p*.

    Reads ``meta.yaml`` from *p* without finalizing the metadata and
    wraps its ``package/version`` value in a ``VersionOrder``.
    """
    meta = load_first_metadata(os.path.join(p, 'meta.yaml'), finalize=False)
    return VersionOrder(meta.get_value('package/version'))
def max_vers(x):
    """Return the largest version in *x* under conda's VersionOrder."""
    return max(map(VersionOrder, x))
def generate_readme(folder, repodata, renderer):
    """Generates README.rst for the recipe in folder

    Args:
      folder: Toplevel folder name in recipes directory
      repodata: RepoData object
      renderer: Renderer object

    Returns:
      List of template_options for each concurrent version for which
      meta.yaml files exist in the recipe folder and its subfolders
    """
    # Subfolders correspond to different versions
    versions = []
    for sf in os.listdir(op.join(RECIPE_DIR, folder)):
        if not op.isdir(op.join(RECIPE_DIR, folder, sf)):
            # Not a folder
            continue
        try:
            LooseVersion(sf)
        except ValueError:
            logger.error("'{}' does not look like a proper version!"
                         "".format(sf))
            continue
        versions.append(sf)

    # Read the meta.yaml file(s)
    try:
        recipe = op.join(RECIPE_DIR, folder, "meta.yaml")
        if op.exists(recipe):
            metadata = MetaData(recipe)
            if metadata.version() not in versions:
                versions.insert(0, metadata.version())
        else:
            if versions:
                recipe = op.join(RECIPE_DIR, folder, versions[0], "meta.yaml")
                metadata = MetaData(recipe)
            else:
                # ignore non-recipe folders
                return []
    except UnableToParse as e:
        logger.error("Failed to parse recipe {}".format(recipe))
        raise e

    ## Get all versions and build numbers for data package
    # Select meta yaml: prefer folder/meta.yaml, otherwise the first
    # subfolder containing one
    meta_fname = op.join(RECIPE_DIR, folder, 'meta.yaml')
    if not op.exists(meta_fname):
        for item in os.listdir(op.join(RECIPE_DIR, folder)):
            dname = op.join(RECIPE_DIR, folder, item)
            if op.isdir(dname):
                fname = op.join(dname, 'meta.yaml')
                if op.exists(fname):
                    meta_fname = fname
                    break
        else:
            # for-else: no subfolder had a meta.yaml
            logger.error("No 'meta.yaml' found in %s", folder)
            return []
    # path relative to RECIPE_DIR (currently unused below)
    meta_relpath = meta_fname[len(RECIPE_DIR)+1:]

    # Read the meta.yaml file(s)
    try:
        recipe_object = Recipe.from_file(RECIPE_DIR, meta_fname)
    except RecipeError as e:
        logger.error("Unable to process %s: %s", meta_fname, e)
        return []

    # Format the README
    # NOTE(review): sorted_versions/depends are overwritten on every
    # loop pass, so only the last package's values survive below —
    # confirm this is intended for single-package data recipes
    for package in sorted(list(set(recipe_object.package_names))):
        versions_in_channel = set(
            repodata.get_package_data(['version', 'build_number'],
                                      channels='ggd-genomics',
                                      name=package))
        # oldest first (reverse=False); newest entry is sorted_versions[-1]
        sorted_versions = sorted(versions_in_channel,
                                 key=lambda x: (VersionOrder(x[0]), x[1]),
                                 reverse=False)
        if sorted_versions:
            depends = [
                depstring.split(' ', 1) if ' ' in depstring
                else (depstring, '')
                for depstring in repodata.get_package_data(
                    'depends',
                    name=package,
                    version=sorted_versions[0][0],
                    build_number=sorted_versions[0][1],
                )[0]
            ]
        else:
            depends = []

    # Format the README
    name = metadata.name()
    versions_in_channel = repodata.get_versions(name)

    template_options = {
        'name': name,
        'about': (metadata.get_section('about') or {}),
        'species': (metadata.get_section('about')["identifiers"]["species"]
                    if "species" in metadata.get_section('about')["identifiers"]
                    else {}),
        'genome_build': (metadata.get_section('about')["identifiers"]["genome-build"]
                         if "genome-build" in metadata.get_section('about')["identifiers"]
                         else {}),
        'ggd_channel': (metadata.get_section('about')["tags"]["ggd-channel"]
                        if "ggd-channel" in metadata.get_section('about')["tags"]
                        else "genomics"),
        'extra': (metadata.get_section('extra') or {}),
        'versions': ["-".join(str(w) for w in v) for v in sorted_versions],
        'gh_recipes': 'https://github.com/gogetdata/ggd-recipes/tree/master/recipes/',
        'recipe_path': op.dirname(op.relpath(metadata.meta_path, RECIPE_DIR)),
        'Package': '<a href="recipes/{0}/README.html">{0}</a>'.format(name)
    }

    renderer.render_to_file(
        op.join(OUTPUT_DIR, name, 'README.rst'),
        'readme.rst_t',
        template_options)

    # one template_options copy per version in the channel, annotated
    # with platform-availability icons
    recipes = []
    latest_version = "-".join(str(w) for w in sorted_versions[-1])
    for version, version_info in sorted(versions_in_channel.items()):
        t = template_options.copy()
        if 'noarch' in version_info:
            t.update({
                'Linux': ('<i class="fa fa-linux"></i>'
                          if 'linux' in version_info
                          else '<i class="fa fa-dot-circle-o"></i>'),
                'OSX': ('<i class="fa fa-apple"></i>'
                        if 'osx' in version_info
                        else '<i class="fa fa-dot-circle-o"></i>'),
                'NOARCH': ('<i class="fa fa-desktop"></i>'
                           if 'noarch' in version_info else ''),
                'Version': latest_version  ## The latest version
                #'Version': version
            })
        else:
            t.update({
                'Linux': ('<i class="fa fa-linux"></i>'
                          if 'linux' in version_info else ''),
                'OSX': ('<i class="fa fa-apple"></i>'
                        if 'osx' in version_info else ''),
                'NOARCH': ('<i class="fa fa-desktop"></i>'
                           if 'noarch' in version_info else ''),
                'Version': latest_version  ## The latest version
                #'Version': version
            })
        recipes.append(t)

    return recipes