def test_latest_version_npm(
    name, inp, curr_ver, ver, source, urls, requests_mock, tmpdir,
):
    pmy = LazyJson(tmpdir.join("cf-scripts-test.json"))
    pmy.update(parse_meta_yaml(inp)["source"])
    pmy.update(
        {
            "feedstock_name": name,
            "version": curr_ver,
            "raw_meta_yaml": inp,
            "meta_yaml": parse_meta_yaml(inp),
        },
    )
    for url, text in urls.items():
        requests_mock.get(url, text=text)
    attempt = get_latest_version(name, pmy, [source])
    if ver is None:
        assert attempt["new_version"] is not False
        assert attempt["new_version"] != curr_ver
        assert VersionOrder(attempt["new_version"]) > VersionOrder(curr_ver)
    elif ver is False:
        assert attempt["new_version"] is ver
    else:
        assert ver == attempt["new_version"]
def run_test_yaml_migration(
    m, *, inp, output, kwargs, prb, mr_out, tmpdir, should_filter=False,
):
    os.makedirs(os.path.join(tmpdir, "recipe"), exist_ok=True)
    with open(os.path.join(tmpdir, "recipe", "meta.yaml"), "w") as f:
        f.write(inp)

    with indir(tmpdir):
        subprocess.run(["git", "init"])

    # Load the meta.yaml (this is done in the graph)
    try:
        pmy = parse_meta_yaml(inp)
    except Exception:
        pmy = {}
    if pmy:
        pmy["version"] = pmy["package"]["version"]
        pmy["req"] = set()
        for k in ["build", "host", "run"]:
            pmy["req"] |= set(pmy.get("requirements", {}).get(k, set()))
        try:
            pmy["meta_yaml"] = parse_meta_yaml(inp)
        except Exception:
            pmy["meta_yaml"] = {}
    pmy["raw_meta_yaml"] = inp
    pmy.update(kwargs)

    assert m.filter(pmy) is should_filter
    if should_filter:
        return

    mr = m.migrate(os.path.join(tmpdir, "recipe"), pmy)
    assert mr_out == mr

    pmy.update(PRed=[frozen_to_json_friendly(mr)])
    with open(os.path.join(tmpdir, "recipe/meta.yaml")) as f:
        actual_output = f.read()
    assert actual_output == output
    assert os.path.exists(
        os.path.join(tmpdir, ".ci_support/migrations/hi.yaml"),
    )
    with open(os.path.join(tmpdir, ".ci_support/migrations/hi.yaml")) as f:
        saved_migration = f.read()
    assert saved_migration == m.yaml_contents
def test_parse_cudnn(plat, arch, cfg, has_cudnn):
    recipe_dir = os.path.abspath(
        os.path.join(
            os.path.dirname(__file__),
            "pytorch-cpu-feedstock",
            "recipe",
        ),
    )
    with open(os.path.join(recipe_dir, "meta.yaml")) as fp:
        recipe_text = fp.read()
    meta = parse_meta_yaml(
        recipe_text,
        for_pinning=False,
        platform=plat,
        arch=arch,
        recipe_dir=recipe_dir,
        cbc_path=os.path.join(recipe_dir, "..", ".ci_support", cfg),
        log_debug=True,
    )
    if has_cudnn:
        assert any(
            "cudnn" in out.get("requirements", {}).get("host", [])
            for out in meta["outputs"]
        ), pprint.pformat(meta)
    else:
        assert all(
            "cudnn" not in out.get("requirements", {}).get("host", [])
            for out in meta["outputs"]
        ), pprint.pformat(meta)
def test_latest_version_rawurl(name, inp, curr_ver, ver, source, urls, tmpdir):
    pmy = LazyJson(tmpdir.join("cf-scripts-test.json"))
    pmy.update(parse_meta_yaml(inp)["source"])
    pmy.update(
        {
            "feedstock_name": name,
            "version": curr_ver,
            "raw_meta_yaml": inp,
            "meta_yaml": parse_meta_yaml(inp),
        },
    )
    attempt = get_latest_version(name, pmy, [source])
    if ver is None:
        assert attempt["new_version"] is not False
        assert attempt["new_version"] != curr_ver
        assert VersionOrder(attempt["new_version"]) > VersionOrder(curr_ver)
    elif ver is False:
        assert attempt["new_version"] is ver
    else:
        assert ver == attempt["new_version"]
def test_migration(m, inp, output, kwargs, prb, mr_out, should_filter, tmpdir):
    mr_out.update(bot_rerun=False)
    with open(os.path.join(tmpdir, "meta.yaml"), "w") as f:
        f.write(inp)

    # Load the meta.yaml (this is done in the graph)
    try:
        pmy = parse_meta_yaml(inp)
    except Exception:
        pmy = {}
    if pmy:
        pmy["version"] = pmy["package"]["version"]
        pmy["req"] = set()
        for k in ["build", "host", "run"]:
            pmy["req"] |= set(pmy.get("requirements", {}).get(k, set()))
        try:
            pmy["meta_yaml"] = parse_meta_yaml(inp)
        except Exception:
            pmy["meta_yaml"] = {}
    pmy["raw_meta_yaml"] = inp
    pmy.update(kwargs)

    assert m.filter(pmy) is should_filter
    if should_filter:
        return

    mr = m.migrate(tmpdir, pmy)
    assert mr_out == mr

    pmy.update(PRed=[frozen_to_json_friendly(mr)])
    with open(os.path.join(tmpdir, "meta.yaml"), "r") as f:
        actual_output = f.read()
    assert actual_output == output
    if isinstance(m, Compiler):
        assert m.messages in m.pr_body()
        # TODO: fix subgraph here (need this to be xsh file)
    elif isinstance(m, Version):
        pass
    elif isinstance(m, Rebuild):
        return
    else:
        assert prb in m.pr_body()
    assert m.filter(pmy) is True
def test_migration(m, inp, output, kwargs, prb, mr_out, tmpdir):
    with open(os.path.join(tmpdir, "meta.yaml"), "w") as f:
        f.write(inp)

    # Load the meta.yaml (this is done in the graph)
    try:
        pmy = parse_meta_yaml(inp)
    except Exception:
        pmy = {}
    if pmy:
        pmy["version"] = pmy["package"]["version"]
        pmy["req"] = set()
        for k in ["build", "host", "run"]:
            pmy["req"] |= set(pmy.get("requirements", {}).get(k, set()))
        try:
            pmy["meta_yaml"] = parse_meta_yaml(inp)
        except Exception:
            pmy["meta_yaml"] = {}
    pmy["raw_meta_yaml"] = inp
    pmy.update(kwargs)

    assert m.filter(pmy) is False

    mr = m.migrate(tmpdir, pmy)
    assert mr_out == mr

    pmy.update(PRed=[mr])
    with open(os.path.join(tmpdir, "meta.yaml"), "r") as f:
        assert f.read() == output
    if isinstance(m, Compiler):
        assert m.out in m.pr_body()
        # TODO: fix subgraph here (need this to be xsh file)
    elif isinstance(m, Version):
        pass
    else:
        assert prb in m.pr_body()
    assert m.filter(pmy) is True
def _fetch_and_parse_meta_yaml(name, organization=None, cached=False):
    from conda_forge_tick.utils import parse_meta_yaml

    if cached:
        filepath = f'./{name}-feedstock/recipe/meta.yaml'
        if not os.path.exists(filepath):
            filepath = filepath.replace('./', 'feedstocks/', 1)
        if not os.path.exists(filepath):
            raise RuntimeError(f'Cached feedstock {name} does not exist. Place '
                               'cloned repo in ./ or ./feedstocks/ and try again.')
        with open(filepath, 'r') as f:
            meta_yaml = f.read()
    else:
        if organization is None:
            raise ValueError(f'No organization provided for {name}')
        meta_yaml = _fetch_file(organization, name, 'recipe/meta.yaml')
        if isinstance(meta_yaml, requests.Response):
            return None
    return parse_meta_yaml(meta_yaml)
def test_latest_version(inp, ver, source, urls, requests_mock, tmpdir):
    pmy = LazyJson(tmpdir.join("cf-scripts-test.json"))
    pmy.update(parse_meta_yaml(inp)["source"])
    for url, text in urls.items():
        requests_mock.get(url, text=text)
    attempt = get_latest_version("configurable-http-proxy", pmy, [source])
    assert ver == attempt["new_version"]
def run_test_migration(
    m, inp, output, kwargs, prb, mr_out, should_filter=False, tmpdir=None,
):
    mm_ctx = MigratorSessionContext(
        graph=G,
        smithy_version="",
        pinning_version="",
        github_username="",
        github_password="",
        circle_build_url=env["CIRCLE_BUILD_URL"],
    )
    m_ctx = MigratorContext(mm_ctx, m)
    m.bind_to_ctx(m_ctx)

    if mr_out:
        mr_out.update(bot_rerun=False)
    with open(os.path.join(tmpdir, "meta.yaml"), "w") as f:
        f.write(inp)

    # read the conda-forge.yml
    if os.path.exists(os.path.join(tmpdir, "..", "conda-forge.yml")):
        with open(os.path.join(tmpdir, "..", "conda-forge.yml")) as fp:
            cf_yml = fp.read()
    else:
        cf_yml = "{}"

    # Load the meta.yaml (this is done in the graph)
    try:
        name = parse_meta_yaml(inp)["package"]["name"]
    except Exception:
        name = "blah"

    pmy = populate_feedstock_attributes(name, {}, inp, cf_yml)

    # these are here for legacy migrators
    pmy["version"] = pmy["meta_yaml"]["package"]["version"]
    pmy["req"] = set()
    for k in ["build", "host", "run"]:
        req = pmy["meta_yaml"].get("requirements", {}) or {}
        _set = req.get(k) or set()
        pmy["req"] |= set(_set)
    pmy["raw_meta_yaml"] = inp
    pmy.update(kwargs)

    assert m.filter(pmy) is should_filter
    if should_filter:
        return

    m.run_pre_piggyback_migrations(
        tmpdir, pmy, hash_type=pmy.get("hash_type", "sha256"),
    )
    mr = m.migrate(tmpdir, pmy, hash_type=pmy.get("hash_type", "sha256"))
    m.run_post_piggyback_migrations(
        tmpdir, pmy, hash_type=pmy.get("hash_type", "sha256"),
    )

    assert mr_out == mr
    if not mr:
        return

    pmy.update(PRed=[frozen_to_json_friendly(mr)])
    with open(os.path.join(tmpdir, "meta.yaml")) as f:
        actual_output = f.read()
    # strip jinja comments
    pat = re.compile(r"{#.*#}")
    actual_output = pat.sub("", actual_output)
    output = pat.sub("", output)
    assert actual_output == output
def migrate(
    self,
    recipe_dir: str,
    attrs: "AttrsTypedDict",
    hash_type: str = "sha256",
    **kwargs: Any,
) -> "MigrationUidTypedDict":
    # Render with new version but nothing else
    version = attrs["new_version"]
    assert isinstance(version, str)
    with indir(recipe_dir):
        with open("meta.yaml", "r") as fp:
            text = fp.read()

    res = re.search(r"\s*-?\s*url:.*?\n( {4}-.*\n?)*", text)
    if res:
        url = res.group()
    else:
        raise ValueError("Could not match url")
    if "cran.r-project.org/src/contrib" in url or "cran_mirror" in url:
        version = version.replace("_", "-")

    with indir(recipe_dir), env.swap(VERSION=version):
        for f, p, n in self.patterns:
            p = eval_version(p)
            n = eval_version(n)
            replace_in_file(p, n, f)
        with open("meta.yaml", "r") as fp:
            text = fp.read()

    # render the text and check that the URL exists; if it doesn't, try
    # variations and, if a variation works, update the url
    rendered = parse_meta_yaml(render_meta_yaml(text))
    # only run for single-url recipes at the moment
    if (
        isinstance(rendered["source"], dict)
        and isinstance(rendered["source"].get("url", []), str)
        and requests.get(rendered["source"]["url"]).status_code != 200
    ):
        with indir(recipe_dir):
            for (a, b), (c, d) in product(
                permutations(["v{{ v", "{{ v"]),
                permutations([".zip", ".tar.gz"]),
            ):
                inner_text = text.replace(a, b).replace(c, d)
                rendered = parse_meta_yaml(render_meta_yaml(inner_text))
                if requests.get(rendered["source"]["url"]).status_code == 200:
                    text = inner_text
                    # The above clauses could do bad things to the version
                    # itself
                    text = text.replace("version: v{{ v", "version: {{ v")
                    with open("meta.yaml", "w") as fp:
                        fp.write(text)
                    break

    # Get patterns to replace checksum for each platform
    rendered_text = render_meta_yaml(text)
    urls = self.find_urls(rendered_text)
    new_patterns = self.get_hash_patterns("meta.yaml", urls, hash_type)

    with indir(recipe_dir):
        for f, p, n in new_patterns:
            p = eval_version(p)
            n = eval_version(n)
            replace_in_file(p, n, f)
        self.set_build_number("meta.yaml")

    return super().migrate(recipe_dir, attrs)
def get_url(self, meta_yaml) -> Optional[str]: if "feedstock_name" not in meta_yaml: return None if "version" not in meta_yaml: return None # TODO: pull this from the graph itself content = meta_yaml["raw_meta_yaml"] # this while statment runs until a bad version is found # then it uses the previous one orig_urls = urls_from_meta(meta_yaml["meta_yaml"]) current_ver = meta_yaml["version"] current_sha256 = None orig_ver = current_ver found = True count = 0 max_count = 10 while found and count < max_count: found = False for next_ver in next_version(current_ver): logger.debug("trying version: %s", next_ver) new_content = content.replace(orig_ver, next_ver) new_meta = parse_meta_yaml(new_content) new_urls = urls_from_meta(new_meta) if len(new_urls) == 0: logger.debug("No URL in meta.yaml") meta_yaml["bad"] = "Upstream: no url in yaml" return None url_to_use = None for url in urls_from_meta(new_meta): # this URL looks bad if these things happen if (str(new_meta["package"]["version"]) != next_ver or meta_yaml["url"] == url or url in orig_urls): continue logger.debug("trying url: %s", url) _exists, _url_to_use = url_exists_swap_exts(url) if not _exists: logger.debug("version %s does not exist for url %s", next_ver, url) continue else: url_to_use = _url_to_use if url_to_use is not None: found = True count = count + 1 current_ver = next_ver new_sha256 = get_sha256(url_to_use) if new_sha256 == current_sha256 or new_sha256 in new_content: return None current_sha256 = new_sha256 logger.debug("version %s is ok for url %s", current_ver, url_to_use) break if count == max_count: return None if current_ver != orig_ver: logger.debug("using version %s", current_ver) return current_ver return None
def test_latest_version(inp, ver, source, urls, requests_mock, tmpdir): pmy = LazyJson(tmpdir.join("cf-scripts-test.json")) pmy.update(parse_meta_yaml(inp)["source"]) [requests_mock.get(url, text=text) for url, text in urls.items()] assert ver == get_latest_version(pmy, [source])
def create_migration_yaml_creator(migrators: MutableSequence[Migrator], gx: nx.DiGraph):
    cfp_gx = copy.deepcopy(gx)
    for node in list(cfp_gx.nodes):
        if node != "conda-forge-pinning":
            pluck(cfp_gx, node)

    print("pinning migrations", flush=True)
    with indir(os.environ["CONDA_PREFIX"]):
        pinnings = parse_config_file(
            "conda_build_config.yaml",
            config=Config(**CB_CONFIG),
        )
    feedstocks_to_be_repinned = []
    for pinning_name, package_pin_list in pinnings.items():
        # there are three things:
        # pinning_name - entry in pinning file
        # package_name - the actual package, could differ via `-` -> `_`
        #                from pinning_name
        # feedstock_name - the feedstock that outputs the package
        # we need the package names for the migrator itself but need the
        # feedstock for everything else

        # exclude non-package keys
        if pinning_name not in gx.graph["outputs_lut"]:
            # conda_build_config.yaml can't have `-` unlike our package names
            package_name = pinning_name.replace("_", "-")
        else:
            package_name = pinning_name

        # replace sub-packages with their feedstock names
        # TODO - we are grabbing one element almost at random here
        # the sorted call makes it stable at least?
        fs_name = next(
            iter(
                sorted(gx.graph["outputs_lut"].get(package_name, {package_name})),
            ),
        )

        if (
            (fs_name in gx.nodes)
            and not gx.nodes[fs_name]["payload"].get("archived", False)
            and gx.nodes[fs_name]["payload"].get("version")
            and fs_name not in feedstocks_to_be_repinned
        ):
            current_pins = list(map(str, package_pin_list))
            current_version = str(gx.nodes[fs_name]["payload"]["version"])

            # we need a special parsing for pinning stuff
            meta_yaml = parse_meta_yaml(
                gx.nodes[fs_name]["payload"]["raw_meta_yaml"],
                for_pinning=True,
            )

            # find the most stringent max pin for this feedstock if any
            pin_spec = ""
            for block in [meta_yaml] + meta_yaml.get("outputs", []) or []:
                build = block.get("build", {}) or {}
                # and check the exported package is within the feedstock
                exports = [
                    p.get("max_pin", "")
                    for p in build.get("run_exports", [{}])
                    # make certain not direct hard pin
                    if isinstance(p, MutableMapping)
                    # ensure the export is for this package
                    and p.get("package_name", "") == package_name
                    # ensure the pinned package is in an output of the parent feedstock
                    and (
                        fs_name
                        in gx.graph["outputs_lut"].get(p.get("package_name", ""), set())
                    )
                ]
                if not exports:
                    continue
                # get the most stringent pin spec from the recipe block
                max_pin = max(exports, key=len)
                if len(max_pin) > len(pin_spec):
                    pin_spec = max_pin

            # fall back to the pinning file or "x"
            if not pin_spec:
                pin_spec = (
                    pinnings["pin_run_as_build"].get(pinning_name, {}).get("max_pin", "x")
                ) or "x"

            current_pins = list(
                map(lambda x: re.sub("[^0-9.]", "", x).rstrip("."), current_pins),
            )
            current_pins = [cp.strip() for cp in current_pins if cp.strip() != ""]
            current_version = re.sub("[^0-9.]", "", current_version).rstrip(".")
            if not current_pins or current_version == "":
                continue

            current_pin = str(max(map(VersionOrder, current_pins)))
            # If the current pin and the current version is the same nothing
            # to do even if the pin isn't accurate to the spec
            if current_pin != current_version and _outside_pin_range(
                pin_spec, current_pin, current_version,
            ):
                feedstocks_to_be_repinned.append(fs_name)
                print(
                    " %s:\n"
                    " curr version: %s\n"
                    " curr pin: %s\n"
                    " pin_spec: %s"
                    % (pinning_name, current_version, current_pin, pin_spec),
                    flush=True,
                )
                migrators.append(
                    MigrationYamlCreator(
                        pinning_name,
                        current_version,
                        current_pin,
                        pin_spec,
                        fs_name,
                        cfp_gx,
                        full_graph=gx,
                    ),
                )
    print(" ", flush=True)
def get_url(self, meta_yaml) -> Optional[str]: if "feedstock_name" not in meta_yaml: return None if "version" not in meta_yaml: return None # TODO: pull this from the graph itself content = meta_yaml["raw_meta_yaml"] if any(ln.startswith("{% set version") for ln in content.splitlines()): has_version_jinja2 = True else: has_version_jinja2 = False # this while statement runs until a bad version is found # then it uses the previous one orig_urls = urls_from_meta(meta_yaml["meta_yaml"]) logger.debug("orig urls: %s", orig_urls) current_ver = meta_yaml["version"] current_sha256 = None orig_ver = current_ver found = True count = 0 max_count = 10 while found and count < max_count: found = False for next_ver in self.next_ver_func(current_ver): logger.debug("trying version: %s", next_ver) if has_version_jinja2: _new_lines = [] for ln in content.splitlines(): if ln.startswith("{% set version ") or ln.startswith( "{% set version=", ): _new_lines.append('{%% set version = "%s" %%}' % next_ver) else: _new_lines.append(ln) new_content = "\n".join(_new_lines) else: new_content = content.replace(orig_ver, next_ver) new_meta = parse_meta_yaml(new_content) new_urls = urls_from_meta(new_meta) if len(new_urls) == 0: logger.debug("No URL in meta.yaml") return None logger.debug("parsed new version: %s", new_meta["package"]["version"]) url_to_use = None for url in urls_from_meta(new_meta): # this URL looks bad if these things happen if (str(new_meta["package"]["version"]) != next_ver or meta_yaml["url"] == url or url in orig_urls): logger.debug( "skipping url '%s' due to " "\n %s = %s\n %s = %s\n %s = %s", url, 'str(new_meta["package"]["version"]) != next_ver', str(new_meta["package"]["version"]) != next_ver, 'meta_yaml["url"] == url', meta_yaml["url"] == url, "url in orig_urls", url in orig_urls, ) continue logger.debug("trying url: %s", url) _exists, _url_to_use = url_exists_swap_exts(url) if not _exists: logger.debug( "version %s does not exist for url %s", next_ver, url, ) continue else: url_to_use = _url_to_use if url_to_use is not None: found = True count = count + 1 current_ver = next_ver new_sha256 = get_sha256(url_to_use) if new_sha256 == current_sha256 or new_sha256 in new_content: logger.debug( "skipping url %s because it returned the same hash", url_to_use, ) return None current_sha256 = new_sha256 logger.debug("version %s is ok for url %s", current_ver, url_to_use) break if current_ver != orig_ver: logger.debug("using version %s", current_ver) return current_ver return None
def test_latest_version(inp, ver, source, urls, requests_mock):
    pmy = parse_meta_yaml(inp)["source"]
    for url, text in urls.items():
        requests_mock.get(url, text=text)
    assert ver == get_latest_version(pmy, [source])