def test_make_grayskull_recipe():
    with open(
        os.path.join(os.path.dirname(__file__), "test_yaml", "depfinder.json"),
    ) as f:
        attrs = load(f)
    recipe = make_grayskull_recipe(attrs)
    assert recipe != ""
def test_get_grayskull_comparison():
    with open(
        os.path.join(os.path.dirname(__file__), "test_yaml", "depfinder.json"),
    ) as f:
        attrs = load(f)
    d, rs = get_grayskull_comparison(attrs)
    assert rs != ""
    assert d["run"]["cf_minus_df"] == {"python <3.9"}
    assert any(_d.startswith("python") for _d in d["run"]["df_minus_cf"])
def test_get_depfinder_comparison():
    with open(
        os.path.join(os.path.dirname(__file__), "test_yaml", "depfinder.json"),
    ) as f:
        attrs = load(f)
    with tempfile.TemporaryDirectory() as tmpdir:
        pth = os.path.join(tmpdir, "meta.yaml")
        with open(pth, "w") as fp:
            fp.write(attrs["raw_meta_yaml"])
        d = get_depfinder_comparison(tmpdir, attrs, {"conda"})
    assert len(d["run"]) == 0
    assert "host" not in d
def compare_depfinder_audits(gx):
    # This really needs to be all the python packages, since this doesn't cover outputs
    python_nodes = {n for n, v in gx.nodes("payload") if "python" in v.get("req", "")}
    python_nodes.update(
        [
            k
            for node_name, node in gx.nodes("payload")
            for k in node.get("outputs_names", [])
            if node_name in python_nodes
        ],
    )

    bad_inspection = {}
    files = os.listdir("audits/depfinder")
    if "_net_audit.json" in files:
        files.pop(files.index("_net_audit.json"))

    for node, attrs in gx.nodes("payload"):
        if (
            attrs.get("version", None) is None
            or attrs.get("archived", False)
            or attrs.get("bad", False)
        ):
            continue
        if "requirements" not in attrs:
            print("node %s doesn't have requirements!" % node, flush=True)
            continue
        node_version = f"{node}_{attrs['version']}"
        # construct the expected filename
        expected_filename = f"{node_version}.json"
        if expected_filename in files:
            with open(os.path.join("audits/depfinder", expected_filename)) as f:
                output = load(f)
            if isinstance(output, str) or "traceback" in output:
                bad_inspection[node_version] = output
                continue
            d = extract_missing_packages(
                required_packages=output.get("required", {}),
                questionable_packages=output.get("questionable", {}),
                run_packages=attrs["requirements"]["run"],
                node=node,
                python_nodes=python_nodes,
            )
            bad_inspection[node_version] = d or False
    with open("audits/depfinder/_net_audit.json", "w") as f:
        dump(bad_inspection, f)
    return bad_inspection
def compare_depfinder_audits(gx):
    # This really needs to be all the python packages, since this doesn't cover outputs
    python_nodes = {n for n, v in gx.nodes("payload") if "python" in v.get("req", "")}
    python_nodes.update(
        [
            k
            for node_name, node in gx.nodes("payload")
            for k in node.get("outputs_names", [])
            if node_name in python_nodes
        ],
    )
    imports_by_package, packages_by_import = create_package_import_maps(
        python_nodes,  # set(gx.nodes)
    )

    bad_inspection = {}
    files = os.listdir("audits/depfinder")
    if "_net_audit.json" in files:
        files.pop(files.index("_net_audit.json"))

    for node, attrs in gx.nodes("payload"):
        if not attrs.get("version"):
            continue
        node_version = f"{node}_{attrs['version']}"
        # construct the expected filename
        expected_filename = f"{node_version}.json"
        if expected_filename in files:
            with open(os.path.join("audits/depfinder", expected_filename), "r") as f:
                output = load(f)
            if isinstance(output, str):
                bad_inspection[node_version] = output
                continue
            d = extract_missing_packages(
                required_imports=output.get("required", set()),
                questionable_imports=output.get("questionable", set()),
                run_packages=attrs["requirements"]["run"],
                package_by_import=packages_by_import,
                import_by_package=imports_by_package,
                node=node,
                nodes=python_nodes,  # set(gx.nodes)
            )
            bad_inspection[node_version] = d or False
    with open("audits/depfinder/_net_audit.json", "w") as f:
        dump(bad_inspection, f)
    return bad_inspection
def test_depfinder_audit_feedstock():
    from conda_forge_tick.audit import depfinder_audit_feedstock

    mm_ctx = MigratorSessionContext(
        graph=G,
        smithy_version="",
        pinning_version="",
        github_username="",
        github_password="",
        circle_build_url="",
    )
    with open(
        os.path.join(os.path.dirname(__file__), "test_yaml", "depfinder.json"),
    ) as f:
        attrs = load(f)
    fctx = FeedstockContext("depfinder", "depfinder", attrs)
    deps = depfinder_audit_feedstock(fctx, mm_ctx)
    assert deps == {
        "builtin": {
            "ConfigParser",
            "__future__",
            "argparse",
            "ast",
            "collections",
            "configparser",
            "copy",
            "distutils.command.build_py",
            "distutils.command.sdist",
            "distutils.core",
            "errno",
            "fnmatch",
            "io",
            "itertools",
            "json",
            "logging",
            "os",
            "pdb",
            "pkgutil",
            "pprint",
            "re",
            "subprocess",
            "sys",
        },
        "questionable": {"setuptools", "ipython", "cx_freeze"},
        "required": {"pyyaml", "stdlib-list", "setuptools", "versioneer"},
    }
def test_grayskull_audit_feedstock():
    mm_ctx = MigratorSessionContext(
        graph=G,
        smithy_version="",
        pinning_version="",
        github_username="",
        github_password="",
        circle_build_url="",
    )
    with open(
        os.path.join(os.path.dirname(__file__), "test_yaml", "depfinder.json"),
        "r",
    ) as f:
        attrs = load(f)
    fctx = FeedstockContext("depfinder", "depfinder", attrs)
    recipe = grayskull_audit_feedstock(fctx, mm_ctx)
    assert recipe == DEPFINDER_RECIPE
def compare_grayskull_audits(gx):
    grayskull_files = os.listdir("audits/grayskull")
    bad_inspections = {}
    if "_net_audit.json" in grayskull_files:
        grayskull_files.pop(grayskull_files.index("_net_audit.json"))
        with open("audits/grayskull/_net_audit.json") as f:
            bad_inspections = load(f)

    futures = {}
    with executor("dask", max_workers=20) as pool:
        for node, attrs in gx.nodes("payload"):
            if not attrs.get("version"):
                continue
            node_version = f"{node}_{attrs['version']}"
            if node_version in bad_inspections:
                continue
            # construct the expected filename
            expected_filename = f"{node_version}.yml"
            if expected_filename in grayskull_files:
                with open(
                    os.path.join("audits/grayskull", expected_filename),
                ) as f:
                    meta_yaml = f.read()
                futures[
                    pool.submit(
                        inner_grayskull_comparison,
                        meta_yaml=meta_yaml,
                        attrs=attrs,
                        node=node,
                    )
                ] = node_version
        for future in as_completed(futures):
            try:
                bad_inspections[futures[future]] = future.result()
            except Exception as e:
                bad_inspections[futures[future]] = str(e)
    with open("audits/grayskull/_net_audit.json", "w") as f:
        dump(bad_inspections, f)
    return bad_inspections
def test_update_deps_version(caplog, tmpdir, update_kind, out_yml):
    caplog.set_level(
        logging.DEBUG,
        logger="conda_forge_tick.migrators.version",
    )

    with open(
        os.path.join(os.path.dirname(__file__), "test_yaml", "depfinder.json"),
    ) as f:
        attrs = load(f)

    in_yaml = (
        attrs["raw_meta_yaml"].replace("2.3.0", "2.2.0").replace("2694acbc8f7", "")
    )
    new_ver = "2.3.0"
    kwargs = {
        "new_version": new_ver,
        "conda-forge.yml": {"bot": {"inspection": update_kind}},
    }

    os.makedirs(os.path.join(tmpdir, "recipe"))
    with open(os.path.join(tmpdir, "recipe", "meta.yaml"), "w") as fp:
        fp.write(in_yaml)

    run_test_migration(
        m=VERSION,
        inp=in_yaml,
        output=out_yml,
        kwargs=kwargs,
        prb="Dependencies have been updated if changed",
        mr_out={
            "migrator_name": "Version",
            "migrator_version": Version.migrator_version,
            "version": new_ver,
        },
        tmpdir=os.path.join(tmpdir, "recipe"),
        make_body=True,
    )
def compare_depfinder_audits(gx):
    bad_inspection = {}
    files = os.listdir("audits/depfinder")
    if "_net_audit.json" in files:
        files.pop(files.index("_net_audit.json"))

    for node, attrs in gx.nodes("payload"):
        if not attrs.get("version"):
            continue
        node_version = f"{node}_{attrs['version']}"
        # construct the expected filename
        expected_filename = f"{node_version}.json"
        if expected_filename in files:
            with open(os.path.join("audits/depfinder", expected_filename), "r") as f:
                output = load(f)
            if isinstance(output, str):
                bad_inspection[node_version] = output
                continue
            quest = output.get("questionable", set())
            required_pkgs = output.get("required", set())
            d = {}
            run_req = attrs["requirements"]["run"]
            excludes = {
                node,
                node.replace("-", "_"),
                node.replace("_", "-"),
                "python",
                "setuptools",
            }
            cf_minus_df = run_req - required_pkgs - excludes - quest
            if cf_minus_df:
                d.update(cf_minus_df=cf_minus_df)
            df_minus_cf = required_pkgs - run_req - excludes
            if df_minus_cf:
                d.update(df_minus_cf=df_minus_cf)
            bad_inspection[node_version] = d or False
    with open("audits/depfinder/_net_audit.json", "w") as f:
        dump(bad_inspection, f)
    return bad_inspection
def test_update_run_deps():
    with open(
        os.path.join(os.path.dirname(__file__), "test_yaml", "depfinder.json"),
    ) as f:
        attrs = load(f)
    d, rs = get_grayskull_comparison(attrs)
    lines = attrs["raw_meta_yaml"].splitlines()
    lines = [ln + "\n" for ln in lines]
    recipe = CondaMetaYAML("".join(lines))

    updated_deps = _update_sec_deps(recipe, d, ["host", "run"], update_python=False)
    print("\n" + recipe.dumps())
    assert not updated_deps
    assert "python <3.9" in recipe.dumps()

    updated_deps = _update_sec_deps(recipe, d, ["host", "run"], update_python=True)
    print("\n" + recipe.dumps())
    assert updated_deps
    assert "python >=3.6" in recipe.dumps()
def main(args):
    gx = load_graph()
    ctx = MigratorSessionContext("", "", "")
    start_time = time.time()

    os.makedirs("audits", exist_ok=True)
    for k, v in AUDIT_REGISTRY.items():
        audit_dir = os.path.join("audits", k)
        version_path = os.path.join(audit_dir, "_version.json")
        audit_version = "_".join([v["version"], v["creation_version"]])
        if os.path.exists(version_path):
            version = load(open(version_path))
            # if the version of the code generating the audits is different from our current audit data
            # clear out the audit data so we always use the latest version
            if version != audit_version:
                shutil.rmtree(audit_dir)
        os.makedirs(audit_dir, exist_ok=True)
        dump(audit_version, open(version_path, "w"))

    # TODO: generalize for cran skeleton
    # limit graph to things that depend on python
    python_des = nx.descendants(gx, "python")
    for node in sorted(
        python_des,
        key=lambda x: (len(nx.descendants(gx, x)), x),
        reverse=True,
    ):
        if time.time() - int(env.get("START_TIME", start_time)) > int(
            env.get("TIMEOUT", 60 * RUNTIME_MINUTES),
        ):
            break
        # depfinder only work on python at the moment so only work on things
        # with python as runtime dep
        payload = gx.nodes[node]["payload"]
        for k, v in AUDIT_REGISTRY.items():
            version = payload.get("version", None)
            ext = v["ext"]
            if (
                not payload.get("archived", False)
                and not payload.get("bad", False)
                and version
                and "python" in payload["requirements"]["run"]
                and f"{node}_{version}.{ext}" not in os.listdir(f"audits/{k}")
            ):
                fctx = FeedstockContext(
                    package_name=node,
                    feedstock_name=payload["feedstock_name"],
                    attrs=payload,
                )
                try:
                    deps = v["run"](fctx, ctx)
                except Exception as e:
                    deps = {
                        "exception": str(e),
                        "traceback": str(traceback.format_exc()).split("\n"),
                    }
                    if "dumper" in v:
                        deps = v["dumper"](deps)
                finally:
                    if deps:
                        with open(f"audits/{k}/{node}_{version}.{ext}", "w") as f:
                            v["writer"](deps, f)

    # grayskull_audit_outcome = compare_grayskull_audits(gx)
    # compute_grayskull_accuracy(grayskull_audit_outcome)

    depfinder_audit_outcome = compare_depfinder_audits(gx)
    compute_depfinder_accuracy(depfinder_audit_outcome)
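# For orientation: main() above only consumes a handful of keys from each
# AUDIT_REGISTRY entry -- "version", "creation_version", "ext", "run", an
# optional "dumper", and "writer". The entry below is a minimal hypothetical
# sketch of that shape; the callables and version strings are illustrative
# assumptions, not the actual registry contents.
import json

from conda_forge_tick.audit import depfinder_audit_feedstock

AUDIT_REGISTRY_EXAMPLE = {
    "depfinder": {
        # bumped whenever the audit logic changes so stale audit data gets wiped
        "version": "1",
        "creation_version": "1",
        # extension used for the per-feedstock audit files, e.g. depfinder_2.3.0.json
        "ext": "json",
        # run(fctx, ctx) produces the raw audit result for one feedstock
        "run": depfinder_audit_feedstock,
        # optional post-processing before writing (e.g. sets -> sorted lists)
        "dumper": lambda deps: {k: sorted(v) for k, v in deps.items()},
        # writer(deps, file_object) persists the result to disk
        "writer": lambda deps, f: json.dump(deps, f),
    },
}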
try:
    RANKINGS = load(open("ranked_hubs_authorities.json"))
except FileNotFoundError:
    RANKINGS = []


def extract_missing_packages(
    required_packages,
    questionable_packages,
    run_packages,
    node,
    python_nodes,
):
    exclude_packages = STATIC_EXCLUDES.union(
        {node, node.replace("-", "_"), node.replace("_", "-")},
    )
def test_grayskull_audit_feedstock():
    mm_ctx = MigratorSessionContext(
        graph=G,
        smithy_version="",
        pinning_version="",
        github_username="",
        github_password="",
        circle_build_url="",
    )
    with open(
        os.path.join(os.path.dirname(__file__), "test_yaml", "depfinder.json"),
        "r",
    ) as f:
        attrs = load(f)
    fctx = FeedstockContext("depfinder", "depfinder", attrs)
    recipe = grayskull_audit_feedstock(fctx, mm_ctx)
    assert (
        recipe
        == """{% set name = "depfinder" %}
{% set version = 2.3.0 %}

package:
  name: {{ name|lower }}
  version: {{ version }}

source:
  url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz
  sha256: 2694acbc8f7d94ca9bae55b8dc5b4860d5bc253c6a377b3b8ce63fb5bffa4000

build:
  number: 0
  noarch: python
  entry_points:
    - depfinder = depfinder.cli:cli
  script: {{ PYTHON }} -m pip install . -vv

requirements:
  host:
    - pip
    - python
  run:
    - python
    - pyyaml
    - stdlib-list

test:
  imports:
    - depfinder
  commands:
    - pip check
    - depfinder --help
  requires:
    - pip

about:
  home: http://github.com/ericdill/depfinder
  summary: Find all the imports in your library
  doc_url: https://pythonhosted.org/depfinder/
  license: BSD-3-Clause
  license_file: LICENSE

extra:
  recipe-maintainers:
    - ericdill
    - mariusvniekerk
    - tonyfast
    - ocefpaf
"""
    )
import os
import threading
import typing
from dataclasses import dataclass
from typing import Union

import github3
from networkx import DiGraph

from conda_forge_tick.utils import load

if typing.TYPE_CHECKING:
    from conda_forge_tick.migrators import Migrator
    from conda_forge_tick.migrators_types import AttrsTypedDict


if os.path.exists("all_feedstocks.json"):
    with open("all_feedstocks.json") as f:
        DEFAULT_BRANCHES = load(f).get("default_branches", {})
else:
    DEFAULT_BRANCHES = {}


@dataclass
class GithubContext:
    github_username: str
    github_password: str
    circle_build_url: str
    github_token: typing.Optional[str] = ""
    dry_run: bool = True
    _tl: threading.local = threading.local()

    @property
    def gh(self) -> github3.GitHub:
import glob

import tqdm

from conda_forge_tick.utils import load, dump
from conda_forge_tick.git_utils import trim_pr_josn_keys

fnames = glob.glob("pr_json/*.json")
print("found %d json files" % len(fnames), flush=True)

for fname in tqdm.tqdm(fnames):
    with open(fname) as fp:
        pr_json = load(fp)
    pr_json = trim_pr_josn_keys(pr_json)
    with open(fname, "w") as fp:
        dump(pr_json, fp)