def main(args):
    """Run every registered auditor over all python-dependent feedstocks.

    Each auditor in ``AUDIT_REGISTRY`` gets its own ``audits/<name>``
    directory; a node is audited only when no
    ``<node>_<version>.<ext>`` file exists there yet.  Results — or the
    captured exception — are written via the auditor's ``writer``.  The
    walk stops once the START_TIME/TIMEOUT budget (read from ``env``,
    presumably the process environment — confirm) is exhausted.
    """
    gx = load_graph()
    ctx = MigratorSessionContext("", "", "")
    start_time = time.time()

    # One output directory per registered auditor.
    os.makedirs("audits", exist_ok=True)
    for audit_name in AUDIT_REGISTRY:
        os.makedirs(os.path.join("audits", audit_name), exist_ok=True)

    # TODO: generalize for cran skeleton
    # limit graph to things that depend on python
    python_des = nx.descendants(gx, "pypy-meta")

    # Audit the most depended-upon packages first; the node name breaks
    # ties so the ordering is deterministic.
    for node in sorted(
        python_des,
        key=lambda x: (len(nx.descendants(gx, x)), x),
        reverse=True,
    ):
        if time.time() - int(env.get("START_TIME", start_time)) > int(
            env.get("TIMEOUT", 60 * 30),
        ):
            break
        # depfinder only work on python at the moment so only work on things
        # with python as runtime dep
        payload = gx.nodes[node]["payload"]
        for audit_name, auditor in AUDIT_REGISTRY.items():
            version = payload.get("version", None)
            ext = auditor["ext"]
            if (
                not payload.get("archived", False)
                and version
                and "python" in payload["requirements"]["run"]
                and f"{node}_{version}.{ext}"
                not in os.listdir(f"audits/{audit_name}")
            ):
                print(node)
                fctx = FeedstockContext(
                    package_name=node,
                    feedstock_name=payload["name"],
                    attrs=payload,
                )
                try:
                    deps = auditor["run"](fctx, ctx)
                except Exception as e:
                    # Persist the failure so this node is not retried forever.
                    deps = {
                        "exception": str(e),
                        "traceback": str(traceback.format_exc()).split("\n"),
                    }
                    if "dumper" in auditor:
                        deps = auditor["dumper"](deps)
                finally:
                    with open(
                        f"audits/{audit_name}/{node}_{version}.{ext}", "w",
                    ) as f:
                        auditor["writer"](deps, f)

    compare_grayskull_audits(gx)
    compare_depfinder_audits(gx)
def test_depfinder_audit_feedstock():
    """depfinder audit of the vendored depfinder node should classify
    every import into builtin / questionable / required buckets."""
    from conda_forge_tick.audit import depfinder_audit_feedstock

    mm_ctx = MigratorSessionContext(
        graph=G,
        smithy_version="",
        pinning_version="",
        github_username="",
        github_password="",
        circle_build_url="",
    )
    # The fixture is a snapshot of the depfinder feedstock's node attrs.
    fixture = os.path.join(
        os.path.dirname(__file__), "test_yaml", "depfinder.json",
    )
    with open(fixture) as f:
        attrs = load(f)
    fctx = FeedstockContext("depfinder", "depfinder", attrs)

    deps = depfinder_audit_feedstock(fctx, mm_ctx)
    assert deps == {
        "builtin": {
            "ConfigParser",
            "__future__",
            "argparse",
            "ast",
            "collections",
            "configparser",
            "copy",
            "distutils.command.build_py",
            "distutils.command.sdist",
            "distutils.core",
            "errno",
            "fnmatch",
            "io",
            "itertools",
            "json",
            "logging",
            "os",
            "pdb",
            "pkgutil",
            "pprint",
            "re",
            "subprocess",
            "sys",
        },
        "questionable": {"setuptools", "ipython", "cx_freeze"},
        "required": {"pyyaml", "stdlib-list", "setuptools", "versioneer"},
    }
def initialize_migrators(github_username="", github_password="", github_token=None, dry_run=False):
    """Set up the graph, required contexts, and migrators.

    Parameters
    ----------
    github_username : str, optional
        Username for the bot on GitHub.
    github_password : str, optional
        Password for the bot on GitHub.
    github_token : str, optional
        Token for the bot on GitHub.
    dry_run : bool, optional
        If true, does not submit pull requests on GitHub.

    Returns
    -------
    tuple
        Migrator session to interact with GitHub and list of migrators.
        Currently only returns pre-defined migrators.
    """
    gx = load_graph()
    smithy_version = eval_cmd("conda smithy --version").strip()
    pinning_version = json.loads(
        eval_cmd("conda list conda-forge-pinning --json"),
    )[0]["version"]

    # Log how many nodes each pre-defined migrator will visit.
    for migrator in MIGRATORS:
        name = getattr(migrator, "name", migrator)
        size = len(getattr(migrator, "graph", []))
        print(f"{name} graph size: {size}")

    ctx = MigratorSessionContext(
        circle_build_url=os.getenv("CIRCLE_BUILD_URL", ""),
        graph=gx,
        smithy_version=smithy_version,
        pinning_version=pinning_version,
        github_username=github_username,
        github_password=github_password,
        github_token=github_token,
        dry_run=dry_run,
    )
    return ctx, MIGRATORS
def test_grayskull_audit_feedstock():
    """grayskull audit of the depfinder fixture must reproduce the
    reference recipe ``DEPFINDER_RECIPE``."""
    mm_ctx = MigratorSessionContext(
        graph=G,
        smithy_version="",
        pinning_version="",
        github_username="",
        github_password="",
        circle_build_url="",
    )
    fixture = os.path.join(
        os.path.dirname(__file__), "test_yaml", "depfinder.json",
    )
    with open(fixture, "r") as f:
        attrs = load(f)
    fctx = FeedstockContext("depfinder", "depfinder", attrs)

    recipe = grayskull_audit_feedstock(fctx, mm_ctx)
    assert recipe == DEPFINDER_RECIPE
def initialize_migrators(
    github_username: str = "",
    github_password: str = "",
    github_token: Optional[str] = None,
    dry_run: bool = False,
) -> Tuple[MigratorSessionContext, list, MutableSequence[Migrator]]:
    """Build the migrator session context and populate ``MIGRATORS``.

    Mutates the module-level ``MIGRATORS`` list in place via the
    ``add_*``/factory helpers, then wraps graph + tool versions +
    credentials into a :class:`MigratorSessionContext`.

    Returns
    -------
    tuple
        ``(ctx, temp, MIGRATORS)`` where ``temp`` is a snapshot of
        ``/tmp/*`` taken before any work (used by callers to spot
        leftover files — confirm against call sites).
    """
    temp = glob.glob("/tmp/*")
    gx = load_graph()
    smithy_version = eval_cmd("conda smithy --version").strip()
    pinning_version = json.loads(
        eval_cmd("conda list conda-forge-pinning --json"),
    )[0]["version"]

    # Register the pre-defined migrations on the shared MIGRATORS list.
    add_arch_migrate(MIGRATORS, gx)
    migration_factory(MIGRATORS, gx)
    add_replacement_migrator(
        MIGRATORS,
        gx,
        "matplotlib",
        "matplotlib-base",
        (
            "Unless you need `pyqt`, recipes should depend only on "
            "`matplotlib-base`."
        ),
        alt_migrator=MatplotlibBase,
    )
    create_migration_yaml_creator(migrators=MIGRATORS, gx=gx)

    # Log how many nodes each migrator will visit.
    for migrator in MIGRATORS:
        name = getattr(migrator, "name", migrator)
        size = len(getattr(migrator, "graph", []))
        print(f"{name} graph size: {size}")

    ctx = MigratorSessionContext(
        circle_build_url=os.getenv("CIRCLE_BUILD_URL", ""),
        graph=gx,
        smithy_version=smithy_version,
        pinning_version=pinning_version,
        github_username=github_username,
        github_password=github_password,
        github_token=github_token,
        dry_run=dry_run,
    )
    return ctx, temp, MIGRATORS
def main(args):
    """Audit python-dependent feedstocks with ``audit_feedstock``.

    Writes one JSON file per (node, version) into ``audits/``; nodes
    whose audit file already exists are skipped.  Stops when the
    START_TIME/TIMEOUT budget (read from ``env``, presumably the process
    environment — confirm) runs out.
    """
    gx = load_graph()
    ctx = MigratorSessionContext("", "", "")
    start_time = time.time()

    # limit graph to things that depend on python
    python_des = nx.descendants(gx, "pypy-meta")

    # Most depended-upon packages first; node name breaks ties.
    for node in sorted(
        python_des,
        key=lambda x: (len(nx.descendants(gx, x)), x),
        reverse=True,
    ):
        if time.time() - int(env.get("START_TIME", start_time)) > int(
            env.get("TIMEOUT", 60 * 30),
        ):
            break
        # depfinder only work on python at the moment so only work on things
        # with python as runtime dep
        os.makedirs("audits", exist_ok=True)
        # payload is used as a context manager here (lazy-loaded node
        # data in this project — confirm against the graph backend).
        with gx.nodes[node]["payload"] as payload:
            version = payload.get("version", None)
            if (
                not payload.get("archived", False)
                and version
                and "python" in payload["requirements"]["run"]
                and f"{node}_{version}.json" not in os.listdir("audits")
            ):
                print(node)
                fctx = FeedstockContext(
                    package_name=node,
                    feedstock_name=payload["name"],
                    attrs=payload,
                )
                try:
                    deps = audit_feedstock(fctx, ctx)
                except Exception as e:
                    # Record the failure instead of crashing the sweep.
                    deps = {
                        "exception": str(e),
                        "traceback": str(traceback.format_exc()).split("\n"),
                    }
                finally:
                    with open(f"audits/{node}_{version}.json", "w") as f:
                        dump(deps, f)
def main(args):
    """Run every registered auditor and compute depfinder accuracy.

    Each auditor gets its own ``audits/<name>`` directory stamped with a
    ``_version.json`` marker; when the auditor code version changes, the
    stale audit data is wiped so it is regenerated with the latest code.
    The graph walk is bounded by the START_TIME/TIMEOUT budget read from
    ``env`` (presumably the process environment — confirm).
    """
    gx = load_graph()
    ctx = MigratorSessionContext("", "", "")
    start_time = time.time()

    os.makedirs("audits", exist_ok=True)
    for audit_name, auditor in AUDIT_REGISTRY.items():
        audit_dir = os.path.join("audits", audit_name)
        version_path = os.path.join(audit_dir, "_version.json")
        audit_version = "_".join([auditor["version"], auditor["creation_version"]])
        if os.path.exists(version_path):
            recorded_version = load(open(version_path))
            # if the version of the code generating the audits is different
            # from our current audit data clear out the audit data so we
            # always use the latest version
            if recorded_version != audit_version:
                shutil.rmtree(audit_dir)
        os.makedirs(audit_dir, exist_ok=True)
        dump(audit_version, open(version_path, "w"))

    # TODO: generalize for cran skeleton
    # limit graph to things that depend on python
    python_des = nx.descendants(gx, "python")

    # Most depended-upon packages first; node name breaks ties.
    for node in sorted(
        python_des,
        key=lambda x: (len(nx.descendants(gx, x)), x),
        reverse=True,
    ):
        if time.time() - int(env.get("START_TIME", start_time)) > int(
            env.get("TIMEOUT", 60 * RUNTIME_MINUTES),
        ):
            break
        # depfinder only work on python at the moment so only work on things
        # with python as runtime dep
        payload = gx.nodes[node]["payload"]
        for audit_name, auditor in AUDIT_REGISTRY.items():
            version = payload.get("version", None)
            ext = auditor["ext"]
            if (
                not payload.get("archived", False)
                and not payload.get("bad", False)
                and version
                and "python" in payload["requirements"]["run"]
                and f"{node}_{version}.{ext}"
                not in os.listdir(f"audits/{audit_name}")
            ):
                fctx = FeedstockContext(
                    package_name=node,
                    feedstock_name=payload["feedstock_name"],
                    attrs=payload,
                )
                try:
                    deps = auditor["run"](fctx, ctx)
                except Exception as e:
                    # Persist the failure so the node is not retried forever.
                    deps = {
                        "exception": str(e),
                        "traceback": str(traceback.format_exc()).split("\n"),
                    }
                    if "dumper" in auditor:
                        deps = auditor["dumper"](deps)
                finally:
                    if deps:
                        with open(
                            f"audits/{audit_name}/{node}_{version}.{ext}", "w",
                        ) as f:
                            auditor["writer"](deps, f)

    # grayskull_audit_outcome = compare_grayskull_audits(gx)
    # compute_grayskull_accuracy(grayskull_audit_outcome)
    depfinder_audit_outcome = compare_depfinder_audits(gx)
    compute_depfinder_accuracy(depfinder_audit_outcome)
def run_test_migration(
    m,
    inp,
    output,
    kwargs,
    prb,
    mr_out,
    should_filter=False,
    tmpdir=None,
):
    """Drive migrator *m* over an in-memory recipe and verify the result.

    Writes *inp* as ``meta.yaml`` under *tmpdir*, builds the feedstock
    attrs the same way the graph does, asserts the filter decision
    matches *should_filter*, runs the migration (plus pre/post piggyback
    migrations), and finally compares the rewritten ``meta.yaml``
    against *output* with jinja comments stripped.
    """
    mm_ctx = MigratorSessionContext(
        graph=G,
        smithy_version="",
        pinning_version="",
        github_username="",
        github_password="",
        circle_build_url=env["CIRCLE_BUILD_URL"],
    )
    m_ctx = MigratorContext(mm_ctx, m)
    m.bind_to_ctx(m_ctx)

    if mr_out:
        mr_out.update(bot_rerun=False)
    with open(os.path.join(tmpdir, "meta.yaml"), "w") as f:
        f.write(inp)

    # read the conda-forge.yml
    cf_yml_path = os.path.join(tmpdir, "..", "conda-forge.yml")
    if os.path.exists(cf_yml_path):
        with open(cf_yml_path) as fp:
            cf_yml = fp.read()
    else:
        cf_yml = "{}"

    # Load the meta.yaml (this is done in the graph)
    try:
        name = parse_meta_yaml(inp)["package"]["name"]
    except Exception:
        name = "blah"

    pmy = populate_feedstock_attributes(name, {}, inp, cf_yml)
    # these are here for legacy migrators
    pmy["version"] = pmy["meta_yaml"]["package"]["version"]
    pmy["req"] = set()
    for section in ["build", "host", "run"]:
        req = pmy["meta_yaml"].get("requirements", {}) or {}
        section_reqs = req.get(section) or set()
        pmy["req"] |= set(section_reqs)
    pmy["raw_meta_yaml"] = inp
    pmy.update(kwargs)

    assert m.filter(pmy) is should_filter
    if should_filter:
        return

    hash_type = pmy.get("hash_type", "sha256")
    m.run_pre_piggyback_migrations(tmpdir, pmy, hash_type=hash_type)
    mr = m.migrate(tmpdir, pmy, hash_type=hash_type)
    m.run_post_piggyback_migrations(tmpdir, pmy, hash_type=hash_type)
    assert mr_out == mr
    if not mr:
        return

    pmy.update(PRed=[frozen_to_json_friendly(mr)])
    with open(os.path.join(tmpdir, "meta.yaml")) as f:
        actual_output = f.read()
    # strip jinja comments
    jinja_comment = re.compile(r"{#.*#}")
    actual_output = jinja_comment.sub("", actual_output)
    output = jinja_comment.sub("", output)
    assert actual_output == output
def test_grayskull_audit_feedstock():
    """grayskull audit of the depfinder fixture should emit this exact
    recipe text.

    NOTE(review): the expected recipe was reconstructed from a
    whitespace-mangled source; verify the literal (newlines/indent)
    against the grayskull output before relying on it.
    """
    mm_ctx = MigratorSessionContext(
        graph=G,
        smithy_version="",
        pinning_version="",
        github_username="",
        github_password="",
        circle_build_url="",
    )
    fixture = os.path.join(
        os.path.dirname(__file__), "test_yaml", "depfinder.json",
    )
    with open(fixture, "r") as f:
        attrs = load(f)
    fctx = FeedstockContext("depfinder", "depfinder", attrs)

    recipe = grayskull_audit_feedstock(fctx, mm_ctx)
    expected = """{% set name = "depfinder" %}
{% set version = 2.3.0 %}

package:
  name: {{ name|lower }}
  version: {{ version }}

source:
  url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz
  sha256: 2694acbc8f7d94ca9bae55b8dc5b4860d5bc253c6a377b3b8ce63fb5bffa4000

build:
  number: 0
  noarch: python
  entry_points:
    - depfinder = depfinder.cli:cli
  script: {{ PYTHON }} -m pip install . -vv

requirements:
  host:
    - pip
    - python
  run:
    - python
    - pyyaml
    - stdlib-list

test:
  imports:
    - depfinder
  commands:
    - pip check
    - depfinder --help
  requires:
    - pip

about:
  home: http://github.com/ericdill/depfinder
  summary: Find all the imports in your library
  doc_url: https://pythonhosted.org/depfinder/
  license: BSD-3-Clause
  license_file: LICENSE

extra:
  recipe-maintainers:
    - ericdill
    - mariusvniekerk
    - tonyfast
    - ocefpaf
"""
    assert recipe == expected
def initialize_migrators(
    github_username: str = "",
    github_password: str = "",
    github_token: Optional[str] = None,
    dry_run: bool = False,
) -> Tuple[MigratorSessionContext, list, MutableSequence[Migrator]]:
    """Build the session context and the full list of migrators.

    Unlike older variants, this builds a local migrator list (no global
    mutation), then prepends a :class:`Version` migrator restricted to
    the python-importing nodes of the graph.

    Returns
    -------
    tuple
        ``(mctx, temp, migrators)`` where ``temp`` is a snapshot of
        ``/tmp/*`` taken before any work (used by callers to spot
        leftover files — confirm against call sites).
    """
    temp = glob.glob("/tmp/*")
    gx = load_graph()
    smithy_version = eval_cmd("conda smithy --version").strip()
    pinning_version = json.loads(
        eval_cmd("conda list conda-forge-pinning --json"),
    )[0]["version"]

    # Assemble the rebuild migrators into a fresh local list.
    migrators = []
    add_arch_migrate(migrators, gx)
    migration_factory(migrators, gx)
    add_replacement_migrator(
        migrators,
        gx,
        "matplotlib",
        "matplotlib-base",
        (
            "Unless you need `pyqt`, recipes should depend only on "
            "`matplotlib-base`."
        ),
        alt_migrator=MatplotlibBase,
    )
    create_migration_yaml_creator(migrators=migrators, gx=gx)

    print("rebuild migration graph sizes:", flush=True)
    for m in migrators:
        print(
            f' {getattr(m, "name", m)} graph size: '
            f'{len(getattr(m, "graph", []))}',
            flush=True,
        )
    print(" ", flush=True)

    mctx = MigratorSessionContext(
        circle_build_url=os.getenv("CIRCLE_BUILD_URL", ""),
        graph=gx,
        smithy_version=smithy_version,
        pinning_version=pinning_version,
        github_username=github_username,
        github_password=github_password,
        github_token=github_token,
        dry_run=dry_run,
    )

    print("building package import maps and version migrator", flush=True)
    # Nodes whose requirements mention python, plus the output names
    # those feedstocks produce.
    python_nodes = {
        n for n, v in mctx.graph.nodes("payload") if "python" in v.get("req", "")
    }
    python_nodes.update(
        [
            k
            for node_name, node in mctx.graph.nodes("payload")
            for k in node.get("outputs_names", [])
            if node_name in python_nodes
        ],
    )
    version_migrator = Version(
        python_nodes=python_nodes,
        pr_limit=PR_LIMIT * 4,
        piggy_back_migrations=[
            Jinja2VarsCleanup(),
            DuplicateLinesCleanup(),
            PipMigrator(),
            LicenseMigrator(),
            CondaForgeYAMLCleanup(),
            ExtraJinja2KeysCleanup(),
            Build2HostMigrator(),
            NoCondaInspectMigrator(),
            Cos7Config(),
        ],
    )

    # Version bumps run before all rebuild migrations.
    migrators = [version_migrator] + migrators
    print(" ", flush=True)
    return mctx, temp, migrators