Example #1
def main(args: Any = None) -> None:
    if CONDA_FORGE_TICK_DEBUG:
        setup_logger(logger, level="debug")
    else:
        setup_logger(logger)

    logger.info("Reading graph")
    gx = load_graph()

    update_upstream_versions(gx)

    logger.info("writing out file")
    dump_graph(gx)
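CONDA_FORGE_TICK_DEBUG is a module-level flag not defined in the snippet; in the bot it comes from the environment. A plausible sketch:

import os

# truthy when the env var is set; falls back to False otherwise (assumption)
CONDA_FORGE_TICK_DEBUG = os.environ.get("CONDA_FORGE_TICK_DEBUG", False)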
Example #2
def main(args):
    gx = load_graph()
    ctx = MigratorSessionContext("", "", "")
    start_time = time.time()

    os.makedirs("audits", exist_ok=True)
    for k in AUDIT_REGISTRY:
        os.makedirs(os.path.join("audits", k), exist_ok=True)

    # TODO: generalize for cran skeleton
    # limit graph to things that depend on python
    python_des = nx.descendants(gx, "pypy-meta")
    for node in sorted(
            python_des,
            key=lambda x: (len(nx.descendants(gx, x)), x),
            reverse=True,
    ):
        if time.time() - int(env.get("START_TIME", start_time)) > int(
                env.get("TIMEOUT", 60 * 30), ):
            break
        # depfinder only works on Python at the moment, so only audit things
        # with python as a runtime dep
        payload = gx.nodes[node]["payload"]
        for k, v in AUDIT_REGISTRY.items():
            version = payload.get("version", None)
            ext = v["ext"]
            if (not payload.get("archived", False) and version
                    and "python" in payload["requirements"]["run"]
                    and f"{node}_{version}.{ext}"
                    not in os.listdir(f"audits/{k}")):
                print(node)
                fctx = FeedstockContext(
                    package_name=node,
                    feedstock_name=payload["name"],
                    attrs=payload,
                )
                try:
                    deps = v["run"](fctx, ctx)
                except Exception as e:
                    deps = {
                        "exception": str(e),
                        "traceback": str(traceback.format_exc()).split("\n"),
                    }
                    if "dumper" in v:
                        deps = v["dumper"](deps)
                finally:
                    with open(f"audits/{k}/{node}_{version}.{ext}", "w") as f:
                        v["writer"](deps, f)

    compare_grayskull_audits(gx)
    compare_depfinder_audits(gx)
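AUDIT_REGISTRY is not shown in these snippets; from the way the loop uses it, each entry needs at least "ext", "run", and "writer" keys, plus an optional "dumper" (and, in Example #9 below, "version" and "creation_version" stamps). A minimal sketch of one hypothetical entry:

import json

AUDIT_REGISTRY = {
    "depfinder": {
        "ext": "json",                     # file extension of the audit output
        "run": lambda fctx, ctx: {},       # callable(fctx, ctx) -> deps; hypothetical no-op
        "writer": lambda deps, f: json.dump(deps, f),
        "dumper": lambda deps: deps,       # optional post-processing on failure
        "version": "1",                    # stamps consumed by Example #9 below
        "creation_version": "1",
    },
}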
Example #3
def main(args: Any = None) -> None:
    if CONDA_FORGE_TICK_DEBUG:
        setup_logger(logger, level="debug")
    else:
        setup_logger(logger)

    logger.info("Reading graph")
    # load the graph for inspection
    gx = load_graph()

    # call update
    to_update = new_update_upstream_versions(gx)

    logger.info("writing out file")
    with open("new_version.json", "w") as outfile:
        json.dump(to_update, outfile)
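A downstream consumer can read the result back; a minimal sketch, assuming new_version.json holds whatever mapping new_update_upstream_versions returned:

import json

with open("new_version.json") as f:
    to_update = json.load(f)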
Example #4
def initialize_migrators(github_username="",
                         github_password="",
                         github_token=None,
                         dry_run=False):
    """
    Set up the graph, required contexts, and migrators.

    Parameters
    ----------
    github_username : str, optional
        Username for the bot on GitHub
    github_password : str, optional
        Password for the bot on GitHub
    github_token : str, optional
        Token for the bot on GitHub
    dry_run : bool, optional
        If True, does not submit pull requests on GitHub

    Returns
    -------
    tuple
        Migrator session to interact with GitHub and the list of migrators.
        Currently only returns pre-defined migrators.
    """
    gx = load_graph()
    smithy_version = eval_cmd("conda smithy --version").strip()
    pinning_version = json.loads(
        eval_cmd("conda list conda-forge-pinning --json"))[0]["version"]
    for m in MIGRATORS:
        print(
            f'{getattr(m, "name", m)} graph size: {len(getattr(m, "graph", []))}'
        )

    ctx = MigratorSessionContext(
        circle_build_url=os.getenv("CIRCLE_BUILD_URL", ""),
        graph=gx,
        smithy_version=smithy_version,
        pinning_version=pinning_version,
        github_username=github_username,
        github_password=github_password,
        github_token=github_token,
        dry_run=dry_run,
    )

    return ctx, MIGRATORS
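A minimal call sketch; the token name is a hypothetical environment variable, not something these snippets define:

import os

ctx, migrators = initialize_migrators(
    github_token=os.environ.get("GITHUB_TOKEN"),  # hypothetical variable name
    dry_run=True,  # inspect without submitting pull requests
)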
Example #5
def main(args: Any = None) -> None:
    sources = (PyPI(), CRAN(), NPM(), ROSDistro(), RawURL(), Github())  # constructed but unused in this snippet

    if CONDA_FORGE_TICK_DEBUG:
        setup_logger(logger, level="debug")
    else:
        setup_logger(logger)

    logger.info("Reading graph")
    # load the graph for inspection
    gx = load_graph()

    # call update
    to_update = new_update_upstream_versions(gx)

    logger.info("writing out file")
    with open("new_version.json", "w") as outfile:
        json.dump(to_update, outfile)
Example #6
def _query_graph_handle_args(args):
    from conda_forge_tick.utils import load_graph
    if args.filepath != 'graph.json':
        copyfile(args.filepath, 'graph.json')
    gx = load_graph()
    if args.query == 'depends_on':
        dependencies = list_dependencies_on(gx, args.package)
        print(
            f'The following packages require {args.package} to be installed:')
        for dep in dependencies:
            print(dep)
        print(f'Total: {len(dependencies)}')
    elif args.query == 'depends_of':
        dependencies = list_dependencies_of(gx, args.package)
        print(
            f'{args.package} requires the following packages to be installed:')
        for dep in dependencies:
            print(dep)
        print(f'Total: {len(dependencies)}')
    else:
        print(f'Unknown query type: {args.query}')
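The two query helpers are imported from elsewhere; a minimal sketch with networkx, assuming edges point from a dependency to its dependents (the same orientation that makes nx.descendants(gx, "python") yield the packages depending on python in the other examples):

import networkx as nx

def list_dependencies_on(gx, package):
    # packages reachable from `package`, i.e. everything that depends on it
    return sorted(nx.descendants(gx, package))

def list_dependencies_of(gx, package):
    # packages from which `package` is reachable, i.e. everything it requires
    return sorted(nx.ancestors(gx, package))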
Example #7
def _make_graph_handle_args(args):
    from conda_forge_tick.utils import load_graph, dump_graph
    from conda_forge_tick.make_graph import update_nodes_with_bot_rerun
    # get a list of all feedstocks from nsls-ii-forge
    global DEBUG
    DEBUG = args.debug
    global MAX_WORKERS
    MAX_WORKERS = args.max_workers
    organization = args.organization
    names = get_all_feedstocks(cached=args.cached,
                               filepath=args.filepath,
                               organization=organization)
    if os.path.exists("graph.json"):
        gx = load_graph()
    else:
        gx = None
    print(f'Using {MAX_WORKERS} workers in process pool')
    gx = make_graph(names, organization, gx=gx)
    print("nodes w/o payload:",
          [k for k, v in gx.nodes.items() if "payload" not in v])
    update_nodes_with_bot_rerun(gx)
    print('Saving graph to graph.json')
    dump_graph(gx)
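The handler only reads a handful of attributes off args; a minimal argparse sketch that would satisfy it (flag names and defaults are assumptions, not taken from the real CLI):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--debug", action="store_true")
parser.add_argument("--max-workers", type=int, default=4)       # hypothetical default
parser.add_argument("--organization", default="nsls-ii-forge")
parser.add_argument("--cached", action="store_true")
parser.add_argument("--filepath", default="names.txt")          # hypothetical default
_make_graph_handle_args(parser.parse_args())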
Example #8
def main(args):
    gx = load_graph()
    ctx = MigratorSessionContext("", "", "")
    start_time = time.time()
    # limit graph to things that depend on python
    python_des = nx.descendants(gx, "pypy-meta")
    for node in sorted(
            python_des,
            key=lambda x: (len(nx.descendants(gx, x)), x),
            reverse=True,
    ):
        if time.time() - int(env.get("START_TIME", start_time)) > int(
                env.get("TIMEOUT", 60 * 30)):
            break
        # depfinder only works on Python at the moment, so only audit things
        # with python as a runtime dep
        os.makedirs("audits", exist_ok=True)
        with gx.nodes[node]["payload"] as payload:
            version = payload.get('version', None)
            if (not payload.get("archived", False) and version
                    and "python" in payload["requirements"]["run"]
                    and f'{node}_{version}.json' not in os.listdir("audits")):
                print(node)
                fctx = FeedstockContext(package_name=node,
                                        feedstock_name=payload["name"],
                                        attrs=payload)
                try:
                    deps = audit_feedstock(fctx, ctx)
                except Exception as e:
                    deps = {
                        "exception": str(e),
                        "traceback": str(traceback.format_exc()).split("\n"),
                    }
                finally:
                    with open(f"audits/{node}_{version}.json", "w") as f:
                        dump(deps, f)
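Note the `with gx.nodes[node]["payload"] as payload:` block: in the bot's codebase node payloads are lazy-loading JSON objects whose context manager writes any mutations back on exit. With plain dict payloads the equivalent is simply:

payload = gx.nodes[node]["payload"]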
Example #9
def main(args):
    gx = load_graph()
    ctx = MigratorSessionContext("", "", "")
    start_time = time.time()

    os.makedirs("audits", exist_ok=True)
    for k, v in AUDIT_REGISTRY.items():
        audit_dir = os.path.join("audits", k)
        version_path = os.path.join(audit_dir, "_version.json")
        audit_version = "_".join([v["version"], v["creation_version"]])
        if os.path.exists(version_path):
            with open(version_path) as f:
                version = load(f)
            # if the version of the code that generated the existing audits
            # differs from the current one, clear out the audit data so we
            # always use output from the latest version
            if version != audit_version:
                shutil.rmtree(audit_dir)
        os.makedirs(audit_dir, exist_ok=True)
        with open(version_path, "w") as f:
            dump(audit_version, f)

    # TODO: generalize for cran skeleton
    # limit graph to things that depend on python
    python_des = nx.descendants(gx, "python")
    for node in sorted(
        python_des,
        key=lambda x: (len(nx.descendants(gx, x)), x),
        reverse=True,
    ):
        if time.time() - int(env.get("START_TIME", start_time)) > int(
            env.get("TIMEOUT", 60 * RUNTIME_MINUTES),
        ):
            break
        # depfinder only works on Python at the moment, so only audit things
        # with python as a runtime dep
        payload = gx.nodes[node]["payload"]
        for k, v in AUDIT_REGISTRY.items():
            version = payload.get("version", None)
            ext = v["ext"]
            if (
                not payload.get("archived", False)
                and not payload.get("bad", False)
                and version
                and "python" in payload["requirements"]["run"]
                and f"{node}_{version}.{ext}" not in os.listdir(f"audits/{k}")
            ):
                fctx = FeedstockContext(
                    package_name=node,
                    feedstock_name=payload["feedstock_name"],
                    attrs=payload,
                )
                try:
                    deps = v["run"](fctx, ctx)
                except Exception as e:
                    deps = {
                        "exception": str(e),
                        "traceback": str(traceback.format_exc()).split("\n"),
                    }
                    if "dumper" in v:
                        deps = v["dumper"](deps)
                finally:
                    if deps:
                        with open(f"audits/{k}/{node}_{version}.{ext}", "w") as f:
                            v["writer"](deps, f)

    # grayskull_audit_outcome = compare_grayskull_audits(gx)
    # compute_grayskull_accuracy(grayskull_audit_outcome)
    depfinder_audit_outcome = compare_depfinder_audits(gx)
    compute_depfinder_accuracy(depfinder_audit_outcome)
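RUNTIME_MINUTES is a module-level constant not shown here; Examples #2 and #8 hard-code the same budget as 60 * 30, so a reasonable sketch is:

RUNTIME_MINUTES = 30  # assumption: matches the 30-minute budget hard-coded elsewhere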
Example #10
from conda_forge_tick.utils import load_graph, dump_graph_json

gx = load_graph()
for node in gx.nodes:
    with gx.nodes[node]["payload"] as node_attrs:
        prs = node_attrs.get("PRed", [])
        for i, pr in enumerate(prs):
            if "bot_rerun" in pr:
                print("fixing:", node)
                pr["data"]["bot_rerun"] = pr["bot_rerun"]
                del pr["bot_rerun"]

dump_graph_json(gx, "graph.json")
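The fix-up moves the flag one level down; schematically, with a hypothetical value:

# before: pr = {"bot_rerun": False, "data": {...}}
# after:  pr = {"data": {..., "bot_rerun": False}}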
Example #11
def initialize_migrators(
    github_username: str = "",
    github_password: str = "",
    github_token: Optional[str] = None,
    dry_run: bool = False,
) -> Tuple[MigratorSessionContext, list, MutableSequence[Migrator]]:
    temp = glob.glob("/tmp/*")  # snapshot of pre-existing /tmp entries, returned to the caller
    gx = load_graph()
    smithy_version = eval_cmd("conda smithy --version").strip()
    pinning_version = json.loads(
        eval_cmd("conda list conda-forge-pinning --json"))[0]["version"]

    migrators = []

    add_arch_migrate(migrators, gx)
    migration_factory(migrators, gx)
    add_replacement_migrator(
        migrators,
        gx,
        "matplotlib",
        "matplotlib-base",
        ("Unless you need `pyqt`, recipes should depend only on "
         "`matplotlib-base`."),
        alt_migrator=MatplotlibBase,
    )
    create_migration_yaml_creator(migrators=migrators, gx=gx)
    print("rebuild migration graph sizes:", flush=True)
    for m in migrators:
        print(
            f'    {getattr(m, "name", m)} graph size: '
            f'{len(getattr(m, "graph", []))}',
            flush=True,
        )
    print(" ", flush=True)

    mctx = MigratorSessionContext(
        circle_build_url=os.getenv("CIRCLE_BUILD_URL", ""),
        graph=gx,
        smithy_version=smithy_version,
        pinning_version=pinning_version,
        github_username=github_username,
        github_password=github_password,
        github_token=github_token,
        dry_run=dry_run,
    )

    print("building package import maps and version migrator", flush=True)
    python_nodes = {
        n
        for n, v in mctx.graph.nodes("payload")
        if "python" in v.get("req", "")
    }
    python_nodes.update([
        k for node_name, node in mctx.graph.nodes("payload")
        for k in node.get("outputs_names", []) if node_name in python_nodes
    ], )
    version_migrator = Version(
        python_nodes=python_nodes,
        pr_limit=PR_LIMIT * 4,
        piggy_back_migrations=[
            Jinja2VarsCleanup(),
            DuplicateLinesCleanup(),
            PipMigrator(),
            LicenseMigrator(),
            CondaForgeYAMLCleanup(),
            ExtraJinja2KeysCleanup(),
            Build2HostMigrator(),
            NoCondaInspectMigrator(),
            Cos7Config(),
        ],
    )

    migrators = [version_migrator] + migrators

    print(" ", flush=True)

    return mctx, temp, migrators
Example #12
def _update_handle_args(args):
    from conda_forge_tick.utils import load_graph
    if args.filepath != 'graph.json':
        copyfile(args.filepath, 'graph.json')
    gx = load_graph()
    update_versions_in_graph(gx)