Example #1
def add_rebuild_migration_yaml(
    migrators: MutableSequence[Migrator],
    gx: nx.DiGraph,
    package_names: Sequence[str],
    output_to_feedstock: Mapping[str, str],
    excluded_feedstocks: MutableSet[str],
    migration_yaml: str,
    config: dict = {},
    migration_name: str = "",
    pr_limit: int = PR_LIMIT,
) -> None:
    """Adds rebuild migrator.

    Parameters
    ----------
    migrators : list of Migrator
        The list of migrators to run.
    gx : networkx.DiGraph
        The feedstock graph
    package_names : list of str
        The packages whose pin was moved
    output_to_feedstock : dict of str
        Mapping of output name to feedstock name
    excluded_feedstocks : set of str
        Feedstock names which should never be included in the migration
    migration_yaml : str
        The raw yaml for the migration variant dict
    config : dict
        The __migrator contents of the migration
    migration_name : str
        Name of the migration
    pr_limit : int, optional
        The number of PRs per hour, defaults to 5
    """

    total_graph = create_rebuild_graph(
        gx,
        package_names,
        excluded_feedstocks,
        include_noarch=config.get("include_noarch", False),
    )

    # Note: at this point the graph contains every package that depends on
    # the pinned package via its host, run, or test requirements. Packages
    # that lack a host section use their build section in its place.

    package_names = {
        p if p in gx.nodes else output_to_feedstock[p] for p in package_names
    } - excluded_feedstocks

    top_level = {
        node
        for package_name in package_names
        for node in gx.successors(package_name)
        if node in total_graph and len(list(total_graph.predecessors(node))) == 0
    }
    cycles = list(nx.simple_cycles(total_graph))
    migrator = MigrationYaml(
        migration_yaml,
        graph=total_graph,
        pr_limit=pr_limit,
        name=migration_name,
        top_level=top_level,
        cycles=cycles,
        piggy_back_migrations=[
            Jinja2VarsCleanup(),
            PipMigrator(),
            LicenseMigrator(),
            CondaForgeYAMLCleanup(),
            ExtraJinja2KeysCleanup(),
        ],
        **config,
    )
    print(f"bump number is {migrator.bump_number}")
    migrators.append(migrator)
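
The top_level and cycles bookkeeping above is easiest to see on a toy graph. Here is a minimal, self-contained sketch using plain networkx; the node names are invented:

import networkx as nx

# Hypothetical pruned rebuild graph: a -> b -> c, plus a 2-cycle d <-> e.
total_graph = nx.DiGraph([("a", "b"), ("b", "c"), ("d", "e"), ("e", "d")])

# "Top level" nodes have no remaining predecessors; MigrationYaml starts
# issuing rebuild PRs from these roots.
top_level = {n for n in total_graph if total_graph.in_degree(n) == 0}
print(top_level)  # {'a'}

# Cycles need special handling because no topological order exists for them.
print(list(nx.simple_cycles(total_graph)))  # [['d', 'e']] (rotation may vary)
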
Example #2
from flaky import flaky

from conda_forge_tick.migrators import Version, LicenseMigrator
from conda_forge_tick.migrators.license import _munge_licenses

from test_migrators import run_test_migration

LM = LicenseMigrator()
VER_LM = Version(set(), piggy_back_migrations=[LM])

version_license = """\
{% set version = "0.8" %}

package:
  name: viscm
  version: {{ version }}

source:
  url: https://pypi.io/packages/source/v/viscm/viscm-{{ version }}.tar.gz
  sha256: dca77e463c56d42bbf915197c9b95e98913c85bef150d2e1dd18626b8c2c9c32

build:
  number: 0
  noarch: python
  script: python -m pip install --no-deps --ignore-installed .

requirements:
  host:
    - python
    - pip
    - numpy
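
The version_license string above is Jinja2-templated YAML, which is why the tests hand it to run_test_migration rather than parsing it directly. As a hedged, self-contained sketch of how such a string becomes a dict (plain jinja2 plus PyYAML here, not the bot's real rendering pipeline, which also understands conda-build selectors):

import jinja2
import yaml

raw = """\
{% set version = "0.8" %}
package:
  name: viscm
  version: "{{ version }}"
"""

# Render the Jinja2 layer first; plain YAML cannot parse {% ... %} lines.
rendered = jinja2.Template(raw).render()
meta = yaml.safe_load(rendered)
print(meta["package"])  # {'name': 'viscm', 'version': '0.8'}
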
Example #3
def main(args: Any = None) -> None:
    mctx, *_, migrators = initialize_migrators()
    if not os.path.exists("./status"):
        os.mkdir("./status")
    total_status = {}

    for migrator in migrators:
        if isinstance(migrator, (GraphMigrator, Replacement)):
            if hasattr(migrator, "name"):
                assert isinstance(migrator.name, str)
                migrator_name = migrator.name.lower().replace(" ", "")
            else:
                migrator_name = migrator.__class__.__name__.lower()
            total_status[migrator_name] = f"{migrator.name} Migration Status"
            status, build_order, gv = migrator_status(migrator, mctx.graph)
            with open(f"./status/{migrator_name}.json", "w") as fo:
                json.dump(status, fo, indent=2)

            d = gv.pipe("dot")
            with tempfile.NamedTemporaryFile() as ntf, open(
                    f"{ntf.name}.dot", "w") as f:
                f.write(d.decode("utf-8"))
                # make the graph a bit more compact
                d = Source(
                    subprocess.check_output([
                        "unflatten", "-f", "-l", "5", "-c", "10",
                        f"{ntf.name}.dot"
                    ]).decode("utf-8")).pipe("svg")
            with open(f"./status/{migrator_name}.svg", "wb") as fb:
                fb.write(d)

    with open("./status/total_status.json", "w") as f:
        json.dump(total_status, f, sort_keys=True)

    lst = [
        k
        for k, v in mctx.graph.nodes.items()
        if len(
            [
                z
                for z in v.get("payload", {}).get("PRed", [])
                if z.get("PR", {}).get("state", "closed") == "open"
                and z.get("data", {}).get("migrator_name", "") == "Version"
            ]
        )
        >= Version.max_num_prs
    ]
    with open("./status/could_use_help.json", "w") as f:
        json.dump(
            sorted(
                lst,
                key=lambda z: len(nx.descendants(mctx.graph, z)),
                reverse=True,
            ),
            f,
            indent=2,
        )

    lm = LicenseMigrator()
    lst = [
        k for k, v in mctx.graph.nodes.items()
        if not lm.filter(v.get("payload", {}))
    ]
    with open("./status/unlicensed.json", "w") as f:
        json.dump(
            sorted(
                lst,
                key=lambda z: len(nx.descendants(mctx.graph, z)),
                reverse=True,
            ),
            f,
            indent=2,
        )
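
The could_use_help query above is dense, so here is the same predicate applied to a single toy node payload. A hedged sketch: the PRed entries are invented, and MAX_NUM_PRS stands in for Version.max_num_prs.

MAX_NUM_PRS = 3  # stand-in for Version.max_num_prs

payload = {
    "PRed": [
        {"PR": {"state": "open"}, "data": {"migrator_name": "Version"}},
        {"PR": {"state": "open"}, "data": {"migrator_name": "Version"}},
        {"PR": {"state": "closed"}, "data": {"migrator_name": "Version"}},
        {"PR": {"state": "open"}, "data": {"migrator_name": "Version"}},
    ],
}

# A feedstock "could use help" when it has accumulated max_num_prs or more
# open PRs from the Version migrator that nobody has merged or closed.
open_version_prs = [
    z
    for z in payload.get("PRed", [])
    if z.get("PR", {}).get("state", "closed") == "open"
    and z.get("data", {}).get("migrator_name", "") == "Version"
]
print(len(open_version_prs) >= MAX_NUM_PRS)  # True (3 open Version PRs)
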
Example #4
        PackageName,
    )

logger = logging.getLogger("conda_forge_tick.auto_tick")


PR_LIMIT = 5
MAX_PR_LIMIT = 50

MIGRATORS: MutableSequence[Migrator] = [
    Version(
        pr_limit=PR_LIMIT * 2,
        piggy_back_migrations=[
            Jinja2VarsCleanup(),
            PipMigrator(),
            LicenseMigrator(),
            CondaForgeYAMLCleanup(),
            ExtraJinja2KeysCleanup(),
        ],
    ),
]

BOT_RERUN_LABEL = {
    "name": "bot-rerun",
    "color": "#191970",
    "description": (
        "Apply this label if you want the bot to retry "
        "issuing a particular pull-request"
    ),
}
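
BOT_RERUN_LABEL is simply the payload for a GitHub issue label. A hedged sketch of registering it on a feedstock with github3.py, reusing the BOT_RERUN_LABEL dict from the example above; the token and repository name are placeholders, and GitHub's REST API expects the hex color without the leading "#":

import github3

gh = github3.login(token="<GITHUB_TOKEN>")  # placeholder token
repo = gh.repository("conda-forge", "some-feedstock")  # made-up repo name

# Strip the "#" because the API wants a bare hex color.
repo.create_label(
    name=BOT_RERUN_LABEL["name"],
    color=BOT_RERUN_LABEL["color"].lstrip("#"),
    description=BOT_RERUN_LABEL["description"],
)
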
Example #5
def main(args: Any = None) -> None:
    import requests

    r = requests.get(
        "https://raw.githubusercontent.com/conda-forge/"
        "conda-forge.github.io/main/img/anvil.svg", )

    # cache these for later
    if os.path.exists("status/closed_status.json"):
        with open("status/closed_status.json") as fp:
            old_closed_status = json.load(fp)
    else:
        old_closed_status = {}

    with open("status/total_status.json") as fp:
        old_total_status = json.load(fp)

    mctx, *_, migrators = initialize_migrators()
    if not os.path.exists("./status"):
        os.mkdir("./status")
    regular_status = {}
    longterm_status = {}

    print(" ", flush=True)

    for migrator in migrators:
        if hasattr(migrator, "name"):
            assert isinstance(migrator.name, str)
            migrator_name = migrator.name.lower().replace(" ", "")
        else:
            migrator_name = migrator.__class__.__name__.lower()

        print(
            "================================================================",
            flush=True,
        )
        print("name:", migrator_name, flush=True)

        if isinstance(migrator, (GraphMigrator, Replacement)):
            if isinstance(migrator, GraphMigrator):
                mgconf = yaml.safe_load(
                    getattr(migrator, "yaml_contents", "{}")
                ).get("__migrator", {})
                if mgconf.get("longterm", False) or isinstance(
                    migrator, (ArchRebuild, OSXArm)
                ):
                    longterm_status[migrator_name] = (
                        f"{migrator.name} Migration Status"
                    )
                else:
                    regular_status[migrator_name] = (
                        f"{migrator.name} Migration Status"
                    )
            else:
                regular_status[migrator_name] = (
                    f"{migrator.name} Migration Status"
                )
            status, build_order, gv = graph_migrator_status(
                migrator, mctx.graph)
            num_viz = status.pop("_num_viz", 0)
            with open(f"./status/{migrator_name}.json", "w") as fp:
                json.dump(status, fp, indent=2)

            if num_viz <= 500:
                d = gv.pipe("dot")
                with tempfile.NamedTemporaryFile(suffix=".dot") as ntf:
                    ntf.write(d)
                    # make the graph a bit more compact
                    d = Source(
                        subprocess.check_output([
                            "unflatten", "-f", "-l", "5", "-c", "10",
                            f"{ntf.name}"
                        ], ).decode("utf-8"), ).pipe("svg")
                with open(f"./status/{migrator_name}.svg", "wb") as fb:
                    fb.write(d or gv.pipe("svg"))
            else:
                with open(f"./status/{migrator_name}.svg", "wb") as fb:
                    fb.write(r.content)

        elif isinstance(migrator, Version):
            write_version_migrator_status(migrator, mctx)

        print(" ", flush=True)

    print("writing data", flush=True)
    with open("./status/regular_status.json", "w") as f:
        json.dump(regular_status, f, sort_keys=True, indent=2)

    with open("./status/longterm_status.json", "w") as f:
        json.dump(longterm_status, f, sort_keys=True, indent=2)

    total_status = {}
    total_status.update(regular_status)
    total_status.update(longterm_status)
    with open("./status/total_status.json", "w") as f:
        json.dump(total_status, f, sort_keys=True, indent=2)

    closed_status = _compute_recently_closed(
        total_status,
        old_closed_status,
        old_total_status,
    )
    with open("./status/closed_status.json", "w") as f:
        json.dump(closed_status, f, sort_keys=True, indent=2)

    print("\ncomputing feedstock and PR stats", flush=True)

    def _get_needs_help(k):
        v = mctx.graph.nodes[k]
        num_open_version_prs = len(
            [
                z
                for z in v.get("payload", {}).get("PRed", [])
                if z.get("PR", {}).get("state", "closed") == "open"
                and z.get("data", {}).get("migrator_name", "") == "Version"
            ]
        )
        if num_open_version_prs >= Version.max_num_prs:
            return k
        else:
            return None

    lst = _collect_items_from_nodes(mctx.graph, _get_needs_help)
    with open("./status/could_use_help.json", "w") as f:
        json.dump(
            sorted(
                lst,
                key=lambda z: len(nx.descendants(mctx.graph, z)),
                reverse=True,
            ),
            f,
            indent=2,
        )

    lm = LicenseMigrator()

    def _get_needs_license(k):
        v = mctx.graph.nodes[k]
        if not lm.filter(v.get("payload", {})):
            return k
        else:
            return None

    lst = _collect_items_from_nodes(mctx.graph, _get_needs_license)
    with open("./status/unlicensed.json", "w") as f:
        json.dump(
            sorted(
                lst,
                key=lambda z: len(nx.descendants(mctx.graph, z)),
                reverse=True,
            ),
            f,
            indent=2,
        )

    def _get_open_pr_states(k):
        attrs = mctx.graph.nodes[k]
        _open_prs = []
        for pr in attrs.get("PRed", []):
            if pr.get("PR", {}).get("state", "closed") != "closed":
                _open_prs.append(pr["PR"])

        return _open_prs

    open_prs = []
    for op in _collect_items_from_nodes(mctx.graph, _get_open_pr_states):
        open_prs.extend(op)
    merge_state_count = Counter([o["mergeable_state"] for o in open_prs])
    with open("./status/pr_state.csv", "a") as f:
        writer = csv.writer(f)
        writer.writerow([merge_state_count[k] for k in GH_MERGE_STATE_STATUS])
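
The unflatten detour above exists only to make wide migration graphs more compact before SVG rendering. A hedged, self-contained sketch of the same pipeline on a throwaway graph; it assumes the graphviz Python package plus the Graphviz CLI tools (dot, unflatten) on PATH:

import os
import subprocess
import tempfile

from graphviz import Source

# A deliberately wide toy graph: one root fanning out to a dozen leaves.
dot_src = "digraph { " + " ".join(f"root -> leaf{i};" for i in range(12)) + " }"

ntf = tempfile.NamedTemporaryFile(suffix=".dot", delete=False)
try:
    ntf.write(dot_src.encode("utf-8"))
    ntf.close()  # flush and close so unflatten sees the whole file
    # unflatten -l 5 -c 10 staggers leaf ranks, giving a squarer drawing
    # than one enormous row of nodes.
    compact = subprocess.check_output(
        ["unflatten", "-f", "-l", "5", "-c", "10", ntf.name],
    ).decode("utf-8")
finally:
    os.remove(ntf.name)

svg_bytes = Source(compact).pipe("svg")
print(len(svg_bytes), "bytes of SVG")
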
Example #6
def add_rebuild_migration_yaml(
    migrators: MutableSequence[Migrator],
    gx: nx.DiGraph,
    package_names: Sequence[str],
    output_to_feedstock: Mapping[str, str],
    excluded_feedstocks: MutableSet[str],
    migration_yaml: str,
    config: dict = {},
    migration_name: str = "",
    pr_limit: int = PR_LIMIT,
) -> None:
    """Adds rebuild migrator.

    Parameters
    ----------
    migrators : list of Migrator
        The list of migrators to run.
    gx : networkx.DiGraph
        The feedstock graph
    package_names : list of str
        The packages whose pin was moved
    output_to_feedstock : dict of str
        Mapping of output name to feedstock name
    excluded_feedstocks : set of str
        Feedstock names which should never be included in the migration
    migration_yaml : str
        The raw yaml for the migration variant dict
    config : dict
        The __migrator contents of the migration
    migration_name : str
        Name of the migration
    pr_limit : int, optional
        The number of PRs per hour, defaults to 5
    """

    total_graph = copy.deepcopy(gx)

    for node, node_attrs in gx.nodes.items():
        attrs: "AttrsTypedDict" = node_attrs["payload"]
        requirements = attrs.get("requirements", {})
        host = requirements.get("host", set())
        build = requirements.get("build", set())
        bh = host or build
        criteria = bh & set(package_names) and (
            "noarch" not in attrs.get("meta_yaml", {}).get("build", {})
        )
        # get host/build, run, and test requirements and launder them through
        # outputs; this should fix outputs-related issues (e.g. gdal)
        rq = set(
            map(
                lambda x: gx.graph["outputs_lut"].get(x, x),
                (host or build)
                | requirements.get("run", set())
                | requirements.get("test", set()),
            )
        )

        for e in list(total_graph.in_edges(node)):
            if e[0] not in rq:
                total_graph.remove_edge(*e)
        if not criteria or node in excluded_feedstocks:
            pluck(total_graph, node)

    # post plucking we can have several strange cases, so let's remove all self-loops
    total_graph.remove_edges_from(nx.selfloop_edges(total_graph))

    package_names = {
        p if p in gx.nodes else output_to_feedstock[p] for p in package_names
    } - excluded_feedstocks

    top_level = {
        node
        for package_name in package_names
        for node in gx.successors(package_name)
        if node in total_graph and len(list(total_graph.predecessors(node))) == 0
    }
    cycles = list(nx.simple_cycles(total_graph))
    migrator = MigrationYaml(
        migration_yaml,
        graph=total_graph,
        pr_limit=pr_limit,
        name=migration_name,
        top_level=top_level,
        cycles=cycles,
        piggy_back_migrations=[
            PipMigrator(),
            LicenseMigrator(),
            CondaForgeYAMLCleanup(),
            ExtraJinja2KeysCleanup(),
        ],
        **config,
    )
    print(f"bump number is {migrator.bump_number}")
    migrators.append(migrator)
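
pluck does the heavy lifting in the pruning loop above: it splices a non-matching node out of the graph while keeping its neighbours connected. A hedged reimplementation sketch on plain networkx; the real conda_forge_tick.utils.pluck may differ in details:

import networkx as nx

def pluck_sketch(g: nx.DiGraph, node: str) -> None:
    """Remove `node` but bridge each predecessor to each successor,
    so rebuild-ordering constraints survive the removal."""
    if node in g:
        preds = list(g.predecessors(node))
        succs = list(g.successors(node))
        g.remove_node(node)
        # Skip p == s, mirroring the self-loop cleanup after the loop above.
        g.add_edges_from((p, s) for p in preds for s in succs if p != s)

g = nx.DiGraph([("a", "b"), ("b", "c")])
pluck_sketch(g, "b")
print(list(g.edges()))  # [('a', 'c')] -- 'a' must still rebuild before 'c'
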
Example #7
def add_rebuild_migration_yaml(
    migrators: MutableSequence[Migrator],
    gx: nx.DiGraph,
    package_names: Sequence[str],
    migration_yaml: str,
    config: dict = {},
    migration_name: str = "",
    pr_limit: int = 50,
) -> None:
    """Adds rebuild migrator.

    Parameters
    ----------
    migrators : list of Migrator
        The list of migrators to run.
    gx : networkx.DiGraph
        The feedstock graph
    package_names : list of str
        The packages whose pin was moved
    migration_yaml : str
        The raw yaml for the migration variant dict
    config : dict
        The __migrator contents of the migration
    migration_name : str
        Name of the migration
    pr_limit : int, optional
        The number of PRs per hour, defaults to 50
    """

    total_graph = copy.deepcopy(gx)

    for node, node_attrs in gx.nodes.items():
        attrs: "AttrsTypedDict" = node_attrs["payload"]
        meta_yaml = attrs.get("meta_yaml", {}) or {}
        # TODO: fix this, since it doesn't fully apply the strong constraints
        if "strong" in meta_yaml.get("build", {}) or any(
            [
                "strong" in output.get("build", {})
                for output in meta_yaml.get("outputs", [])
                if output.get("build")
            ],
        ):
            bh = get_requirements(meta_yaml, run=False)
        else:
            bh = get_requirements(
                meta_yaml, run=False, build=False, host=True,
            ) or get_requirements(meta_yaml, build=True, run=False, host=False)
        criteria = any(package_name in bh for package_name in package_names) and (
            "noarch" not in meta_yaml.get("build", {})
        )

        rq = _host_run_test_dependencies(meta_yaml)

        for e in list(total_graph.in_edges(node)):
            if e[0] not in rq:
                total_graph.remove_edge(*e)
        if not criteria:
            pluck(total_graph, node)

    # post plucking we can have several strange cases, so let's remove all self-loops
    total_graph.remove_edges_from(nx.selfloop_edges(total_graph))

    top_level = {
        node
        for package_name in package_names
        for node in gx.successors(package_name)
        if node in total_graph and len(list(total_graph.predecessors(node))) == 0
    }
    cycles = list(nx.simple_cycles(total_graph))
    migrator = MigrationYaml(
        migration_yaml,
        graph=total_graph,
        pr_limit=pr_limit,
        name=migration_name,
        top_level=top_level,
        cycles=cycles,
        piggy_back_migrations=[PipMigrator(), LicenseMigrator()],
        **config,
    )
    print(f"bump number is {migrator.bump_number}")
    migrators.append(migrator)
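
get_requirements is used above with various build/host/run toggles. Here is a hedged sketch of what such a helper plausibly does on a rendered meta_yaml dict; the real function in conda_forge_tick may differ in signature and edge cases:

def get_requirements_sketch(meta_yaml, build=True, host=True, run=True):
    """Collect the requested requirement sections from the top-level recipe
    and from every output, returned as one flat set."""
    sections = [meta_yaml.get("requirements", {}) or {}]
    sections += [
        out.get("requirements", {}) or {} for out in meta_yaml.get("outputs", [])
    ]
    wanted = [k for k, on in (("build", build), ("host", host), ("run", run)) if on]
    return {
        req
        for section in sections
        for key in wanted
        for req in (section.get(key) or [])
    }

meta = {"requirements": {"host": ["python", "pip"], "run": ["python"]}}
print(get_requirements_sketch(meta, run=False))  # {'python', 'pip'}
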
Example #8
    Version,
    PipMigrator,
    LicenseMigrator,
    MigrationYaml,
    GraphMigrator,
    Replacement,
    ArchRebuild,
)

if typing.TYPE_CHECKING:
    from .cli import CLIArgs
    from .migrators_types import *


MIGRATORS: MutableSequence[Migrator] = [
    Version(pr_limit=30, piggy_back_migrations=[PipMigrator(), LicenseMigrator()]),
]

BOT_RERUN_LABEL = {
    "name": "bot-rerun",
    "color": "#191970",
    "description": "Apply this label if you want the bot to retry issueing a particular pull-request",
}


def run(
    feedstock_ctx: FeedstockContext,
    migrator: Migrator,
    protocol: str = "ssh",
    pull_request: bool = True,
    rerender: bool = True,
Example #9
def initialize_migrators(
    github_username: str = "",
    github_password: str = "",
    github_token: Optional[str] = None,
    dry_run: bool = False,
) -> Tuple[MigratorSessionContext, list, MutableSequence[Migrator]]:
    temp = glob.glob("/tmp/*")
    gx = load_graph()
    smithy_version = eval_cmd("conda smithy --version").strip()
    pinning_version = json.loads(
        eval_cmd("conda list conda-forge-pinning --json"))[0]["version"]

    migrators = []

    add_arch_migrate(migrators, gx)
    migration_factory(migrators, gx)
    add_replacement_migrator(
        migrators,
        gx,
        "matplotlib",
        "matplotlib-base",
        ("Unless you need `pyqt`, recipes should depend only on "
         "`matplotlib-base`."),
        alt_migrator=MatplotlibBase,
    )
    create_migration_yaml_creator(migrators=migrators, gx=gx)
    print("rebuild migration graph sizes:", flush=True)
    for m in migrators:
        print(
            f'    {getattr(m, "name", m)} graph size: '
            f'{len(getattr(m, "graph", []))}',
            flush=True,
        )
    print(" ", flush=True)

    mctx = MigratorSessionContext(
        circle_build_url=os.getenv("CIRCLE_BUILD_URL", ""),
        graph=gx,
        smithy_version=smithy_version,
        pinning_version=pinning_version,
        github_username=github_username,
        github_password=github_password,
        github_token=github_token,
        dry_run=dry_run,
    )

    print("building package import maps and version migrator", flush=True)
    python_nodes = {
        n
        for n, v in mctx.graph.nodes("payload")
        if "python" in v.get("req", "")
    }
    python_nodes.update(
        k
        for node_name, node in mctx.graph.nodes("payload")
        for k in node.get("outputs_names", [])
        if node_name in python_nodes
    )
    version_migrator = Version(
        python_nodes=python_nodes,
        pr_limit=PR_LIMIT * 4,
        piggy_back_migrations=[
            Jinja2VarsCleanup(),
            DuplicateLinesCleanup(),
            PipMigrator(),
            LicenseMigrator(),
            CondaForgeYAMLCleanup(),
            ExtraJinja2KeysCleanup(),
            Build2HostMigrator(),
            NoCondaInspectMigrator(),
            Cos7Config(),
        ],
    )

    migrators = [version_migrator] + migrators

    print(" ", flush=True)

    return mctx, temp, migrators
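
The nodes("payload") pattern above iterates (name, payload) pairs. A tiny self-contained demonstration of how python_nodes plus their output names get collected, on a toy graph with made-up payloads:

import networkx as nx

gx = nx.DiGraph()
gx.add_node(
    "numpy",
    payload={"req": {"python", "pip"}, "outputs_names": ["numpy", "numpy-base"]},
)
gx.add_node("cmake", payload={"req": {"make"}, "outputs_names": ["cmake"]})

# gx.nodes("payload") yields (node_name, payload_attribute) pairs.
python_nodes = {n for n, v in gx.nodes("payload") if "python" in v.get("req", "")}
# Fold in every output name published by a python feedstock, so lookups
# by output name also hit.
python_nodes.update(
    k
    for node_name, node in gx.nodes("payload")
    for k in node.get("outputs_names", [])
    if node_name in python_nodes
)
print(sorted(python_nodes))  # ['numpy', 'numpy-base']
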
Example #10
def main(args: Any = None) -> None:
    mctx, *_, migrators = initialize_migrators()
    if not os.path.exists("./status"):
        os.mkdir("./status")
    total_status = {}

    for migrator in migrators:
        if isinstance(migrator, (GraphMigrator, Replacement)):
            if hasattr(migrator, "name"):
                assert isinstance(migrator.name, str)
                migrator_name = migrator.name.lower().replace(" ", "")
            else:
                migrator_name = migrator.__class__.__name__.lower()
            total_status[migrator_name] = f"{migrator.name} Migration Status"
            status, build_order, gv = graph_migrator_status(
                migrator, mctx.graph)
            with open(f"./status/{migrator_name}.json", "w") as fp:
                json.dump(status, fp, indent=2)

            d = gv.pipe("dot")
            with tempfile.NamedTemporaryFile(suffix=".dot") as ntf:
                ntf.write(d)
                # make the graph a bit more compact
                d = Source(
                    subprocess.check_output([
                        "unflatten", "-f", "-l", "5", "-c", "10", f"{ntf.name}"
                    ], ).decode("utf-8"), ).pipe("svg")
            with open(f"./status/{migrator_name}.svg", "wb") as fb:
                fb.write(d or gv.pipe("svg"))
        elif isinstance(migrator, Version):
            write_version_migrator_status(migrator, mctx)

    with open("./status/total_status.json", "w") as f:
        json.dump(total_status, f, sort_keys=True)

    lst = [
        k for k, v in mctx.graph.nodes.items() if len([
            z for z in v.get("payload", {}).get("PRed", [])
            if z.get("PR", {}).get("state", "closed") == "open"
            and z.get("data", {}).get("migrator_name", "") == "Version"
        ], ) >= Version.max_num_prs
    ]
    with open("./status/could_use_help.json", "w") as f:
        json.dump(
            sorted(
                lst,
                key=lambda z: len(nx.descendants(mctx.graph, z)),
                reverse=True,
            ),
            f,
            indent=2,
        )

    lm = LicenseMigrator()
    lst = [
        k for k, v in mctx.graph.nodes.items()
        if not lm.filter(v.get("payload", {}))
    ]
    with open("./status/unlicensed.json", "w") as f:
        json.dump(
            sorted(
                lst,
                key=lambda z: len(nx.descendants(mctx.graph, z)),
                reverse=True,
            ),
            f,
            indent=2,
        )
    open_prs = []
    for node, attrs in mctx.graph.nodes("payload"):
        for pr in attrs.get("PRed", []):
            if pr.get("PR", {}).get("state", "closed") != "closed":
                open_prs.append(pr["PR"])
    merge_state_count = Counter([o["mergeable_state"] for o in open_prs])
    with open("./status/pr_state.csv", "a") as f:
        writer = csv.writer(f)
        writer.writerow([merge_state_count[k] for k in GH_MERGE_STATE_STATUS])
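
The CSV tail above appends one row of counts per run, one column per mergeable state. A self-contained sketch with invented PR payloads and an assumed subset of GH_MERGE_STATE_STATUS; the real constant in conda_forge_tick defines the authoritative column order:

import csv
import io
from collections import Counter

# Assumed subset of GitHub mergeable states, for illustration only.
GH_MERGE_STATE_STATUS = ["behind", "blocked", "clean", "dirty", "unknown"]

open_prs = [
    {"mergeable_state": "clean"},
    {"mergeable_state": "blocked"},
    {"mergeable_state": "clean"},
]

merge_state_count = Counter(o["mergeable_state"] for o in open_prs)
buf = io.StringIO()
writer = csv.writer(buf)
# Missing states count as 0, so every appended row has the same columns.
writer.writerow([merge_state_count[k] for k in GH_MERGE_STATE_STATUS])
print(buf.getvalue().strip())  # 0,1,2,0,0
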
Example #11
def main(args: "CLIArgs") -> None:
    # start profiler
    profile_profiler = cProfile.Profile()
    profile_profiler.enable()

    # logging
    from .xonsh_utils import env

    debug = env.get("CONDA_FORGE_TICK_DEBUG", False)
    if debug:
        setup_logger(logging.getLogger("conda_forge_tick"), level="debug")
    else:
        setup_logger(logging.getLogger("conda_forge_tick"))

    github_username = env.get("USERNAME", "")
    github_password = env.get("PASSWORD", "")
    github_token = env.get("GITHUB_TOKEN")
    global MIGRATORS
    mctx, temp, MIGRATORS = initialize_migrators(
        github_username=github_username,
        github_password=github_password,
        dry_run=args.dry_run,
        github_token=github_token,
    )
    python_nodes = {
        n
        for n, v in mctx.graph.nodes("payload")
        if "python" in v.get("req", "")
    }
    python_nodes.update(
        k
        for node_name, node in mctx.graph.nodes("payload")
        for k in node.get("outputs_names", [])
        if node_name in python_nodes
    )
    imports_by_package, packages_by_import = create_package_import_maps(
        python_nodes)
    version_migrator = Version(
        python_nodes=python_nodes,
        imports_by_package=imports_by_package,
        packages_by_import=packages_by_import,
        pr_limit=PR_LIMIT * 2,
        piggy_back_migrations=[
            Jinja2VarsCleanup(),
            PipMigrator(),
            LicenseMigrator(),
            CondaForgeYAMLCleanup(),
            ExtraJinja2KeysCleanup(),
        ],
    )

    MIGRATORS = [version_migrator] + MIGRATORS

    # compute the time per migrator
    (
        num_nodes,
        time_per_migrator,
        tot_time_per_migrator,
    ) = _compute_time_per_migrator(mctx)
    for i, migrator in enumerate(MIGRATORS):
        if hasattr(migrator, "name"):
            extra_name = "-%s" % migrator.name
        else:
            extra_name = ""

        logger.info(
            "Total migrations for %s%s: %d - gets %f seconds (%f percent)",
            migrator.__class__.__name__,
            extra_name,
            num_nodes[i],
            time_per_migrator[i],
            time_per_migrator[i] / tot_time_per_migrator * 100,
        )

    for mg_ind, migrator in enumerate(MIGRATORS):

        mmctx = MigratorContext(session=mctx, migrator=migrator)
        migrator.bind_to_ctx(mmctx)

        good_prs = 0
        _mg_start = time.time()
        effective_graph = mmctx.effective_graph
        time_per = time_per_migrator[mg_ind]

        if hasattr(migrator, "name"):
            extra_name = "-%s" % migrator.name
        else:
            extra_name = ""

        logger.info(
            "Running migrations for %s%s: %d",
            migrator.__class__.__name__,
            extra_name,
            len(effective_graph.nodes),
        )

        possible_nodes = list(migrator.order(effective_graph, mctx.graph))

        # version debugging info
        if isinstance(migrator, Version):
            logger.info("possible version migrations:")
            for node_name in possible_nodes:
                with effective_graph.nodes[node_name]["payload"] as attrs:
                    logger.info(
                        "    node|curr|new|attempts: %s|%s|%s|%d",
                        node_name,
                        attrs.get("version"),
                        attrs.get("new_version"),
                        (attrs.get("new_version_attempts", {}).get(
                            attrs.get("new_version", ""),
                            0,
                        )),
                    )

        for node_name in possible_nodes:
            with mctx.graph.nodes[node_name]["payload"] as attrs:
                # Don't let CI timeout, break ahead of the timeout so we make certain
                # to write to the repo
                # TODO: convert these env vars
                _now = time.time()
                if ((_now - int(env.get("START_TIME", time.time())) > int(
                        env.get("TIMEOUT", 600)))
                        or good_prs >= migrator.pr_limit
                        or (_now - _mg_start) > time_per):
                    break

                fctx = FeedstockContext(
                    package_name=node_name,
                    feedstock_name=attrs["feedstock_name"],
                    attrs=attrs,
                )

                print("\n", flush=True, end="")
                logger.info(
                    "%s%s IS MIGRATING %s",
                    migrator.__class__.__name__.upper(),
                    extra_name,
                    fctx.package_name,
                )
                try:
                    # Don't bother running if we are at zero
                    if (args.dry_run or mctx.gh.rate_limit()["resources"]
                        ["core"]["remaining"] == 0):
                        break
                    migrator_uid, pr_json = run(
                        feedstock_ctx=fctx,
                        migrator=migrator,
                        rerender=migrator.rerender,
                        protocol="https",
                        hash_type=attrs.get("hash_type", "sha256"),
                    )
                    # if migration successful
                    if migrator_uid:
                        d = frozen_to_json_friendly(migrator_uid)
                        # if we have the PR already do nothing
                        if d["data"] in [
                                existing_pr["data"]
                                for existing_pr in attrs.get("PRed", [])
                        ]:
                            pass
                        else:
                            if not pr_json:
                                pr_json = {
                                    "state": "closed",
                                    "head": {
                                        "ref": "<this_is_not_a_branch>"
                                    },
                                }
                            d["PR"] = pr_json
                            attrs.setdefault("PRed", []).append(d)
                        attrs.update(
                            {
                                "smithy_version": mctx.smithy_version,
                                "pinning_version": mctx.pinning_version,
                            }, )

                except github3.GitHubError as e:
                    if e.msg == "Repository was archived so is read-only.":
                        attrs["archived"] = True
                    else:
                        logger.critical(
                            "GITHUB ERROR ON FEEDSTOCK: %s",
                            fctx.feedstock_name,
                        )
                        if is_github_api_limit_reached(e, mctx.gh):
                            break
                except URLError as e:
                    logger.exception("URLError ERROR")
                    attrs["bad"] = {
                        "exception": str(e),
                        "traceback": str(traceback.format_exc()).split("\n"),
                        "code": getattr(e, "code"),
                        "url": getattr(e, "url"),
                    }
                except Exception as e:
                    logger.exception("NON GITHUB ERROR")
                    attrs["bad"] = {
                        "exception": str(e),
                        "traceback": str(traceback.format_exc()).split("\n"),
                    }
                else:
                    if migrator_uid:
                        # On successful PR add to our counter
                        good_prs += 1
                finally:
                    # Write graph partially through
                    if not args.dry_run:
                        dump_graph(mctx.graph)

                    eval_cmd(f"rm -rf {mctx.rever_dir}/*")
                    logger.info(os.getcwd())
                    for f in glob.glob("/tmp/*"):
                        if f not in temp:
                            eval_cmd(f"rm -rf {f}")

    if not args.dry_run:
        logger.info(
            "API Calls Remaining: %d",
            mctx.gh.rate_limit()["resources"]["core"]["remaining"],
        )
    logger.info("Done")

    # stop profiler
    profile_profiler.disable()

    # human readable
    s_stream = io.StringIO()

    # TODO: There are other ways to do this, with more freedom
    profile_stats = pstats.Stats(
        profile_profiler,
        stream=s_stream,
    ).sort_stats("tottime")
    profile_stats.print_stats()

    # get current time
    now = datetime.now()
    current_time = now.strftime("%d-%m-%Y") + "_" + now.strftime("%H_%M_%S")

    # output to data
    os.makedirs("profiler", exist_ok=True)
    with open(f"profiler/{current_time}.txt", "w+") as f:
        f.write(s_stream.getvalue())
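
The profiler bookends in main() follow the standard cProfile/pstats recipe. A self-contained miniature of the same pattern, with a stand-in workload:

import cProfile
import io
import pstats

prof = cProfile.Profile()
prof.enable()

# ... the work being profiled; here a throwaway busy loop.
sum(i * i for i in range(100_000))

prof.disable()

stream = io.StringIO()
# Sort by total time spent inside each function, as main() does above,
# and keep only the top five entries.
pstats.Stats(prof, stream=stream).sort_stats("tottime").print_stats(5)
print(stream.getvalue())
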