from weakref import ref

from streamz.core import combine_latest

# _clean_text is defined alongside this function in streamz.graph


def create_graph_nodes(node, graph, prior_node=None, pc=None):
    """Create a graph from a single node, searching up and down the chain.

    Graph nodes hold weakrefs back to the stream nodes.

    Parameters
    ----------
    node : Stream instance
    graph : networkx.DiGraph instance
    """
    edge_kwargs = {}
    if node is None:
        return
    t = hash(node)
    graph.add_node(
        t,
        label=_clean_text(str(node)),
        shape=node._graphviz_shape,
        orientation=str(node._graphviz_orientation),
        style=node._graphviz_style,
        fillcolor=node._graphviz_fillcolor,
        node=ref(node),
    )
    if prior_node:
        tt = hash(prior_node)
        # If the node emits on only a subset of its upstreams, draw the
        # edge from that subset dashed to make the distinction visible.
        if (
            isinstance(node, combine_latest)
            and node.emit_on != node.upstreams
            and prior_node in node.emit_on
        ):
            edge_kwargs["style"] = "dashed"
        if graph.has_edge(t, tt):
            return
        # pass edge_kwargs through so the dashed style actually takes effect
        if pc == "downstream":
            graph.add_edge(tt, t, **edge_kwargs)
        else:
            graph.add_edge(t, tt, **edge_kwargs)
    for nodes, pc in zip(
        [list(node.downstreams), list(node.upstreams)],
        ["downstream", "upstreams"],
    ):
        for node2 in nodes:
            if node2 is not None:
                create_graph_nodes(node2, graph, node, pc=pc)
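
def _demo_create_graph_nodes():
    # A minimal usage sketch, assuming streamz and networkx are installed;
    # the two-stage pipeline below is illustrative only.
    import networkx as nx
    from streamz import Stream

    source = Stream()
    source.map(lambda x: x + 1).sink(print)

    # Walk the stream topology into a DiGraph keyed by hash(stream_node).
    g = nx.DiGraph()
    create_graph_nodes(source, g)

    # Each graph node carries a display label and a weakref back to the
    # live stream object.
    for _, data in g.nodes(data=True):
        print(data["label"], data["node"]())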
def migrator_status(
    migrator: Migrator,
    gx: nx.DiGraph,
) -> Tuple[dict, list, "graphviz.Digraph"]:
    """Get the migrator progress for a given migrator.

    Returns
    -------
    out : dict
        Dictionary of statuses with the feedstocks in them.
    order : list
        Build order for this migrator.
    gv : graphviz.Digraph
        Graphviz rendering of the migrator progress.
    """
    out: Dict[str, Set[str]] = {
        "done": set(),
        "in-pr": set(),
        "awaiting-pr": set(),
        "awaiting-parents": set(),
        "bot-error": set(),
    }

    gx2 = copy.deepcopy(getattr(migrator, "graph", gx))

    top_level = {node for node in gx2 if not list(gx2.predecessors(node))}
    build_sequence = list(cyclic_topological_sort(gx2, top_level))

    feedstock_metadata = dict()

    import graphviz
    from streamz.graph import _clean_text

    gv = graphviz.Digraph(graph_attr={"packmode": "array_3"})
    for node, node_attrs in gx2.nodes.items():
        attrs = node_attrs["payload"]

        # exclude archived feedstocks from the status
        if attrs.get("archived", False):
            continue

        node_metadata: Dict = {}
        feedstock_metadata[node] = node_metadata
        nuid = migrator.migrator_uid(attrs)
        all_pr_jsons = []
        for pr_json in attrs.get("PRed", []):
            all_pr_jsons.append(copy.deepcopy(pr_json))

        # hack around bug in migrator vs graph data for this one
        if isinstance(migrator, MatplotlibBase):
            if "name" in nuid:
                del nuid["name"]
            for i in range(len(all_pr_jsons)):
                if (
                    all_pr_jsons[i]
                    and "name" in all_pr_jsons[i]["data"]
                    and all_pr_jsons[i]["data"]["migrator_name"] == "MatplotlibBase"
                ):
                    del all_pr_jsons[i]["data"]["name"]

        for pr_json in all_pr_jsons:
            if pr_json and pr_json["data"] == frozen_to_json_friendly(nuid)["data"]:
                break
        else:
            pr_json = None

        # No PR was ever issued but the migration was performed.
        # This is only the case when the migration was done manually
        # before the bot could issue any PR.
        manually_done = pr_json is None and frozen_to_json_friendly(nuid)["data"] in (
            z["data"] for z in all_pr_jsons
        )

        buildable = not migrator.filter(attrs)
        fntc = "black"
        fc = "#ffffff"  # default fill; not every branch below assigns one
        if manually_done:
            out["done"].add(node)
            fc = "#440154"
            fntc = "white"
        elif pr_json is None:
            if buildable:
                out["awaiting-pr"].add(node)
                fc = "#35b779"
            elif not isinstance(migrator, Replacement):
                out["awaiting-parents"].add(node)
                fc = "#fde725"
        elif "PR" not in pr_json:
            out["bot-error"].add(node)
            fc = "#000000"
            fntc = "white"
        elif pr_json["PR"]["state"] == "closed":
            out["done"].add(node)
            fc = "#440154"
            fntc = "white"
        else:
            out["in-pr"].add(node)
            fc = "#31688e"
            fntc = "white"
        if node not in out["done"]:
            gv.node(
                node,
                label=_clean_text(node),
                fillcolor=fc,
                style="filled",
                fontcolor=fntc,
                URL=(pr_json or {}).get("PR", {}).get("html_url", ""),
            )

        # additional metadata for reporting
        node_metadata["num_descendants"] = len(nx.descendants(gx2, node))
        node_metadata["immediate_children"] = [
            k
            for k in sorted(gx2.successors(node))
            # gx2.nodes[k], not gx2[k], holds the node's payload attributes
            if not gx2.nodes[k].get("payload", {}).get("archived", False)
        ]
        if pr_json and "PR" in pr_json:
            # some PRs were faked and do not have html_urls
            node_metadata["pr_url"] = pr_json["PR"].get("html_url", "")

    out2: Dict = {}
    for k in out.keys():
        out2[k] = list(
            sorted(
                out[k],
                key=lambda x: build_sequence.index(x) if x in build_sequence else -1,
            ),
        )

    out2["_feedstock_status"] = feedstock_metadata
    for (e0, e1), edge_attrs in gx2.edges.items():
        if (
            e0 not in out["done"]
            and e1 not in out["done"]
            and not gx2.nodes[e0]["payload"].get("archived", False)
            and not gx2.nodes[e1]["payload"].get("archived", False)
        ):
            gv.edge(e0, e1)

    return out2, build_sequence, gv
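
def _demo_migrator_status(migrator, gx):
    # Hedged sketch of consuming migrator_status; `migrator` and `gx` are
    # placeholders for a concrete Migrator instance and the bot's feedstock
    # dependency graph.
    status, order, gv = migrator_status(migrator, gx)
    print(len(status["awaiting-pr"]), "feedstocks are ready for a PR")
    print("first feedstocks in build order:", order[:5])
    # graphviz.Digraph.render writes the diagram to disk (migrator_status.svg)
    gv.render("migrator_status", format="svg")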
def graph_migrator_status(
    migrator: Migrator,
    gx: nx.DiGraph,
) -> Tuple[dict, list, "graphviz.Digraph"]:
    """Get the migrator progress for a given migrator."""
    if hasattr(migrator, "name"):
        assert isinstance(migrator.name, str)
        migrator_name = migrator.name.lower().replace(" ", "")
    else:
        migrator_name = migrator.__class__.__name__.lower()

    num_viz = 0

    out: Dict[str, Set[str]] = {
        "done": set(),
        "in-pr": set(),
        "awaiting-pr": set(),
        "not-solvable": set(),
        "awaiting-parents": set(),
        "bot-error": set(),
    }

    gx2 = copy.deepcopy(getattr(migrator, "graph", gx))

    top_level = {node for node in gx2 if not list(gx2.predecessors(node))}
    build_sequence = list(cyclic_topological_sort(gx2, top_level))

    feedstock_metadata = dict()

    import graphviz
    from streamz.graph import _clean_text

    gv = graphviz.Digraph(graph_attr={"packmode": "array_3"})

    # the pinning feedstock isn't actually part of the migration
    if "conda-forge-pinning" in gx2.nodes():
        gx2.remove_node("conda-forge-pinning")

    for node, node_attrs in gx2.nodes.items():
        attrs = node_attrs["payload"]

        # exclude archived feedstocks from the status
        if attrs.get("archived", False):
            continue

        node_metadata: Dict = {}
        feedstock_metadata[node] = node_metadata
        nuid = migrator.migrator_uid(attrs)
        all_pr_jsons = []
        for pr_json in attrs.get("PRed", []):
            all_pr_jsons.append(copy.deepcopy(pr_json))

        feedstock_ctx = FeedstockContext(
            package_name=node,
            feedstock_name=attrs.get("feedstock_name", node),
            attrs=attrs,
        )

        # hack around bug in migrator vs graph data for this one
        if isinstance(migrator, MatplotlibBase):
            if "name" in nuid:
                del nuid["name"]
            for i in range(len(all_pr_jsons)):
                if (
                    all_pr_jsons[i]
                    and "name" in all_pr_jsons[i]["data"]
                    and all_pr_jsons[i]["data"]["migrator_name"] == "MatplotlibBase"
                ):
                    del all_pr_jsons[i]["data"]["name"]

        for pr_json in all_pr_jsons:
            if pr_json and pr_json["data"] == frozen_to_json_friendly(nuid)["data"]:
                break
        else:
            pr_json = None

        # No PR was ever issued but the migration was performed.
        # This is only the case when the migration was done manually
        # before the bot could issue any PR.
        manually_done = pr_json is None and frozen_to_json_friendly(nuid)["data"] in (
            z["data"] for z in all_pr_jsons
        )

        buildable = not migrator.filter(attrs)
        fntc = "black"
        fc = "#ffffff"  # default fill; not every branch below assigns one
        status_icon = ""
        if manually_done:
            out["done"].add(node)
            fc = "#440154"
            fntc = "white"
        elif pr_json is None:
            if buildable:
                if "not solvable" in attrs.get(
                    "pre_pr_migrator_status",
                    {},
                ).get(migrator_name, ""):
                    out["not-solvable"].add(node)
                    fc = "#ff8c00"
                elif "bot error" in attrs.get(
                    "pre_pr_migrator_status",
                    {},
                ).get(migrator_name, ""):
                    out["bot-error"].add(node)
                    fc = "#000000"
                    fntc = "white"
                else:
                    out["awaiting-pr"].add(node)
                    fc = "#35b779"
            elif not isinstance(migrator, Replacement):
                if "bot error" in attrs.get(
                    "pre_pr_migrator_status",
                    {},
                ).get(migrator_name, ""):
                    out["bot-error"].add(node)
                    fc = "#000000"
                    fntc = "white"
                else:
                    out["awaiting-parents"].add(node)
                    fc = "#fde725"
        elif "PR" not in pr_json:
            out["bot-error"].add(node)
            fc = "#000000"
            fntc = "white"
        elif pr_json["PR"]["state"] == "closed":
            out["done"].add(node)
            fc = "#440154"
            fntc = "white"
        else:
            out["in-pr"].add(node)
            fc = "#31688e"
            fntc = "white"
            pr_status = pr_json["PR"]["mergeable_state"]
            if pr_status == "clean":
                status_icon = " ✓"
            else:
                status_icon = " ❎"
        if node not in out["done"]:
            num_viz += 1
            gv.node(
                node,
                label=_clean_text(node) + status_icon,
                fillcolor=fc,
                style="filled",
                fontcolor=fntc,
                URL=(pr_json or {}).get("PR", {}).get(
                    "html_url",
                    # note: str.strip removes characters, not a suffix; this
                    # works because feedstock URLs end in "-feedstock.git"
                    feedstock_url(fctx=feedstock_ctx, protocol="https").strip(".git"),
                ),
            )

        # additional metadata for reporting
        node_metadata["num_descendants"] = len(nx.descendants(gx2, node))
        node_metadata["immediate_children"] = [
            k
            for k in sorted(gx2.successors(node))
            # gx2.nodes[k], not gx2[k], holds the node's payload attributes
            if not gx2.nodes[k].get("payload", {}).get("archived", False)
        ]
        if node in out["not-solvable"] or node in out["bot-error"]:
            node_metadata["pre_pr_migrator_status"] = attrs.get(
                "pre_pr_migrator_status",
                {},
            ).get(migrator_name, "")
        else:
            node_metadata["pre_pr_migrator_status"] = ""

        if pr_json and "PR" in pr_json:
            # some PRs were faked and do not have html_urls
            node_metadata["pr_url"] = pr_json["PR"].get(
                "html_url",
                feedstock_url(fctx=feedstock_ctx, protocol="https").strip(".git"),
            )
            node_metadata["pr_status"] = pr_json["PR"].get("mergeable_state")

    out2: Dict = {}
    for k in out.keys():
        out2[k] = list(
            sorted(
                out[k],
                key=lambda x: build_sequence.index(x) if x in build_sequence else -1,
            ),
        )

    out2["_feedstock_status"] = feedstock_metadata
    for (e0, e1), edge_attrs in gx2.edges.items():
        if (
            e0 not in out["done"]
            and e1 not in out["done"]
            and not gx2.nodes[e0]["payload"].get("archived", False)
            and not gx2.nodes[e1]["payload"].get("archived", False)
        ):
            gv.edge(e0, e1)

    print(" len(gv):", num_viz, flush=True)
    out2["_num_viz"] = num_viz

    return out2, build_sequence, gv
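
def _demo_build_order_sort():
    # Toy illustration of the sort key used for out2 in both status
    # functions: nodes present in build_sequence order by build position,
    # while anything missing gets index -1 and therefore sorts first.
    build_sequence = ["a", "b", "c"]
    nodes = {"c", "x", "a"}
    ordered = sorted(
        nodes,
        key=lambda x: build_sequence.index(x) if x in build_sequence else -1,
    )
    print(ordered)  # ['x', 'a', 'c']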