def load_dependency_graph(self):
    """Load the model dependency graph from the configured JSON file.

    The file maps Model1 -> [(Model2, src_port, dst_port), ...] where
    src_port is the field that accesses Model2 from Model1 and dst_port
    is the field that accesses Model1 from Model2.  Populates
    ``self.model_dependency_graph`` with both orientations, keyed by a
    "deletion" boolean (True -> reversed graph).

    Raises:
        Exception: re-raised after logging if loading/parsing fails.
    """
    dep_path = Config.get("dependency_graph")
    self.log.info('Loading model dependency graph', path=dep_path)
    try:
        # Context manager so the file handle is closed promptly
        # (the original leaked the handle via open(...).read()).
        with open(dep_path) as dep_file:
            dep_graph_str = dep_file.read()
        # joint_dependencies is of the form { Model1 -> [(Model2, src_port, dst_port), ...] }
        # src_port is the field that accesses Model2 from Model1
        # dst_port is the field that accesses Model1 from Model2
        joint_dependencies = json.loads(dep_graph_str)
        model_dependency_graph = DiGraph()
        for src_model, deps in joint_dependencies.items():
            for dep in deps:
                dst_model, src_accessor, dst_accessor = dep
                if src_model != dst_model:
                    # Pass accessors as keyword edge attributes: a positional
                    # attribute dict is rejected by networkx >= 2.0.
                    model_dependency_graph.add_edge(
                        src_model, dst_model,
                        src_accessor=src_accessor,
                        dst_accessor=dst_accessor)
        model_dependency_graph_rev = model_dependency_graph.reverse(copy=True)
        self.model_dependency_graph = {
            # deletion
            True: model_dependency_graph_rev,
            False: model_dependency_graph
        }
        self.log.info("Loaded dependencies",
                      edges=model_dependency_graph.edges())
    except Exception as e:
        self.log.exception("Error loading dependency graph", e=e)
        # Bare raise preserves the original traceback.
        raise
def __init__(
    self,
    g: nx.DiGraph,
    starts: List[int],
    ends: List[int],
    network_input_sizes: Dict[int, int],
    node_output_sizes: Dict[int, int],
    node_output_dimensions: Dict[int, int],
    network_output_sizes: Dict[int, int],
    output_channel_candidates: List[int],
    n_nodes: int,
    batch_size: int,
):
    """Store the architecture graph and derive per-node bookkeeping.

    Args mirror the attributes they populate; derived attributes
    (input sizes/dimensions, output channels, module features) are
    computed afterwards, in order, from the stored attributes.
    """
    self.g = g
    self.g_inv = g.reverse()  # edge-reversed graph for backward traversal
    self.starts = starts
    self.ends = ends
    self.network_input_sizes = network_input_sizes
    self.node_output_sizes = node_output_sizes
    self.node_output_dimensions = node_output_dimensions
    self.network_output_sizes = network_output_sizes
    self.batch_size = batch_size
    # Derived lookups; these helpers read the attributes set above.
    self.node_input_sizes = self.get_input_sizes()
    self.node_input_dimensions = self.get_input_dimensions()
    self.output_channels = self.__calc_output_channels(output_channel_candidates)
    self.module_vec = ModuleVec()
    # Register the supported module types with a plain loop: a list
    # comprehension executed only for its side effects is an anti-pattern.
    for module_name in ['identity', 'concat', 'gcn', 'linear', 'flatten',
                        'out_channels', 'relu', 'pool']:
        self.module_vec.register(module_name)
    self.n_nodes = n_nodes
    self.n_features = self.module_vec.num_features
def create_forward_dominators(cfg: nx.DiGraph):
    """Compute the immediate forward dominator of every node in ``cfg``.

    Forward dominance on a CFG is ordinary dominance computed on the
    edge-reversed CFG, rooted at the exit node.  Returns a dict mapping
    each node (except "EXIT_POINT" itself) to its immediate forward
    dominator.
    """
    inverted_cfg = cfg.reverse()
    fdoms = nx.algorithms.immediate_dominators(inverted_cfg, "EXIT_POINT")
    # The exit node trivially dominates itself; drop it from the result.
    del fdoms["EXIT_POINT"]
    return fdoms
def update_from_db(self, session):
    # type: (Session) -> None
    """Rebuild the in-memory group/permission graph from the database.

    Skips the rebuild when the stored checkpoint is unchanged.  All data
    is gathered outside ``self.lock`` and swapped in atomically at the
    end, so readers never observe a partially-updated graph.
    """
    # Only allow one thread at a time to construct a fresh graph.
    with self._update_lock:
        checkpoint, checkpoint_time = self._get_checkpoint(session)
        if checkpoint == self.checkpoint:
            self._logger.debug("Checkpoint hasn't changed. Not Updating.")
            return
        self._logger.debug("Checkpoint changed; updating!")
        start_time = datetime.utcnow()
        # Pull everything from the DB before touching shared state.
        user_metadata = self._get_user_metadata(session)
        groups, disabled_groups = self._get_groups(session, user_metadata)
        permissions = self._get_permissions(session)
        group_grants = self._get_group_grants(session)
        group_service_accounts = self._get_group_service_accounts(session)
        service_account_grants = all_service_account_permissions(session)
        nodes = self._get_nodes(groups, user_metadata)
        edges = self._get_edges(session)
        # Edges whose role is "np-owner" confer ownership without permission
        # inheritance; they are excluded from the permission graph below.
        edges_without_np_owner = [
            (n1, n2) for n1, n2, r in edges if GROUP_EDGE_ROLES[r["role"]] != "np-owner"
        ]
        graph = DiGraph()
        graph.add_nodes_from(nodes)
        graph.add_edges_from(edges)
        rgraph = graph.reverse()
        # We need a separate graph without np-owner edges to construct the mapping of
        # permissions to users with that grant.
        permission_graph = DiGraph()
        permission_graph.add_nodes_from(nodes)
        permission_graph.add_edges_from(edges_without_np_owner)
        grants_by_permission = self._get_grants_by_permission(
            permission_graph, group_grants, service_account_grants, user_metadata)
        # Atomic swap: publish the new state under the reader lock.
        with self.lock:
            self._graph = graph
            self._rgraph = rgraph
            self.checkpoint = checkpoint
            self.checkpoint_time = checkpoint_time
            self.user_metadata = user_metadata
            self._groups = groups
            self._disabled_groups = disabled_groups
            self._permissions = permissions
            self._group_grants = group_grants
            self._group_service_accounts = group_service_accounts
            self._service_account_grants = service_account_grants
            self._grants_by_permission = grants_by_permission
        duration = datetime.utcnow() - start_time
        # Emit the rebuild latency in milliseconds.
        stats.log_rate("graph_update_ms", int(duration.total_seconds() * 1000))
def can_contain(
    graph: nx.DiGraph,
    target: str = "shiny gold bags",
) -> set[str]:
    """Return every node from which ``target`` is reachable.

    Traverses the edge-reversed graph from ``target`` and collects all
    nodes encountered, excluding ``target`` itself.
    """
    inverted = graph.reverse(copy=True)
    return {
        node
        for node in nx.dfs_preorder_nodes(inverted, source=target)
        if node != target
    }
def update_from_db(self, session):
    """Rebuild the in-memory group graph and metadata from the database.

    No-op when the stored checkpoint is unchanged.  New state is built
    outside ``self.lock`` and published atomically at the end.
    """
    # Only allow one thread at a time to construct a fresh graph.
    with self.update_lock:
        checkpoint, checkpoint_time = self._get_checkpoint(session)
        if checkpoint == self.checkpoint:
            self.logger.debug("Checkpoint hasn't changed. Not Updating.")
            return
        self.logger.debug("Checkpoint changed; updating!")
        new_graph = DiGraph()
        new_graph.add_nodes_from(self._get_nodes_from_db(session))
        new_graph.add_edges_from(self._get_edges_from_db(session))
        rgraph = new_graph.reverse()
        # Nodes are (type, name) tuples; split them into user/group sets.
        users = set()
        groups = set()
        for (node_type, node_name) in new_graph.nodes():
            if node_type == "User":
                users.add(node_name)
            elif node_type == "Group":
                groups.add(node_name)
        user_metadata = self._get_user_metadata(session)
        permission_metadata = self._get_permission_metadata(session)
        service_account_permissions = all_service_account_permissions(session)
        group_metadata = self._get_group_metadata(session, permission_metadata)
        group_service_accounts = self._get_group_service_accounts(session)
        permission_tuples = self._get_permission_tuples(session)
        group_tuples = self._get_group_tuples(session)
        disabled_group_tuples = self._get_group_tuples(session, enabled=False)
        # Atomic swap of all derived state under the reader lock.
        with self.lock:
            self._graph = new_graph
            self._rgraph = rgraph
            self.checkpoint = checkpoint
            self.checkpoint_time = checkpoint_time
            self.users = users
            self.groups = groups
            # Flatten the per-key permission lists into one set.
            self.permissions = {
                perm.permission
                for perm_list in itervalues(permission_metadata)
                for perm in perm_list
            }
            self.user_metadata = user_metadata
            self.group_metadata = group_metadata
            self.group_service_accounts = group_service_accounts
            self.permission_metadata = permission_metadata
            self.service_account_permissions = service_account_permissions
            self.permission_tuples = permission_tuples
            self.group_tuples = group_tuples
            self.disabled_group_tuples = disabled_group_tuples
def dependent_node_iterator(
    in_graph: DiGraph,
) -> Generator[List[Any], None, None]:
    """
    Create a generator that produces a list of nodes.
    Each generation of nodes only depend on previously generated nodes.

    Raises:
        RuntimeError: if ``in_graph`` is a MultiDiGraph.
    """

    def successor_it(g: DiGraph) -> Generator[List[Any], None, None]:
        """Yield successive generations of one weakly-connected island."""
        visited = set()
        # start with all roots of the sub-graph
        to_emit = {n for n, d in g.in_degree if d == 0}

        # make sure a node is only selected if it is not visited already
        # and all predecessors have been visited already
        def allowed(nid: Hashable) -> bool:
            pred = g.predecessors(nid)
            req = [n for n in pred if n != nid and n not in visited]
            return nid not in visited and not req

        while to_emit:
            # emit the related node
            yield [nid for nid in to_emit]
            # add all nodes as visited
            visited.update(to_emit)
            # get all successors
            to_emit = {
                succ
                for nid in to_emit
                for succ in g.successors(nid)
                if allowed(succ)
            }

    if isinstance(in_graph, MultiDiGraph):
        raise RuntimeError("MultiDiGraph not supported")

    # reverse the directed graph -> a leaf becomes a root
    graph = in_graph.reverse()
    # find all islands and create a generator per island
    generators = [
        successor_it(graph.subgraph(island_nodes))
        for island_nodes in connected_components(graph.to_undirected(as_view=True))
    ]
    # concatenate the result of the generators
    while generators:
        nxt = []
        exhausted = []
        # BUGFIX: never remove from `generators` while iterating it -- doing so
        # shifts the indices and silently skips the generator that follows the
        # removed one, desynchronizing the emitted generations across islands.
        # Collect exhausted generators and drop them after the pass.
        for gen in generators:
            try:
                nxt.extend(next(gen))
            except StopIteration:
                exhausted.append(gen)
        for gen in exhausted:
            generators.remove(gen)
        if nxt:
            yield nxt
def update_from_db(self, session):
    """Rebuild the in-memory group graph and metadata from the database.

    No-op when the stored checkpoint is unchanged.  New state is built
    outside ``self.lock`` and published atomically at the end.
    """
    # Only allow one thread at a time to construct a fresh graph.
    with self.update_lock:
        checkpoint, checkpoint_time = self._get_checkpoint(session)
        if checkpoint == self.checkpoint:
            self.logger.debug("Checkpoint hasn't changed. Not Updating.")
            return
        self.logger.debug("Checkpoint changed; updating!")
        new_graph = DiGraph()
        new_graph.add_nodes_from(self._get_nodes_from_db(session))
        new_graph.add_edges_from(self._get_edges_from_db(session))
        rgraph = new_graph.reverse()
        # Nodes are (type, name) tuples; split them into user/group sets.
        users = set()
        groups = set()
        for (node_type, node_name) in new_graph.nodes():
            if node_type == "User":
                users.add(node_name)
            elif node_type == "Group":
                groups.add(node_name)
        user_metadata = self._get_user_metadata(session)
        permission_metadata = self._get_permission_metadata(session)
        service_account_permissions = all_service_account_permissions(session)
        group_metadata = self._get_group_metadata(session, permission_metadata)
        group_service_accounts = self._get_group_service_accounts(session)
        permission_tuples = self._get_permission_tuples(session)
        group_tuples = self._get_group_tuples(session)
        disabled_group_tuples = self._get_group_tuples(session, enabled=False)
        # Atomic swap of all derived state under the reader lock.
        with self.lock:
            self._graph = new_graph
            self._rgraph = rgraph
            self.checkpoint = checkpoint
            self.checkpoint_time = checkpoint_time
            self.users = users
            self.groups = groups
            # Flatten the per-key permission lists into one set.
            self.permissions = {perm.permission
                                for perm_list in permission_metadata.values()
                                for perm in perm_list}
            self.user_metadata = user_metadata
            self.group_metadata = group_metadata
            self.group_service_accounts = group_service_accounts
            self.permission_metadata = permission_metadata
            self.service_account_permissions = service_account_permissions
            self.permission_tuples = permission_tuples
            self.group_tuples = group_tuples
            self.disabled_group_tuples = disabled_group_tuples
def load_dependency_graph(self):
    """Load the model dependency graph from config, or default to empty.

    Populates ``self.model_dependency_graph`` with both orientations of
    the graph, keyed by a "deletion" boolean (True -> reversed graph).

    Raises:
        Exception: re-raised after logging if loading/parsing fails.
    """
    try:
        if Config.get("dependency_graph"):
            self.log.debug(
                "Loading model dependency graph",
                path=Config.get("dependency_graph"),
            )
            # Context manager so the file handle is closed promptly
            # (the original leaked the handle via open(...).read()).
            with open(Config.get("dependency_graph")) as dep_file:
                dep_graph_str = dep_file.read()
        else:
            self.log.debug("Using default model dependency graph", graph={})
            dep_graph_str = "{}"

        # joint_dependencies is of the form { Model1 -> [(Model2, src_port, dst_port), ...] }
        # src_port is the field that accesses Model2 from Model1
        # dst_port is the field that accesses Model1 from Model2
        static_dependencies = json.loads(dep_graph_str)
        dynamic_dependencies = (
            []
        )  # Dropped Service and ServiceInstance dynamic dependencies

        joint_dependencies = dict(
            list(static_dependencies.items()) + dynamic_dependencies
        )

        model_dependency_graph = DiGraph()

        for src_model, deps in joint_dependencies.items():
            for dep in deps:
                dst_model, src_accessor, dst_accessor = dep
                if src_model != dst_model:
                    edge_label = {
                        "src_accessor": src_accessor,
                        "dst_accessor": dst_accessor,
                    }
                    model_dependency_graph.add_edge(
                        src_model, dst_model, **edge_label
                    )

        model_dependency_graph_rev = model_dependency_graph.reverse(copy=True)

        self.model_dependency_graph = {
            # deletion
            True: model_dependency_graph_rev,
            False: model_dependency_graph,
        }
        self.log.debug("Loaded dependencies", edges=model_dependency_graph.edges())
    except Exception as e:
        self.log.exception("Error loading dependency graph", e=e)
        # Bare raise preserves the original traceback.
        raise
def load_dependency_graph(self):
    """Load the model dependency graph from config, or default to empty.

    Populates ``self.model_dependency_graph`` with both orientations of
    the graph, keyed by a "deletion" boolean (True -> reversed graph).

    Raises:
        Exception: re-raised after logging if loading/parsing fails.
    """
    try:
        if Config.get("dependency_graph"):
            self.log.debug(
                "Loading model dependency graph",
                path=Config.get("dependency_graph"),
            )
            # Context manager so the file handle is closed promptly
            # (the original leaked the handle via open(...).read()).
            with open(Config.get("dependency_graph")) as dep_file:
                dep_graph_str = dep_file.read()
        else:
            self.log.debug("Using default model dependency graph", graph={})
            dep_graph_str = "{}"

        # joint_dependencies is of the form { Model1 -> [(Model2, src_port, dst_port), ...] }
        # src_port is the field that accesses Model2 from Model1
        # dst_port is the field that accesses Model1 from Model2
        static_dependencies = json.loads(dep_graph_str)
        dynamic_dependencies = [
        ]  # Dropped Service and ServiceInstance dynamic dependencies

        # dict_items + list raises TypeError on Python 3; materialize the
        # items as a list first (matches the fixed sibling implementation).
        joint_dependencies = dict(
            list(static_dependencies.items()) + dynamic_dependencies)

        model_dependency_graph = DiGraph()
        for src_model, deps in joint_dependencies.items():
            for dep in deps:
                dst_model, src_accessor, dst_accessor = dep
                if src_model != dst_model:
                    edge_label = {
                        "src_accessor": src_accessor,
                        "dst_accessor": dst_accessor,
                    }
                    # Keyword expansion: a positional attribute dict is
                    # rejected by networkx >= 2.0.
                    model_dependency_graph.add_edge(
                        src_model, dst_model, **edge_label)
        model_dependency_graph_rev = model_dependency_graph.reverse(
            copy=True)
        self.model_dependency_graph = {
            # deletion
            True: model_dependency_graph_rev,
            False: model_dependency_graph,
        }
        self.log.debug("Loaded dependencies",
                       edges=model_dependency_graph.edges())
    except Exception as e:
        self.log.exception("Error loading dependency graph", e=e)
        # Bare raise preserves the original traceback.
        raise
def update_from_db(self, session):
    # type: (Session) -> None
    """Rebuild the in-memory group/permission graph from the database.

    Skips the rebuild when the stored checkpoint is unchanged.  All data
    is gathered outside ``self.lock`` and swapped in atomically at the
    end, so readers never observe a partially-updated graph.
    """
    # Only allow one thread at a time to construct a fresh graph.
    with self._update_lock:
        checkpoint, checkpoint_time = self._get_checkpoint(session)
        if checkpoint == self.checkpoint:
            self._logger.debug("Checkpoint hasn't changed. Not Updating.")
            return
        self._logger.debug("Checkpoint changed; updating!")
        start_time = datetime.utcnow()
        # Pull everything from the DB before touching shared state.
        user_metadata = self._get_user_metadata(session)
        groups, disabled_groups = self._get_groups(session, user_metadata)
        permissions = self._get_permissions(session)
        group_grants = self._get_group_grants(session)
        group_service_accounts = self._get_group_service_accounts(session)
        service_account_grants = all_service_account_permissions(session)
        graph = DiGraph()
        graph.add_nodes_from(self._get_nodes(groups, user_metadata))
        graph.add_edges_from(self._get_edges(session))
        rgraph = graph.reverse()
        grants_by_permission = self._get_grants_by_permission(
            graph, group_grants, service_account_grants
        )
        # Atomic swap: publish the new state under the reader lock.
        with self.lock:
            self._graph = graph
            self._rgraph = rgraph
            self.checkpoint = checkpoint
            self.checkpoint_time = checkpoint_time
            self.user_metadata = user_metadata
            self._groups = groups
            self._disabled_groups = disabled_groups
            self._permissions = permissions
            self._group_grants = group_grants
            self._group_service_accounts = group_service_accounts
            self._service_account_grants = service_account_grants
            self._grants_by_permission = grants_by_permission
        duration = datetime.utcnow() - start_time
        # Emit the rebuild latency in milliseconds.
        stats.log_rate("graph_update_ms", int(duration.total_seconds() * 1000))
def __init__(
    self,
    g: nx.DiGraph,
    starts: List[int],
    ends: List[int],
    max_input_size: int,
    allow_param_in_concat: bool,
    kernel_sizes: List[int],
    strides: List[int]
):
    """Cache the architecture graph plus its condensed (SCC) form.

    Stores the raw graph and its edge-reversed twin, builds the feasible
    size-transition graph for the given kernels/strides, then condenses
    strongly connected components and keeps a topological order of the
    condensed DAG for later traversals.
    """
    # Raw graph, its largest node id, and the edge-reversed twin.
    self.g = g
    self.max_node_idx = max(g.nodes)
    self.g_inv = g.reverse()
    self.starts = starts
    self.ends = ends
    self.allow_param_in_concat = allow_param_in_concat
    # Transition graph of tensor sizes reachable under the kernels/strides.
    self.size_transision_graph = make_size_transition_graph(
        max_input_size, kernel_sizes, strides)
    # Condense SCCs, keep the inverse, and a topological order of the DAG.
    self.scc_idx, self.g_compressed = self.compress_graph()
    self.g_compressed_inv = self.g_compressed.reverse()
    self.t_sorted = list(nx.topological_sort(self.g_compressed))
def __init__(self, path, version='0'):
    """Build the reach network from a PyTables file.

    Reads the 'networks/network<version>' table, links each reach to its
    downstream reach, keeps an unbroken copy of the graph, then breaks the
    network upstream of every monitored (gaged) reach.
    """
    g = DiGraph()
    gaged_reaches = []
    # NOTE(review): openFile/getNode is the legacy PyTables API -- confirm
    # the pinned PyTables version before modernizing.
    db = openFile(path, "r")
    try:
        table = db.getNode('/', 'networks/network' + str(version))
        reaches = {}
        #read data out of file
        for row in table:
            if str(row['ComID']) != '-1':
                reaches[row['ComID']] = Reach(self, row)
            else:
                reaches[row['ComID']] = '-1'
                g.add_edge(Reach(self, row), '-1')
            if row['MonitoredFlag'] == '1':
                gaged_reaches.append(row['ComID'])
    finally:
        # Close exactly once; the original closed the handle twice.
        db.close()
    #make network
    for comid in reaches.keys():
        to_comID = reaches[comid]._ToComID
        if to_comID != '-1':
            g.add_edge(reaches[comid], reaches[to_comID])
        else:
            # NOTE(review): the int -1 here differs from the '-1' string node
            # used above -- looks like a latent inconsistency; confirm intent.
            g.add_edge(reaches[comid], -1)
    self._g_unbroken = g.copy()
    self._g_unbroken_reverse = self._g_unbroken.reverse()
    #break upstream of monitored reaches
    for i in gaged_reaches:
        if i != '-1':
            # Materialize predecessors before mutating the graph: networkx 2
            # returns a live iterator that breaks on concurrent edge removal.
            up = list(g.predecessors(reaches[i]))
            for j in up:
                # delete_edge() was removed in networkx >= 1.0; use remove_edge.
                if j != '-1':
                    g.remove_edge(j, reaches[i])
                else:
                    g.remove_edge(j, '-1')
    self._g = g
    self._g_rev = g.reverse()
    self._version = str(version)
    self._path = str(path)
    self._reaches = reaches
def __init__(self, path, version="0"):
    """Build the reach network from a PyTables file.

    Reads the 'networks/network<version>' table, links each reach to its
    downstream reach, keeps an unbroken copy of the graph, then breaks the
    network upstream of every monitored (gaged) reach.
    """
    g = DiGraph()
    gaged_reaches = []
    # NOTE(review): openFile/getNode is the legacy PyTables API -- confirm
    # the pinned PyTables version before modernizing.
    db = openFile(path, "r")
    try:
        table = db.getNode("/", "networks/network" + str(version))
        reaches = {}
        # read data out of file
        for row in table:
            if str(row["ComID"]) != "-1":
                reaches[row["ComID"]] = Reach(self, row)
            else:
                reaches[row["ComID"]] = "-1"
                g.add_edge(Reach(self, row), "-1")
            if row["MonitoredFlag"] == "1":
                gaged_reaches.append(row["ComID"])
    finally:
        # Close exactly once; the original closed the handle twice.
        db.close()
    # make network
    for comid in reaches.keys():
        to_comID = reaches[comid]._ToComID
        if to_comID != "-1":
            g.add_edge(reaches[comid], reaches[to_comID])
        else:
            # NOTE(review): the int -1 here differs from the "-1" string node
            # used above -- looks like a latent inconsistency; confirm intent.
            g.add_edge(reaches[comid], -1)
    self._g_unbroken = g.copy()
    self._g_unbroken_reverse = self._g_unbroken.reverse()
    # break upstream of monitored reaches
    for i in gaged_reaches:
        if i != "-1":
            # Materialize predecessors before mutating the graph: networkx 2
            # returns a live iterator that breaks on concurrent edge removal.
            up = list(g.predecessors(reaches[i]))
            for j in up:
                # delete_edge() was removed in networkx >= 1.0; use remove_edge.
                if j != "-1":
                    g.remove_edge(j, reaches[i])
                else:
                    g.remove_edge(j, "-1")
    self._g = g
    self._g_rev = g.reverse()
    self._version = str(version)
    self._path = str(path)
    self._reaches = reaches
def update_from_db(self, session):
    """Refresh the cached graph state from the database.

    No-op when the stored checkpoint is unchanged; otherwise rebuilds the
    graph and swaps all derived state in under ``self.lock``.
    """
    checkpoint, checkpoint_time = self._get_checkpoint(session)
    if checkpoint == self.checkpoint:
        logging.debug("Checkpoint hasn't changed. Not Updating.")
        return
    logging.debug("Checkpoint changed; updating!")

    new_graph = DiGraph()
    new_graph.add_nodes_from(self._get_nodes_from_db(session))
    new_graph.add_edges_from(self._get_edges_from_db(session))
    rgraph = new_graph.reverse()

    # Nodes are (type, name) tuples; partition names by their type tag.
    users = {name for kind, name in new_graph.nodes() if kind == "User"}
    groups = {name for kind, name in new_graph.nodes() if kind == "Group"}

    user_metadata = self._get_user_metadata(session)
    permission_metadata = self._get_permission_metadata(session)
    group_metadata = self._get_group_metadata(session, permission_metadata)

    with self.lock:
        self._graph = new_graph
        self._rgraph = rgraph
        self.checkpoint = checkpoint
        self.checkpoint_time = checkpoint_time
        self.users = users
        self.groups = groups
        # Flatten the per-key permission lists into one set.
        self.permissions = {
            perm.permission
            for perms in permission_metadata.values()
            for perm in perms
        }
        self.user_metadata = user_metadata
        self.group_metadata = group_metadata
        self.permission_metadata = permission_metadata
def load_dependency_graph(self):
    """Load the static model dependency graph and merge in dynamic deps.

    Populates ``self.model_dependency_graph`` with both orientations of
    the graph, keyed by a "deletion" boolean (True -> reversed graph).

    Raises:
        Exception: re-raised after logging if loading/parsing fails.
    """
    dep_path = Config.get("dependency_graph")
    self.log.info('Loading model dependency graph', path=dep_path)
    try:
        # Context manager so the file handle is closed promptly
        # (the original leaked the handle via open(...).read()).
        with open(dep_path) as dep_file:
            dep_graph_str = dep_file.read()
        # joint_dependencies is of the form { Model1 -> [(Model2, src_port, dst_port), ...] }
        # src_port is the field that accesses Model2 from Model1
        # dst_port is the field that accesses Model1 from Model2
        static_dependencies = json.loads(dep_graph_str)
        dynamic_dependencies = self.compute_service_dependencies()
        # dict_items + list raises TypeError on Python 3; materialize the
        # items as a list first (works on Python 2 as well).
        joint_dependencies = dict(list(static_dependencies.items()) +
                                  dynamic_dependencies)
        model_dependency_graph = DiGraph()
        for src_model, deps in joint_dependencies.items():
            for dep in deps:
                dst_model, src_accessor, dst_accessor = dep
                if src_model != dst_model:
                    edge_label = {
                        'src_accessor': src_accessor,
                        'dst_accessor': dst_accessor
                    }
                    # Keyword expansion: a positional attribute dict is
                    # rejected by networkx >= 2.0.
                    model_dependency_graph.add_edge(
                        src_model, dst_model, **edge_label)
        model_dependency_graph_rev = model_dependency_graph.reverse(
            copy=True)
        self.model_dependency_graph = {
            # deletion
            True: model_dependency_graph_rev,
            False: model_dependency_graph
        }
        self.log.info("Loaded dependencies",
                      edges=model_dependency_graph.edges())
    except Exception as e:
        self.log.exception("Error loading dependency graph", e=e)
        # Bare raise preserves the original traceback.
        raise
def extract_nipype_graph(graph: nx.DiGraph) -> dict:
    """Flatten a nipype workflow graph into a plain mapping.

    Returns {node_name: {input_field: value or (predecessor_name, field)}},
    skipping IdentityInterface nodes (their values are inlined into the
    consumer's inputs instead).
    """
    predecessor_map = nx.to_dict_of_dicts(graph.reverse())
    result = {}
    for node, predecessors in predecessor_map.items():
        if isinstance(node.interface, IdentityInterface):
            continue
        node_name = node.fullname.split('.')[1]
        inputs = {}
        # Record every explicitly-set input value on the node itself.
        for field, _ in node.inputs.items():
            value = getattr(node.inputs, field)
            if value is not Undefined:
                inputs[field] = value
        # Wire up each incoming connection: identity predecessors are
        # inlined as values, real predecessors become (name, field) refs.
        for pred, meta in predecessors.items():
            for src, dst in meta['connect']:
                if isinstance(pred.interface, IdentityInterface):
                    inputs[dst] = getattr(pred.inputs, src)
                else:
                    inputs[dst] = (pred.fullname.split('.')[1], src)
        result[node_name] = inputs
    return result
def extract_radiome_graph(graph: nx.DiGraph) -> dict:
    """Flatten a radiome job graph into a plain mapping.

    Returns {job_reference: {input_field: value or (predecessor_ref, field)}}
    for Nipype and Python jobs; other node kinds are skipped.
    """
    mapping = {}
    matrix = nx.to_dict_of_dicts(graph.reverse())
    # Unpack directly in the loop header; the original bound the node id to
    # `id`, shadowing the builtin.
    for node_id, state in graph.nodes.data():
        mapping[node_id] = state['job'].resource
    res = {}
    for node_id, pred_conn in matrix.items():
        job = mapping[node_id]
        if isinstance(job, NipypeJob):
            node_name = job._reference
            res[node_name] = {}
            # Explicitly-set interface inputs first.
            for name, _ in job._interface.inputs.items():
                val = getattr(job._interface.inputs, name)
                if val is not Undefined:
                    res[node_name][name] = val
            for name, val in job.dependencies().items():
                res[node_name][name] = val
        elif isinstance(job, PythonJob):
            node_name = job._reference
            res[node_name] = {}
            for name, val in job.dependencies().items():
                res[node_name][name] = val
        else:
            # Unknown job kinds contribute nothing.
            continue
        # Wire up incoming connections from predecessor resources.
        for pred_id, conn in pred_conn.items():
            pred = mapping[pred_id]
            field = conn['field']
            if isinstance(pred, ComputedResource):
                res[node_name][field] = (pred.content[0]._reference,
                                         pred.content[1])
            elif isinstance(pred, Resource):
                res[node_name][field] = pred.content
    return res
def hand_item_to_non_envy(envy_graph: nx.DiGraph, agents_dict: Dict[str, AdditiveAgent],
                          all_agents: List[AdditiveAgent], items_remaining: List[str],
                          allocation: Allocation) -> None:
    """
    Finds an agent that no-one is envy of, and allocates an item to him.
    @param envy_graph: The Envy-Graph
    @param agents_dict: The agents in the Graph
    @param all_agents: A list of the agents
    @param items_remaining: A list of all the items that have not been allocated
    @param allocation: The allocation of items
    >>> Alice = AdditiveAgent({"a": 1, "b": 1, "c": 1, "d": 4, "e": 1}, name="Alice")
    >>> Alice.aq_items = ['a']
    >>> Bob = AdditiveAgent({"a": 1, "b": 1, "c": 2, "d": 1, "e": 5}, name="Bob")
    >>> Bob.aq_items = ['c']
    >>> Eve = AdditiveAgent({"a": 3, "b": 1, "c": 1, "d": 1, "e": 2}, name="Eve")
    >>> Eve.aq_items = ['b']
    >>> agents_dict = {x.name():x for x in [Alice,Bob,Eve]}
    >>> envy_graph = create_envy_graph(agents_dict)
    >>> items_remaining = list('de')
    >>> alloc = Allocation([Alice,Bob,Eve])
    >>> hand_item_to_non_envy(envy_graph,agents_dict,[Alice,Bob,Eve],items_remaining,alloc)
    >>> sorted(list(alloc.get_bundle(1)))
    ['c', 'e']
    """
    # An agent nobody envies has no incoming edge in the envy graph
    # (equivalently: no outgoing edge in the reversed graph, which is what
    # an earlier revision checked).  Node order matches the reversed graph's.
    for node in envy_graph.nodes():
        if envy_graph.in_degree(node) != 0:
            continue
        item = items_remaining.pop()
        agent = agents_dict[node]
        agent.aq_items.append(item)
        allocation.set_bundle(all_agents.index(agent), set(agent.aq_items))
        logger.info("\tAgent {} received item {}".format(
            agent.name(), item))
        return
def update_from_db(self, session):
    """Refresh the cached graph state from the database.

    No-op when the stored checkpoint is unchanged; otherwise rebuilds the
    graph and swaps all derived state in under ``self.lock``.
    """
    checkpoint, checkpoint_time = self._get_checkpoint(session)
    if checkpoint == self.checkpoint:
        logging.debug("Checkpoint hasn't changed. Not Updating.")
        return
    logging.debug("Checkpoint changed; updating!")
    new_graph = DiGraph()
    new_graph.add_nodes_from(self._get_nodes_from_db(session))
    new_graph.add_edges_from(self._get_edges_from_db(session))
    rgraph = new_graph.reverse()
    # Nodes are (type, name) tuples; split them into user/group sets.
    users = set()
    groups = set()
    for (node_type, node_name) in new_graph.nodes():
        if node_type == "User":
            users.add(node_name)
        elif node_type == "Group":
            groups.add(node_name)
    user_metadata = self._get_user_metadata(session)
    permission_metadata = self._get_permission_metadata(session)
    group_metadata = self._get_group_metadata(session, permission_metadata)
    # Atomic swap of all derived state under the reader lock.
    with self.lock:
        self._graph = new_graph
        self._rgraph = rgraph
        self.checkpoint = checkpoint
        self.checkpoint_time = checkpoint_time
        self.users = users
        self.groups = groups
        # Flatten the per-key permission lists into one set.
        self.permissions = {perm.permission
                            for perm_list in permission_metadata.values()
                            for perm in perm_list}
        self.user_metadata = user_metadata
        self.group_metadata = group_metadata
        self.permission_metadata = permission_metadata
def kosaraju(G: nx.DiGraph):
    """Label the strongly connected components of G (Kosaraju's algorithm).

    Assumes nodes are integers 0..n-1 (they are used as list indices, and
    DFS_visit / Components_R follow the same convention).  Returns a list
    mapping each node to its 1-based component number.
    """
    adjacency = nx.to_dict_of_lists(G)
    n = len(adjacency)
    discovered = [-1] * n
    finished = [-1] * n
    clock = 0
    # Pass 1: DFS the original graph to record finish times.
    for vertex in G:
        if discovered[vertex] == -1:
            clock = DFS_visit(vertex, adjacency, discovered, finished, clock)
    # Pass 2: sweep the transposed graph in decreasing finish time; each
    # fresh DFS tree is one strongly connected component.
    transposed = nx.to_dict_of_lists(G.reverse(True))
    component = [-1] * n
    count = 0
    for vertex in sorted(transposed, key=lambda v: finished[v], reverse=True):
        if component[vertex] == -1:
            count = count + 1
            component[vertex] = count
            Components_R(count, vertex, transposed, component)
    return component
def __init__(
    self,
    g: nx.DiGraph,
    starts: List[int],
    ends: List[int],
    network_input_sizes: Dict[int, int],
    node_output_sizes: Dict[int, int],
    node_output_dimensions: Dict[int, int],
    network_output_sizes: Dict[int, int],
    kernel_sizes: List[int],
    strides: List[int],
):
    """Cache the network graph plus per-node size bookkeeping.

    Stores the graph and its edge-reversed twin together with the supplied
    size maps, then derives per-node input sizes/dimensions from them.
    """
    self.g = g
    self.g_inv = g.reverse()  # edge-reversed graph for backward traversal
    self.starts = starts
    self.ends = ends
    self.kernel_sizes = kernel_sizes
    self.strides = strides
    self.network_input_sizes = network_input_sizes
    self.node_output_sizes = node_output_sizes
    self.node_output_dimensions = node_output_dimensions
    self.network_output_sizes = network_output_sizes
    # Derived per-node input bookkeeping (reads the maps stored above).
    self.node_input_sizes = self.__get_input_sizes()
    self.node_input_dimensions = self.__get_input_dimensions()
class VariableDictionary(SequenceDict):
    """Ordered Dictionary to hold variable values. It maintains a dependency graph
    to check for cycles and to recalculate the necessary values when one of the fields is updated"""
    expression = Expression()

    def __init__(self, *args, **kwargs):
        self.valueView = VariableDictionaryView(self)
        self.dependencyGraph = DiGraph()
        self.globaldict = dict()
        super(VariableDictionary, self).__init__(*args, **kwargs)

    def __getstate__(self):
        # BUGFIX: iterating self.__dict__ directly yields only the keys, so
        # the (key, value) unpacking raised ValueError; iterate items().
        # globaldict is excluded from pickling (see __reduce__).
        return dict((key, value) for key, value in self.__dict__.items() if key not in ['globaldict'])

    def __reduce__(self):
        theclass, theitems, inst_dict = super(VariableDictionary, self).__reduce__()
        inst_dict.pop('globaldict', None)
        return theclass, theitems, inst_dict

    def setGlobaldict(self, globaldict):
        self.globaldict = globaldict

    def calculateDependencies(self):
        """Re-evaluate every expression-backed variable and rebuild the dependency graph."""
        self.dependencyGraph = DiGraph()  # clear the old dependency graph in case parameters got removed
        for name, var in self.items():
            if hasattr(var, 'strvalue'):
                try:
                    var.value, dependencies = self.expression.evaluate(var.strvalue, self.valueView, listDependencies=True)
                    self.addDependencies(self.dependencyGraph, dependencies, name)
                    var.strerror = None
                except (KeyError, ValueError, ZeroDivisionError) as e:
                    logging.getLogger(__name__).warning( str(e) )
                    var.strerror = str(e)
                except Exception as e:
                    errstr = "Unable to evaluate the expression '{0}' for variable '{1}'.".format(var.strvalue,var.name)
                    logging.getLogger(__name__).warning( errstr )
                    var.strerror = errstr
            else:
                var.strerror = None
        self.recalculateAll()

    def merge(self, variabledict, globaldict=None, overwrite=False, linkNewToParent=False ):
        """Synchronize with *variabledict*: drop names it lacks, copy in
        parameter/address entries it adds (or all, when overwrite), match its
        ordering, and recalculate."""
        if globaldict is not None:
            self.globaldict = globaldict
        for name in list(self.keys()):
            if name not in variabledict:
                self.pop(name)
        for name, var in variabledict.items():
            if var.type in ['parameter', 'address'] and (name not in self or overwrite):
                self[name] = copy.deepcopy(var)
                if linkNewToParent:
                    self[name].useParentValue = True
        self.sortToMatch( list(variabledict.keys()) )
        self.calculateDependencies()

    def __setitem__(self, key, value):
        super(VariableDictionary, self).__setitem__(key, value)
        if hasattr(value, 'strvalue'):
            self.setStrValue( key, value.strvalue )

    def __deepcopy__(self, memo):
        new = type(self)()
        new.globaldict = self.globaldict
        new.update( (name, copy.deepcopy(value)) for name, value in list(self.items()))
        new.dependencyGraph = self.dependencyGraph  #calculateDependencies()
        return new

    def addDependencies(self, graph, dependencies, name):
        """add all the dependencies to name"""
        for dependency in dependencies:
            self.addEdgeNoCycle(graph, dependency, name)

    def addEdgeNoCycle(self, graph, first, second ):
        """add the dependency to the graph, raise CyclicDependencyException in case of cyclic dependencies"""
        graph.add_edge(first, second)
        # simple_cycles is lazy; this raises on the first cycle found.
        cycles = simple_cycles( graph )
        for cycle in cycles:
            raise CyclicDependencyException(cycle)

    def setStrValueIndex(self, index, strvalue):
        return self.setStrValue( self.keyAt(index), strvalue)

    def setStrValue(self, name, strvalue):
        """update the variable value with strvalue and recalculate as necessary"""
        var = self[name]
        try:
            result, dependencies = self.expression.evaluate(strvalue, self.valueView, listDependencies=True )
            graph = self.dependencyGraph.copy()  # make a copy of the graph. In case of cyclic dependencies we do not want to leave any changes
            for edge in list(graph.in_edges([name])):  # remove all the inedges, dependencies from other variables might be gone
                graph.remove_edge(*edge)
            self.addDependencies(graph, dependencies, name)  # add all new dependencies
            var.value = result
            var.strvalue = strvalue
            self.dependencyGraph = graph
            var.strerror = None
        except KeyError as e:
            var.strerror = str(e)
        except Exception as e:
            errstr = "Unable to evaluate the expression '{0}' for variable '{1}'.".format(strvalue,name)
            logging.getLogger(__name__).warning( errstr )
            var.strerror = errstr
        return self.recalculateDependent(name)

    def setParentStrValue(self, name, strvalue):
        """update the variable's parent value with strvalue and recalculate as necessary"""
        var = self[name]
        try:
            result, dependencies = self.expression.evaluate(strvalue, self.valueView, listDependencies=True )
            graph = self.dependencyGraph.copy()  # make a copy of the graph. In case of cyclic dependencies we do not want to leave any changes
            for edge in list(graph.in_edges([name])):  # remove all the inedges, dependencies from other variables might be gone
                graph.remove_edge(*edge)
            self.addDependencies(graph, dependencies, name)  # add all new dependencies
            var.parentValue = result
            var.parentStrvalue = strvalue
            self.dependencyGraph = graph
            var.strerror = None
        except KeyError as e:
            var.strerror = str(e)
        except Exception as e:
            errstr = 'Unable to evaluate the expression \'{0}\' for variable \'{1}\'.'.format(strvalue,name)
            logging.getLogger(__name__).warning( errstr )
            var.strerror = errstr
        return self.recalculateDependent(name)

    def setValue(self, name, value):
        """update the variable value with value and recalculate as necessary. This is done using existing dependencies."""
        var = self[name]
        try:
            var.value = value
            var.strvalue = ""
            var.strerror = None
        except KeyError as e:
            var.strerror = str(e)
        return self.recalculateDependent(name, returnResult=True)

    def setParentValue(self, name, value):
        """update the variable value with value and recalculate as necessary. This is done using existing dependencies."""
        var = self[name]
        try:
            var.parentValue = value
            var.parentStrvalue = ""
            var.strerror = None
        except KeyError as e:
            var.strerror = str(e)
        return self.recalculateDependent(name, returnResult=True)

    def setEncodingIndex(self, index, encoding):
        self.at(index).encoding = None if encoding == 'None' else str(encoding)

    def setEnabledIndex(self, index, enabled):
        self.at(index).enabled = enabled

    def recalculateDependent(self, node, returnResult=False):
        """Recalculate every variable that (transitively) depends on *node*."""
        if self.dependencyGraph.has_node(node):
            generator = dfs_preorder_nodes(self.dependencyGraph, node)
            next(generator )  # skip the first, that is us
            nodelist = list(generator)  # make a list, we need it twice
            result = [ self.recalculateNode(node) for node in nodelist ]
            return (nodelist, result) if returnResult else nodelist  # return which ones were re-calculated, so gui can be updated
        return (list(), list()) if returnResult else list()

    def recalculateNode(self, node):
        """Re-evaluate a single variable's expression; returns its (new) value."""
        if node in self:
            var = self[node]
            if hasattr(var, 'strvalue'):
                try:
                    var.value = self.expression.evaluate(var.strvalue, self.valueView)
                    var.strerror = None
                except (KeyError, ValueError) as e:
                    var.strerror = str(e)
                except Exception as e:
                    errstr = 'Unable to evaluate the expression \'{0}\'.'.format(var.strvalue)
                    logging.getLogger(__name__).warning( errstr )
                    var.strerror = errstr
            else:
                logging.getLogger(__name__).warning("variable {0} does not have strvalue. Value is {1}".format(var, var.value))
            return var.value
        return None

    def recalculateAll(self):
        """Recalculate every variable, leaves of the dependency graph first."""
        g = self.dependencyGraph.reverse()
        # NOTE(review): in_degree_iter() is the networkx 1.x API; under
        # networkx >= 2.0 this must become g.in_degree() -- confirm the
        # pinned networkx version.
        for node, indegree in g.in_degree_iter():
            if indegree==0:
                for calcnode in dfs_postorder_nodes(g, node):
                    self.recalculateNode(calcnode)

    def bareDictionaryCopy(self):
        return SequenceDict( self )
#!/usr/bin/env python3
"""Advent of Code day 7: bag-containment rules as a directed graph."""
from os import path

from networkx import DiGraph, dfs_postorder_nodes

with open(path.join(path.dirname(__file__), "input.txt")) as f:
    G = DiGraph()
    for line in f:
        bag, contains = line.strip().rstrip(".").split(" bags contain ")
        if contains == "no other bags":
            continue
        for other in contains.split(", "):
            # Parse "<count> <color> bag[s]".  The previous int(other[0])
            # broke on counts >= 10, and rstrip("bags") strips a *character
            # set*, mangling colors ending in b/a/g/s (e.g. "posh brass").
            count_str, description = other.split(" ", 1)
            color = description.rsplit(" ", 1)[0]  # drop trailing "bag"/"bags"
            G.add_edge(bag, color, count=int(count_str))

# Part 1: bags that can (transitively) contain shiny gold = ancestors of the
# node in the containment graph; subtract 1 for "shiny gold" itself.
print("Part 1:", len(list(dfs_postorder_nodes(G.reverse(), "shiny gold"))) - 1)

# Part 2: postorder guarantees children are priced before their parents.
for node in dfs_postorder_nodes(G, "shiny gold"):
    G.nodes[node]["count"] = sum(
        (G.nodes[n]["count"] + 1) * v["count"] for (n, v) in G[node].items()
    )

print("Part 2:", G.nodes["shiny gold"]["count"])
def calculate_derived_outputs(
    requests: List[dict],
    graph: networkx.DiGraph,
    outputs: np.ndarray,
    times: np.ndarray,
    timestep: float,
    flows: List[BaseFlow],
    compartments: List[Compartment],
    get_flow_rates: Callable[[np.ndarray, float], np.ndarray],
    whitelist: Optional[List[str]],
) -> Dict[str, np.ndarray]:
    """
    Calculates all requested derived outputs from the calculated compartment sizes.

    Args:
        requests: Descriptions of the new outputs we should create.
        graph: A DAG describing how the requests depend on each other.
        outputs: The compartmental model outputs - compartment sizes over time.
        times: The times that the outputs correspond to.
        timestep: The timestep used to generate the model times.
        flows: The flows used by the model.
        compartments: The compartments used by the model.
        get_flow_rates: A function that gets the model flow rates for a given state and time.
        whitelist: An optional subset of requests to evaluate.

    Returns:
        Dict[str, np.ndarray]: The timeseries results for each requested output.
    """
    assert outputs is not None, "Cannot calculate derived outputs: model has not been run."
    error_msg = "Cannot calculate derived outputs: dependency graph has cycles."
    assert networkx.is_directed_acyclic_graph(graph), error_msg

    # The graph gets mutated below, so work on a copy.
    graph = graph.copy()

    if whitelist:
        # Restrict the graph to the whitelisted outputs plus everything
        # they (transitively) depend on; drop all other nodes.
        required_nodes = set()
        for name in whitelist:
            deps_tree = networkx.dfs_tree(graph.reverse(), source=name).reverse()
            required_nodes = required_nodes.union(deps_tree.nodes)

        for node in list(graph.nodes):
            if node not in required_nodes:
                graph.remove_node(node)

    derived_outputs = {}
    outputs_to_delete_after = []

    # Pre-compute every flow rate at every time so flow requests can be served.
    # This is needed because some solvers do not necessarily evaluate all timesteps.
    flow_values = np.zeros((len(times), len(flows)))
    for time_idx, time in enumerate(times):
        # Flow rates are instantaneous; scale by the timestep to integrate over it.
        flow_values[time_idx, :] = get_flow_rates(outputs[time_idx], time) * timestep

    # Transpose so the 1st dimension is flow type and the 2nd is time.
    flow_values = np.array(flow_values).T

    # Topological order guarantees each output's dependencies exist before it runs.
    for name in networkx.topological_sort(graph):
        request = requests[name]
        request_type = request["request_type"]
        output = np.zeros(times.shape)

        if not request["save_results"]:
            # Intermediate-only output: remember to delete it at the end.
            outputs_to_delete_after.append(name)

        if request_type == DerivedOutputRequest.FLOW:
            # Track a set of flow rates over time.
            output = _get_flow_output(request, times, flows, flow_values)
        elif request_type == DerivedOutputRequest.COMPARTMENT:
            # Track a set of compartment sizes over time.
            output = _get_compartment_output(request, outputs, compartments)
        elif request_type == DerivedOutputRequest.AGGREGATE:
            # Track the sum of a set of outputs over time.
            output = _get_aggregate_output(request, derived_outputs)
        elif request_type == DerivedOutputRequest.CUMULATIVE:
            # Track the cumulative value of an output over time.
            output = _get_cumulative_output(request, name, times, derived_outputs)
        elif request_type == DerivedOutputRequest.FUNCTION:
            # Track the results of a function of other outputs over time.
            output = _get_func_output(request, derived_outputs)

        derived_outputs[name] = output

    # Drop intermediate outputs the caller did not ask to keep.
    for name in outputs_to_delete_after:
        del derived_outputs[name]

    return derived_outputs
def FDT2CDG(cfg: nx.DiGraph, if_id, node_infos):
    """Augment *cfg* with control-dependence (CDG) edges.

    Builds the forward-dominator tree (FDT) via dominators of the reversed
    CFG rooted at a synthetic EXIT_POINT, then, for every conditional node
    in *if_id*, adds a red edge from each node that is control dependent on
    it (keeping only the nearest controlling branch per node).
    """
    leafs = []
    control_dep_edge = {}
    cdg_edges = []

    # Immediate dominators of the CFG from the entry node "0".
    idoms = nx.algorithms.immediate_dominators(cfg, "0")
    print("后向支配关系:", idoms)

    # Collect every leaf of the CFG (out-degree zero).
    for cfg_node in cfg.nodes:
        if cfg.out_degree(cfg_node) == 0:
            leafs.append(cfg_node)

    # Funnel all leaves into one synthetic exit node.
    cfg.add_node("EXIT_POINT", label="EXIT_POINT")
    for leaf_node in leafs:
        cfg.add_edge(leaf_node, "EXIT_POINT")
    get_graph_png(cfg, "cfg")

    reverse_cfg = cfg.reverse()
    get_graph_png(reverse_cfg, "reverse")

    # Forward (post-) dominators = dominators of the reversed CFG at the exit.
    ifdoms = nx.algorithms.immediate_dominators(reverse_cfg, "EXIT_POINT")
    del ifdoms["EXIT_POINT"]
    print("前向支配关系:", ifdoms)

    # Forward-dominator tree.
    fdt = nx.DiGraph(name=cfg.graph["name"])
    fdt.add_nodes_from(reverse_cfg.nodes)
    for s in ifdoms:
        fdt.add_edge(ifdoms[s], s)
    get_graph_png(fdt, "fdt")

    # CDG = CFG + FDT: compute control dependences induced by each conditional.
    for cond_id in if_id:
        ifdom = ifdoms[str(cond_id)]
        print("ifdom of {} is {}".format(cond_id, ifdom))
        cfg_paths = nx.all_simple_paths(cfg, source=str(cond_id), target="EXIT_POINT")
        # Y is control dependent on X <=> there is a path in the CFG from X to Y
        # that doesn't contain the immediate forward dominator of X.
        for path in cfg_paths:
            for node in path[1:-1]:
                node_info = node_infos[node]
                # ENDIF/ENDLOOP markers carry no control dependence; skip them.
                if node_info.type != NodeType.ENDIF and node_info.type != NodeType.ENDLOOP:
                    if node != ifdom:
                        key = "{}-{}".format(node, str(cond_id))
                        if key not in control_dep_edge:
                            control_dep_edge[key] = 1
                            length = nx.shortest_path_length(cfg, str(cond_id), node)
                            cdg_edges.append({
                                'from': node,
                                "to": str(cond_id),
                                'color': 'red',
                                'distance': length
                            })
                            print("{} 控制依赖于 {}, 距离是: {}".format(node, cond_id, length))
                    else:
                        # NOTE(review): in the original (whitespace-mangled)
                        # source the `else: break` attachment is ambiguous;
                        # it is read here as "stop once the path reaches the
                        # forward dominator" — confirm against the upstream file.
                        break

    # Reuse the dict: now map each dependent node to its NEAREST branch.
    control_dep_edge.clear()
    for cdg_edge in cdg_edges:
        from_node = cdg_edge["from"]
        to_node = cdg_edge["to"]
        distance = cdg_edge["distance"]
        if from_node not in control_dep_edge:
            control_dep_edge[from_node] = {"to": to_node, "distance": distance}
        elif control_dep_edge[from_node]["distance"] > distance:
            control_dep_edge[from_node] = {"to": to_node, "distance": distance}

    for from_node in control_dep_edge:
        cfg.add_edge(from_node, control_dep_edge[from_node]["to"], color="red")
    get_graph_png(cfg, "cdg")
def __init__(self, g: nx.DiGraph, starts: List[int], ends: List[int]): self.__check_node_order(g) self.g = g self.g_inv = g.reverse() self.starts = starts self.ends = ends
def __update_stats__(self, tag_name, t: nx.DiGraph):
    """Accumulate dataset-, tree-, node- and (optionally) type-level stats for one tree.

    NOTE(review): trees appear to be stored with edges pointing child -> parent,
    so `t.in_degree` plays the role of the conceptual out-degree and the root is
    the unique node with out-degree 0 — confirm against the dataset loader.
    """
    super(TreePreprocessor, self).__update_stats__(tag_name, t)

    in_degree_list = [deg for _, deg in t.in_degree]
    n_leaves = len([deg for deg in in_degree_list if deg == 0])
    rev_t = t.reverse()
    root = [u for u in t.nodes if t.out_degree(u) == 0][0]
    # Depth of each node = path length from the root in the reversed tree.
    depth_list = {
        k: len(v) - 1
        for k, v in nx.shortest_path(rev_t, root).items()
    }

    # ---- dataset stats ----
    ds = self.dataset_stats[tag_name]
    ds['tot_nodes'] += t.number_of_nodes()
    ds['tot_leaves'] += n_leaves
    ds['no_labels'] += len([
        i for i, d in t.nodes(data=True) if d['y'] == ConstValues.NO_ELEMENT
    ])
    ds['max_out_degree'] = max(ds['max_out_degree'], max(in_degree_list))

    # ---- per-tree stats ----
    ts = self.tree_stats[tag_name]
    ts['num_leaves'].append(n_leaves)
    ts['height'].append(max(depth_list.values()))
    ts['max_out_degree'].append(max(in_degree_list))

    # ---- per-node histograms ----
    out_hist = self.node_stats[tag_name]['out_degree']
    for deg in in_degree_list:
        out_hist[deg] = out_hist.get(deg, 0) + 1
    depth_hist = self.node_stats[tag_name]['depth']
    for depth in depth_list.values():
        depth_hist[depth] = depth_hist.get(depth, 0) + 1

    # ---- type stats (only for typed datasets) ----
    if self.typed:
        freq = self.type_stats[tag_name]['type_freq']
        type_deg = self.type_stats[tag_name]['type_max_out_degree']
        for i, d in t.nodes(data=True):
            t_id = d['t']
            if t_id != ConstValues.NO_ELEMENT:
                # Frequency of this type across the dataset.
                freq[t_id] = freq.get(t_id, 0) + 1
                # Largest out-degree observed for this type.
                type_deg[t_id] = max(t.in_degree(i), type_deg.get(t_id, 0))
            else:
                ds['no_types'] += 1
        ds['num_types'] = len(freq)
def reverse_weights(g: nx.DiGraph, weight='weight'):
    """Return a reversed copy of *g* with every edge's *weight* attribute negated.

    Args:
        g: The directed graph to reverse (not modified; reverse() returns a copy).
        weight: Name of the edge attribute to negate.

    Returns:
        A new DiGraph with all edges reversed and their weights negated.
    """
    rev = g.reverse()
    # BUG FIX: Graph.edges_iter() was removed in networkx 2.0;
    # edges(data=True) yields (u, v, data) triples whose attribute
    # dicts can be mutated in place on the reversed copy.
    for _, _, data in rev.edges(data=True):
        data[weight] = -data[weight]
    return rev
def __init__(self, DG: nx.DiGraph):
    """Compute a reverse post-order of *DG* and run the connected-component pass.

    Nodes are sorted by descending post-order number obtained from a DFS of
    the reversed graph, then handed to calc_cc as the visit sequence.
    """
    post_order = self.getpost(DG.reverse())
    ordering = sorted(DG.nodes, key=post_order.__getitem__, reverse=True)
    self.calc_cc(DG, seq=ordering)