Example #1
    def load_dependency_graph(self):
        dep_path = Config.get("dependency_graph")
        self.log.info('Loading model dependency graph', path = dep_path)

        try:
            dep_graph_str = open(dep_path).read()

            # joint_dependencies is of the form { Model1 -> [(Model2, src_port, dst_port), ...] }
            # src_port is the field that accesses Model2 from Model1
            # dst_port is the field that accesses Model1 from Model2
            joint_dependencies = json.loads(dep_graph_str)

            model_dependency_graph = DiGraph()
            for src_model, deps in joint_dependencies.items():
                for dep in deps:
                    dst_model, src_accessor, dst_accessor = dep
                    if src_model != dst_model:
                        edge_label = {'src_accessor': src_accessor,
                                      'dst_accessor': dst_accessor}
                        model_dependency_graph.add_edge(
                            src_model, dst_model, edge_label)

            model_dependency_graph_rev = model_dependency_graph.reverse(
                copy=True)
            self.model_dependency_graph = {
                # deletion
                True: model_dependency_graph_rev,
                False: model_dependency_graph
            }
            self.log.info("Loaded dependencies", edges = model_dependency_graph.edges())
        except Exception as e:
            self.log.exception("Error loading dependency graph", e = e)
            raise e
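For illustration, a minimal sketch of the JSON shape this loader expects; the model names and accessor fields below are invented, and the forward/reverse pair of graphs is rebuilt outside the class.

import json
from networkx import DiGraph

# Hypothetical dependency file content matching { Model1 -> [(Model2, src_port, dst_port), ...] }
dep_graph_str = json.dumps({
    "Instance": [["Slice", "slice", "instances"]],
    "Slice": [["Site", "site", "slices"]],
})

joint_dependencies = json.loads(dep_graph_str)
forward = DiGraph()
for src_model, deps in joint_dependencies.items():
    for dst_model, src_accessor, dst_accessor in deps:
        if src_model != dst_model:
            forward.add_edge(src_model, dst_model,
                             src_accessor=src_accessor, dst_accessor=dst_accessor)

# True -> reversed graph (walked on deletion), False -> forward graph
model_dependency_graph = {True: forward.reverse(copy=True), False: forward}
print(model_dependency_graph[False].edges(data=True))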
Example #2
def sample(n):
  T = DiGraph()
  alive = dict()
  heights = list()
  total = 0.0
  for i in range(n):
    alive[i] = 0.0

  k = n
  while k > 1:
    event = exponential(1.0/binom(k, 2))
    total += event
    heights.append(total)
    for c in alive.keys():
      alive[c] += event

    [a, b] = subset(alive.keys(), 2)
    c = new_node(k)
    T.add_edge(a, c, length = alive[a])
    T.add_edge(b, c, length = alive[b])

    del alive[a]
    del alive[b]
    alive[c] = 0.0

    k -= 1

  T.below = collapse(T)
  T.heights = heights

  return T
Example #3
def build_graph(alternatives, outranking, credibility=False):
    """There are some conventions to follow in this function:
    1. labels (i.e. alternatives' ids) are kept in graph's dictionary (see:
       graph.graph)
    2. aggregated nodes (only numbers, as list) are kept under 'aggr' key in
       node's dict (see: graph.nodes(data=True))
    3. weights on the edges are kept under 'weight' key in edge's dict -
       similarly as with nodes (see: graph.edges(data=True))
    """
    graph = DiGraph()  # we need directed graph for this
    # creating nodes...
    for i, alt in enumerate(alternatives):
        graph.add_node(i)
        graph.graph.update({i: alt})
    # creating edges...
    for i, alt in enumerate(alternatives):
        relations = outranking.get(alt)
        if not relations:  # if graph is built from intersectionDistillation
            continue
        for relation in relations.items():
            if relation[1] == 1.0:
                weight = credibility[alt][relation[0]] if credibility else None
                graph.add_edge(i, alternatives.index(relation[0]),
                               weight=weight)
    return graph
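A usage sketch with invented alternatives and outranking/credibility dictionaries, showing where the docstring's conventions end up (labels in graph.graph, credibility values as edge weights); it assumes build_graph and DiGraph are available as above.

# All data below is made up for illustration.
alternatives = ['a1', 'a2', 'a3']
outranking = {'a1': {'a2': 1.0, 'a3': 0.0}, 'a2': {'a3': 1.0}}
credibility = {'a1': {'a2': 0.85}, 'a2': {'a3': 0.9}}

g = build_graph(alternatives, outranking, credibility)
print(g.graph)             # {0: 'a1', 1: 'a2', 2: 'a3'}  (labels kept in the graph dict)
print(g.edges(data=True))  # edges (0, 1) and (1, 2) carry the credibility values as 'weight'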
Example #4
def common_edge_ratio(ref_user_connections, eval_user_connections, is_directed=False):
    """ caulcalate the fraction of common edges fraction out of union of two graphs

    Parameters:
    ==========
    ref_user_connections: a list of edges
    eval_user_connections: a list of edges
    is_directed: boolean,
        False (default): edges form an undirected graph
        True: edges form a directed graph
    """
    ref_user_connections = _normalize_connections(ref_user_connections, is_directed)
    eval_user_connections = _normalize_connections(eval_user_connections, is_directed)

    if is_directed:
        ref_graph, eval_graph = DiGraph(), DiGraph()
    else:
        ref_graph, eval_graph = Graph(), Graph()

    ref_graph.add_edges_from(ref_user_connections)
    eval_graph.add_edges_from(eval_user_connections)

    ref_edges, eval_edges = ref_graph.edges(), eval_graph.edges()

    tot_common = sum([1 if edge in ref_edges else 0 for edge in eval_edges])
    union_size = len(ref_edges) + len(eval_edges) - tot_common
    return tot_common / union_size
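A worked arithmetic sketch of the ratio being computed, with invented edge lists and the normalization step left out: two shared edges out of a union of four gives 0.5.

ref_edges = [(1, 2), (2, 3), (3, 4)]      # invented reference edges
eval_edges = [(1, 2), (2, 3), (4, 5)]     # invented evaluated edges

common = sum(1 for edge in eval_edges if edge in ref_edges)   # 2
union_size = len(ref_edges) + len(eval_edges) - common        # 3 + 3 - 2 = 4
print(common / union_size)                                    # 0.5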
Example #5
    def add_nodes_from(self, nodes, **attr):
        H=copy.deepcopy(self)
        self.clear()
        if not H.nodes():
            DiGraph.add_nodes_from(self, nodes, **attr)

            self.names=names=sorted(nodes)
            for i, n in enumerate(self.names):
                self.node[i]={'name': n, 'pmf': Pmf()}
                self.node[i]['pmf'].Set(1,self.p)
                self.node[i]['pmf'].Set(0, 1-self.p)
                self.remove_node(n)
                self.edge[i]={}
                self.indep_vars+=[i]
            self.SetProbs()
            return

        DiGraph.add_nodes_from(self, nodes, **attr)
        #ind_vars=[var for var in H.indep_vars]
        #DiGraph.add_nodes_from(self, ind_vars)
        self.names=names=sorted(set(H.names + nodes))
        for i, n in enumerate(names):
            try:
                self.node[i], self.edge[i]=H.node[i], H.edge[i]
            except:
                self.node[i]={'name': n, 'pmf': Pmf()}
                self.node[i]['pmf'].Set(1,self.p)
                self.node[i]['pmf'].Set(0, 1-self.p)
                self.remove_node(n)
                self.edge[i]={}
                self.indep_vars+=[i]

        self.SetProbs()
Example #6
def build_network_from_db():
    """
    Creates a new graph with data inserted from the database,
    overwrites the current graph. This function will extract all
    producers from the database and iterate through their source_ratings
    to build the global network. Therefore, the time to complete running this
    function depends on the number of producers in the database
    and the number of ratings they have set on each other.

    Returns: the global network (type NetworkX DiGraph)

    """
    
    global graph
    # Users not included in graph.
    producers = Producer.objects()
    graph = DiGraph()
    tmp = []
    for p1 in producers:    
        try:
            tmp.append(extractor.get_producer(p1.name))
        except Exception:
            pass

    for p2 in tmp:
        for k,v in p2.source_ratings.iteritems():
            graph.add_edge(p2.name, k, v)  
    
    return graph
Example #7
def lazy_load_trees(skeleton_ids, node_properties):
    """ Return a lazy collection of pairs of (long, DiGraph)
    representing (skeleton_id, tree).
    node_properties is a list of strings, each the name of a column
    in the Django model of the Treenode table other than the treenode id, parent_id
    or skeleton_id. """

    values_list = ('id', 'parent_id', 'skeleton_id')
    props = tuple(set(node_properties) - set(values_list))
    values_list += props

    ts = Treenode.objects.filter(skeleton__in=skeleton_ids) \
            .order_by('skeleton') \
            .values_list(*values_list)
    skid = None
    tree = None
    for t in ts:
        if t[2] != skid:
            if tree:
                yield (skid, tree)
            # Prepare for the next one
            skid = t[2]
            tree = DiGraph()

        fields = {k: v for k,v in izip(props, islice(t, 3, 3 + len(props)))}
        tree.add_node(t[0], fields)

        if t[1]:
            # From child to parent
            tree.add_edge(t[0], t[1])

    if tree:
        yield (skid, tree)
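A hedged consumption sketch; the skeleton ids and the extra column name are hypothetical, and the generator only runs inside the project's Django environment where the Treenode model exists.

# Hypothetical ids and column; edges in each tree point from child to parent.
for skeleton_id, tree in lazy_load_trees([123, 456], ['location_x']):
    roots = [n for n in tree.nodes() if tree.out_degree(n) == 0]
    print(skeleton_id, tree.number_of_nodes(), roots)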
Example #8
    def to_networkx(self):
        """Return a NetworkX DiGraph object representing the single linkage tree.

        Edge weights in the graph are the distance values at which child nodes
        merge to form the parent cluster.

        Nodes have a `size` attribute attached giving the number of points
        that are in the cluster.
        """
        try:
            from networkx import DiGraph, set_node_attributes
        except ImportError:
            raise ImportError('You must have networkx installed to export networkx graphs')

        max_node = 2 * self._linkage.shape[0]
        num_points = max_node - (self._linkage.shape[0] - 1)

        result = DiGraph()
        for parent, row in enumerate(self._linkage, num_points):
            result.add_edge(parent, row[0], weight=row[2])
            result.add_edge(parent, row[1], weight=row[2])

        size_dict = {parent : row[3] for parent, row in enumerate(self._linkage, num_points)}
        set_node_attributes(result, 'size', size_dict)

        return result
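A self-contained sketch of the structure the docstring describes, built from an invented two-row linkage matrix rather than a fitted tree; row i merges clusters row[0] and row[1] at distance row[2] into a cluster of size row[3].

import numpy as np
from networkx import DiGraph

linkage = np.array([[0.0, 1.0, 0.5, 2.0],   # points 0 and 1 merge at distance 0.5
                    [2.0, 3.0, 1.2, 3.0]])  # point 2 joins that cluster at distance 1.2
num_points = linkage.shape[0] + 1           # 3 original points; merged clusters get ids 3, 4

tree = DiGraph()
for parent, row in enumerate(linkage, num_points):
    tree.add_edge(parent, int(row[0]), weight=row[2])
    tree.add_edge(parent, int(row[1]), weight=row[2])
    tree.nodes[parent]['size'] = int(row[3])

print(sorted(tree.edges(data=True)))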
Example #9
def test_get_rc_chain():
    mock_g = DiGraph()
    mock_g.add_edges_from([('A', 'B', {'RC': 'I'}), ('B', 'C', {'RC': 'S'}),
                           ('C', 'D', {'RC': 'L'}), ('D', 'E', {'RC': 'O'})])
    tp = ['B', 'C', 'D']
    nt.assert_equal(MapGraph._get_rc_chain.im_func(mock_g, 'A', tp, 'E'),
                    'ISLO')
Example #10
 def build_graph(self):
     new_graph = DiGraph()
     # Rebuild the graph from the LSDB
     for lsa in chain(self.routers.values(),
                      self.networks.values(),
                      self.ext_networks.values()):
         lsa.apply(new_graph, self)
     # Contract all IPs to their respective router-id
     for lsa in self.routers.values():
         lsa.contract_graph(new_graph, self.router_private_address.get(
             lsa.routerid, []))
     # Figure out the controllers layout
     base_net = ip_network(CFG.get(DEFAULTSECT, 'base_net'))
     controller_prefix = CFG.getint(DEFAULTSECT, 'controller_prefixlen')
     # Group by controller and log them
     for ip in new_graph.nodes_iter():
         addr = ip_address(ip)
         if addr in base_net:
             """1. Compute address diff to remove base_net
                2. Right shift to remove host bits
                3. Mask with controller mask
             """
             id = (((int(addr) - int(base_net.network_address)) >>
                    base_net.max_prefixlen - controller_prefix) &
                   ((1 << controller_prefix) - 1))
             self.controllers[id].append(ip)
     # Contract them on the graph
     for id, ips in self.controllers.iteritems():
         contract_graph(new_graph, ips, 'C_%s' % id)
     # Remove generated self loops
     new_graph.remove_edges_from(new_graph.selfloop_edges())
     self.apply_secondary_addresses(new_graph)
     return new_graph
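A worked sketch of the controller-id arithmetic in the loop above, with a hypothetical base_net of 10.0.0.0/8 and controller_prefixlen of 16; with those values the second octet of the address selects the controller.

from ipaddress import ip_address, ip_network

base_net = ip_network('10.0.0.0/8')   # assumed config value
controller_prefix = 16                # assumed config value

addr = ip_address('10.3.1.5')
offset = int(addr) - int(base_net.network_address)               # 1. strip the base network
cid = ((offset >> (base_net.max_prefixlen - controller_prefix))  # 2. drop the host bits
       & ((1 << controller_prefix) - 1))                         # 3. mask with the controller mask
print(cid)  # 3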
Example #11
 def remove_node(self, n):
     if n not in self.nodes():
         return
     if self.node[n]['conn'] is not None \
                 and not self.node[n]['conn'].disconnected:
         self.node[n]['conn'].disconnect()
     DiGraph.remove_node(self, n)
Example #12
 def remove_edges_from(self,  ebunch):
     temp = self.copy()
     DiGraph.remove_edges_from(temp, ebunch)
     if not temp._is_connected():
         raise ValueError("Removing edges %s creates disconnected graph" %(ebunch, ) )
     else:
         DiGraph.remove_edges_from(self,  ebunch)
Example #13
 def remove_edge(self, u,  v = None):
     temp = self.copy()
     DiGraph.remove_edge(temp, u, v = v)
     if not temp._is_connected():
         raise ValueError("Removing edge (%s, %s) creates disconnected graph" %(u, v) )
     else:
         DiGraph.remove_edge(self, u, v = v)
Example #14
	def merge(self,minN):
		
		import numpy as np
		merged=[]
		merged_cliq=[]
		while len(DiGraph.nodes(self)):
			#print(len(self.nodes()))
			contcmp,ct_cliq=self.splitG(minN)

			if not DiGraph.nodes(self):
				break
			merged=merged+contcmp 
			merged_cliq=merged_cliq+ct_cliq

			try:
				#print("point1")
				cut_nodes=minimum_node_cut(self)

				#print("point2")
			except:
				nodes=DiGraph.nodes(self)
				index=np.random.randint(len(nodes))
				cut_nodes=[nodes[index]]

			for node in cut_nodes:
				DiGraph.remove_node(self,node)

		self.topics=merged
		self.topic_cliq=merged_cliq
Example #15
    def test():
        bayesian_network = DiGraph()
        edges = [('A', 'C'), ('B', 'C'), ('C', 'D'), ('C', 'E'), ('D', 'F'), ('D', 'G')]
        bayesian_network.add_edges_from(edges)
        for node in bayesian_network.nodes():
            node_object = bayesian_network.node[node]
            #  All the variables are binary
            node_object['values'] = ['0', '1']

        conditional_probabilities = {
                                                'A1': 0.7,
                                                'A0':0.3,
                                                'B1': 0.4,
                                                'B0':0.6,
                                                'C1|A0,B0': 0.1, 'C1|A1,B0': 0.3,
                                                'C1|A0,B1': 0.5, 'C1|A1,B1': 0.9,
                                                'C0|A0,B0': 0.9, 'C0|A1,B0': 0.7,
                                                'C0|A0,B1': 0.5, 'C0|A1,B1': 0.1,
                                                'D1|C0': 0.8, 'D1|C1': 0.3,
                                                'D0|C0': 0.2, 'D0|C1': 0.7,
                                                'E1|C0': 0.2, 'E1|C1': 0.6,
                                                'E0|C0': 0.8, 'E0|C1': 0.4,
                                                'F1|D0': 0.1, 'F1|D1': 0.7,
                                                'F0|D0': 0.9, 'F0|D1': 0.3,
                                                'G1|D0': 0.9, 'G1|D1': 0.4,
                                                'G0|D0': 0.1, 'G0|D1': 0.6
                                     }
        inference = PearlsInference(bayesian_network, conditional_probabilities)
        print '-------------------------------'
        inference.add_evidence(['C', '1'])
        print '----------------------------------'
        inference.add_evidence(['A', '1'])
        pprint(conditional_probabilities)
Example #16
def select_binary_groups(groups, root=desc.root):
    def get_bin_group(grps, bin_grps, rt, graph):
        level = grps.successors(rt)
        if not level:
            bin_grps.append(graph)
            return bin_grps
        elif len(level) == 2:
            graph.add_nodes_from(level)
            graph.add_edge(rt, level[0], grps.get_edge_data(rt, level[0]))
            graph.add_edge(rt, level[1], grps.get_edge_data(rt, level[1]))
            if grps.successors(level[0]):
                get_bin_group(grps, bin_grps, rt=level[0], graph=graph)
            else:
                get_bin_group(grps, bin_grps, rt=level[1], graph=graph)
        else:
            level.sort()
            for i in xrange(0, len(level), 2):
                g = DiGraph(graph)
                g.add_nodes_from([level[i], level[i + 1]])
                g.add_edge(rt, level[i], grps.get_edge_data(rt, level[i]))
                g.add_edge(rt, level[i + 1], grps.get_edge_data(rt, level[i + 1]))
                if grps.successors(level[i]):
                    get_bin_group(grps, bin_grps, rt=level[i], graph=g)
                else:
                    get_bin_group(grps, bin_grps, rt=level[i + 1], graph=g)

    dg = DiGraph()
    dg.add_node(root)
    bin_groups = []
    get_bin_group(groups, bin_grps=bin_groups, rt=root, graph=dg)
    return bin_groups
Example #17
def _graph(formula):
  """Build the implication graph"""
  G = DiGraph()
  for (a, b) in formula.iterclause():
    G.add_edge(-a, b)
    G.add_edge(-b, a)
  return G
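A worked sketch of the same construction with a hand-written 2-CNF formula in place of the project-specific formula.iterclause() interface; literals are signed integers, and each clause (a ∨ b) contributes the implications ¬a → b and ¬b → a.

from networkx import DiGraph

clauses = [(1, 2), (-1, 3), (-2, -3)]   # (x1 or x2) and (not x1 or x3) and (not x2 or not x3)

G = DiGraph()
for (a, b) in clauses:
    G.add_edge(-a, b)   # not a  implies  b
    G.add_edge(-b, a)   # not b  implies  a

print(sorted(G.edges()))  # [(-3, -1), (-2, 1), (-1, 2), (1, 3), (2, -3), (3, -2)]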
Example #18
def test_get_worst_pdc():
    mock_mapp = DiGraph()
    mock_mapp.add_edges_from([('A-1', 'B-1', {'RC': 'L', 'PDC': 5}),
                              ('A-1', 'B-2', {'RC': 'O', 'PDC': 7}),
                              ('A-1', 'B-3', {'RC': 'L', 'PDC': 15})])
    result = MapGraph._get_worst_pdc.im_func(mock_mapp, 'A-1',
                                             ['B-1', 'B-2', 'B-3'])
    nt.assert_equal(result, 15)
Example #19
def test_get_best_is():
    mock_mapp = DiGraph()
    mock_mapp.add_edges_from([('A-1', 'B-1', {'RC': 'I', 'PDC': 5}),
                              ('A-1', 'B-2', {'RC': 'I', 'PDC': 7}),
                              ('A-1', 'B-3', {'RC': 'S', 'PDC': 5})])
    result = MapGraph._get_best_is.im_func(mock_mapp, 'A-1',
                                           ['B-1', 'B-2', 'B-3'])
    nt.assert_equal(result, ('B-1', 5))
Example #20
def test_get_worst_pdc_in_tp():
    mock_g = DiGraph()
    mock_g.add_edges_from([('A', 'B', {'PDC': 0}), ('B', 'C', {'PDC': 5}),
                           ('C', 'D', {'PDC': 7}), ('D', 'E', {'PDC': 17})])
    tp = ['B', 'C', 'D']
    nt.assert_equal(MapGraph._get_worst_pdc_in_tp.im_func(mock_g, 'A', tp,
                                                             'E'),
                    17)
Example #21
def test_new_attributes_are_better():
    mock_g = DiGraph()
    mock_g.add_edge('A', 'B', PDC=5, TP=['C', 'D', 'E'])
    method = MapGraph._new_attributes_are_better.im_func
    nt.assert_true(method(mock_g, 'A', 'B', 18, []))
    nt.assert_true(method(mock_g, 'A', 'B', 2, ['C', 'D', 'E']))
    nt.assert_false(method(mock_g, 'A', 'B', 2, ['C', 'D', 'E', 'F']))
    nt.assert_false(method(mock_g, 'A', 'B', 5, ['C', 'D', 'E']))
Example #22
    def test_missing_edges(self):
        """A tournament must not have any pair of nodes without at least
        one edge joining the pair.

        """
        G = DiGraph()
        G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3)])
        assert_false(is_tournament(G))
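For contrast, a sketch of a graph that does satisfy the tournament property (exactly one directed edge between every pair of nodes); the import path for is_tournament is an assumption about where the tested helper lives in networkx.

from networkx import DiGraph
from networkx.algorithms.tournament import is_tournament  # assumed import location

G = DiGraph()
G.add_edges_from([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)])
print(is_tournament(G))  # True: all six pairs are joined by exactly one edge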
Example #23
 def __init__(self, conec=[],  **kwargs):
     """
     Calls DiGraph constructor and checks if the graph is connected and acyclic
     """
     DiGraph.__init__(self, **kwargs)
     DiGraph.add_edges_from(self, conec)
     #self.add_edges_from(conec)  #copy maximum recursion here
     if not self._is_connected(): raise ValueError("Not connected graph")
     if not self._is_directed_acyclic_graph(): raise ValueError("Not acyclic graph")
Example #24
 def add_edge(self,  u,  v = None):
     temp = self.copy()
     DiGraph.add_edge(temp,  u,  v = v)
     if not temp._is_directed_acyclic_graph():
         raise ValueError("Edge (%s, %s) creates a cycle" %(u, v) )
     elif not temp._is_connected():
         raise ValueError("Edge (%s, %s) creates disconnected graph" %(u, v) )
     else:
         DiGraph.add_edge(self,  u,  v = v)
Example #25
 def __init__(self, data=None, name='', eNewick=None, ignore_prefix=None, id_offset=0):
     # initialization here
     DiGraph.__init__(self, data)
     self.name = name
     self._labels = {}
     self._lastlabel = id_offset
     self.cache = {}
     if eNewick != None:
         self._from_eNewick(eNewick, ignore_prefix=ignore_prefix)
Example #26
def test_organize_by_rc():
    mock_mapp = DiGraph()
    mock_mapp.add_edges_from([('A-1', 'B-1', {'RC': 'I'}),
                              ('A-1', 'B-2', {'RC': 'L'}),
                              ('A-1', 'C-3', {'RC': 'O'})])
    result = MapGraph._organize_by_rc.im_func(mock_mapp, 'A-1', ['B-1', 'B-2'])
    nt.assert_equal(result, {'IS': ['B-1'], 'LO': ['B-2']})
    result = MapGraph._organize_by_rc.im_func(mock_mapp, 'A-1', ['C-3'])
    nt.assert_equal(result, {'IS': [], 'LO': ['C-3']})
Example #27
    def test_bidirectional_edges(self):
        """A tournament must not have any pair of nodes with greater
        than one edge joining the pair.

        """
        G = DiGraph()
        G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)])
        G.add_edge(1, 0)
        assert_false(is_tournament(G))
Example #28
 def add_edges_from(self,  ebunch):
     temp = self.copy()
     DiGraph.add_edges_from(temp, ebunch)
     if not temp._is_directed_acyclic_graph():
         raise ValueError("Edges %s create a cycle" %(ebunch, ) )
     elif not temp._is_connected():
         raise ValueError("Edges %s create disconnected graph" %(ebunch, ) )
     else:
         DiGraph.add_edges_from(self,  ebunch)
Example #29
	def init_graph(self):
		#Represent the graph using adjacency lists
		g = DiGraph()
		g.add_nodes_from(self.vocabulary) #The keys (terms) become nodes in the graph
		for x in self.vocabulary:
			for y in self.vocabulary:
				if self.conditional_probability(x,y) >= 0.8 and self.conditional_probability(y,x) < 0.8:
					g.add_edge(x,y)
		return g 
Example #30
 def __init__(self, styles, max_label = 4, name2URL = None,
              data=None, name='', file=None, **attr):
     """
     Constructor 
     """
     self.styles    = styles
     self.max_label = max_label
     self.name2URL = name2URL
     DiGraph.__init__(self, data=data,name=name,**attr)
Example #31
def iter_roots(g: nx.DiGraph) -> Generator[Hashable, None, None]:
    for n in g.nodes():
        if not list(g.predecessors(n)):
            yield n
Example #32
class TestsIssue32(unittest.TestCase):
    """
    Tests for issue #32
    https://github.com/torressa/cspy/issues/32
    """
    def setUp(self):
        # Maximum and minimum resource arrays
        self.max_res, self.min_res = [5, 10e5, 1], [0, 0, 0]
        # Create simple digraph with appropriate attributes
        # No resource costs required for custom REFs
        self.G = DiGraph(directed=True, n_res=3)
        self.G.add_edge('Source', 1, res_cost=array([0, 0, 0]), weight=-1)
        self.G.add_edge(1, 2, res_cost=array([0, 0, 0]), weight=-1)
        self.G.add_edge(2, 3, res_cost=array([0, 0, 0]), weight=-10)
        self.G.add_edge(2, 4, res_cost=array([0, 1, 0]), weight=-10)
        self.G.add_edge(3, 4, res_cost=array([0, 1, 0]), weight=-10)
        self.G.add_edge(4, 'Sink', res_cost=array([0, 0, 0]), weight=-1)

    def custom_REF(self, cumulative_res, edge):
        res_new = array(cumulative_res)
        # Unpack edge
        u, v, edge_data = edge[0:3]
        # Monotone resource
        res_new[0] += 1
        # Increasing resource
        if v == "Sink":
            res_new[1] = res_new[1]
        else:
            res_new[1] += int(v)**2
        # Resource reset
        res_new[2] += edge_data["res_cost"][1]

        return res_new

    @parameterized.expand(zip(range(100), range(100)))
    def testBiDirectionalBothDynamic(self, _, seed):
        """
        Find shortest path of simple test digraph using the BiDirectional
        algorithm for a range of seeds.
        Note: the first argument is required for parameterized to work with unittest.
        """
        bidirec = BiDirectional(self.G,
                                self.max_res,
                                self.min_res,
                                REF=self.custom_REF,
                                seed=seed)
        # Check classification
        with self.assertLogs('cspy.algorithms.bidirectional') as cm:
            bidirec.name_algorithm()
        # Log should contain the word 'dynamic'
        self.assertRegex(cm.output[0], 'dynamic')
        # Check exception for not running first
        with self.assertRaises(Exception) as context:
            bidirec.path
        self.assertTrue("run()" in str(context.exception))
        # Run and test results
        bidirec.run()
        path = bidirec.path
        cost = bidirec.total_cost
        total_res = bidirec.consumed_resources
        self.assertEqual(path, ['Source', 1, 2, 3, 4, 'Sink'])
        self.assertEqual(cost, -23)
        self.assertTrue(all(total_res == [5, 30, 1]))
Example #33
def mapmatch(waypoint_sets: dict,
             g0: nx.DiGraph,
             kne: callable,
             knn=None,
             callback: callable = None,
             stubborn: float = 1,
             many_partial: bool = False) -> Generator:
    """
	Find a plausible bus route or pieces of it along the waypoints.

	(TODO)
	Returns:
		A dictionary with the following keys
			- 'path' is a list of nodes of the graph g0, as an estimate of the route
			- 'geo_path' is a list of (lat, lon) coordinates of those nodes
			- 'edge_clouds' is a list of edge clouds, one for each waypoint
			- 'active_edges' is a list of currently selected edges, one for each edge cloud
	"""

    # Dictionary to collect status and result (updated continuously)
    result = {'waypoint_sets': waypoint_sets, 'mapmatcher_version': 11111143}

    # Before doing anything
    result['status'] = "zero"
    if callback: callback(result)

    # Check connectivity of the graph
    if (g0.number_of_nodes() > max(
            map(len, nx.strongly_connected_components(g0)))):
        raise MapMatchingError(
            "The graph appears not to be strongly connected")

    # The graph will be modified from here on
    # g0 = deepcopy(g0)
    # commons.logger.warning("Attributes added to the graph supplied to mapmatch")

    # Check for the existence of those attributes
    assert (nx.get_node_attributes(g0, 'pos'))  # Node (lat, lon)
    assert (nx.get_edge_attributes(g0, 'len'))  # Edge lengths (meters)

    # Nearest neighbors
    if not knn:
        knn = compute_geo_knn(nx.get_node_attributes(g0, 'pos'))

    # Original weights for shortest path computation
    g0_weights = {(a, b):
                  d['len'] / (PARAM['speed']['highway'][d['highway']] / 3.6)
                  for (a, b, d) in g0.edges.data()}

    commons.logger.debug("Computing waypoint nearest edges for each run...")

    # Waypoints' nearest edges: wp --> (dist_cloud : edge --> distance)
    def get_wp2ne(wpts):
        return {wp: dict(kne(wp)) for wp in set(wpts)}

    # Compute the nearest edges map for each waypoint set
    waypoints_kne = {
        setid: get_wp2ne(wp2ne)
        for (setid, wp2ne) in progressbar(waypoint_sets.items())
    }

    # Keep only those that are "inside" the graph
    waypoints_kne = {
        setid: wp2ne
        for (setid, wp2ne) in waypoints_kne.items()
        if (max(min(dd.values())
                for dd in wp2ne.values()) <= PARAM['max_wp_to_graph_dist'])
    }

    # Waypoints from all sets
    result['waypoints_all'] = list(
        chain.from_iterable(wp2ne.keys() for wp2ne in waypoints_kne.values()))

    if not result['waypoints_all']:
        raise MapMatchingError("No waypoints near the graph")

    # CHECK if there are edge repeats within clouds
    for dc in waypoints_kne.values():
        if not commons.all_distinct(dc.keys()):
            commons.logger.warning("Repeated edges in cloud: {}".format(
                sorted(dc.keys())))

    # Make pairs (setid, waypoint_group)
    # using waypoints_kne and waypoint_sets
    def split_into_groups():
        for (setid, wp2ne) in waypoints_kne.items():

            # Waypoints of this set w/o consecutive repeats
            waypoints = commons.remove_repeats(waypoint_sets[setid])

            if many_partial:
                # Avoid getting stuck in places of dense waypoints
                waypoints = list(
                    sparsified(waypoints,
                               dist=(PARAM['waypoints_min_distance'] / 10)))

                # Extract groups of consecutive waypoints
                groups = [
                    list(
                        sparsified(waypoints[k:],
                                   dist=PARAM['waypoints_min_distance']))
                    [0:PARAM['waypoints_groupsize']]
                    for k in range(0, len(waypoints),
                                   round(PARAM['waypoints_groupsize'] / 3))
                ]

                # Remove too-small groups
                groups = [
                    g for g in groups
                    if (len(g) >= PARAM['waypoints_min_number'])
                ]

                # Remove redundant groups
                k = 1
                while (k < len(groups)):
                    if set(groups[k]).issubset(set(groups[k - 1])):
                        groups.pop(k)
                    else:
                        k += 1

                commons.logger.info(
                    "From set {}, extracted {} waypoint subgroups".format(
                        setid, len(groups)))

                # Mapmatch on the subgroups
                # Note: the loop could be empty
                for wpts in groups:
                    yield (setid, wpts)

            else:
                # Do not split waypoints into subgroups
                yield (setid,
                       sparsified(waypoints,
                                  dist=PARAM['waypoints_min_distance']))

    # List of pairs (setid, waypoint_group)
    commons.logger.debug("Grouping waypoints into subgroups...")
    groups = list(split_into_groups())

    if not groups:
        raise MapMatchingError("No waypoint groups to mapmatch")

    commons.logger.debug("Got {} subgroups".format(len(groups)))

    # Takes waypoints from presult['waypoints_used']
    # Extracts a neighborhood graph of the waypoints
    def mapmatch_prepare_subgraph(presult: dict) -> dict:

        # A cloud of candidate edges for each waypoint
        dist_clouds = [
            dict(waypoints_kne[presult['waypoint_setid']][wp])
            for wp in presult['waypoints_used']
        ]

        # The structure of the graph will be modified in this routine
        g1: nx.DiGraph

        if PARAM['do_graph_extract']:

            # Start with the nodes of nearest edges
            nodes = set(
                chain.from_iterable(
                    chain.from_iterable(dc.keys()) for dc in dist_clouds))

            waypoints = presult['waypoints_used']

            # Add nodes in a neighborhood of the waypoints
            for (p, x, q) in zip(waypoints, waypoints[1:], waypoints[2:]):
                r = max(
                    200,
                    2 * max(commons.geodesic(x, p), commons.geodesic(x, q)))
                nodes |= set(
                    knn['node_ids'][j]
                    for j in knn['knn_tree'].query_radius([x], r=r)[0])

            # Extract subgraph on the 'nodes', keep only the main component
            g1 = g0.subgraph(
                max(nx.strongly_connected_components(g0.subgraph(nodes)),
                    key=len)).copy()

            # The graph may not contain all the nearest edges anymore
            dist_clouds = [{e: d
                            for (e, d) in dc.items() if g1.has_edge(*e)}
                           for dc in dist_clouds]

            if any(dist_clouds):
                # Now some of dist_clouds are empty, remove those
                (waypoints,
                 dist_clouds) = zip(*((wp, dc)
                                      for (wp,
                                           dc) in zip(waypoints, dist_clouds)
                                      if dc))
            else:
                # All dist_clouds are empty
                waypoints = []
                dist_clouds = []

            # Did graph truncation render some waypoints unusable?
            if (len(waypoints) < len(presult['waypoints_used'])):
                commons.logger.warning(
                    "Number of waypoints reduced from {} to {}".format(
                        len(presult['waypoints_used']), len(waypoints)))
                presult['waypoints_used'] = waypoints

            commons.logger.debug(
                "Mapmatch graph has {} nodes around {} waypoints".format(
                    g1.number_of_nodes(), len(waypoints)))

        else:
            g1 = g0.copy()

        # Edge attr for shortest path computation
        nx.set_edge_attributes(g1, g0_weights, name=PARAM['weight'])

        # Mark the original nodes as basenodes
        nx.set_node_attributes(g1, {n: n for n in g1.nodes}, name='basenode')

        #
        presult['(g)'] = g1
        presult['(dist_clouds)'] = dist_clouds
        presult['(callback)'] = callback
        #
        presult['many_partial'] = many_partial
        presult['stubborn'] = stubborn

        return presult

    # List of 'result' starting points -- "pre-results"
    presults = [{
        **result,
        **{
            'waypoint_setid': si,
            'waypoints_used': wu
        }
    } for (si, wu) in groups]

    # Mapmatch groups in random order
    if PARAM['waypoints_mapmatch_shuffle']:
        random.shuffle(presults)

    # Mapmatch only so many groups
    presults = presults[0:PARAM['waypoints_mapmatch_max_groups']]
    assert (presults)

    # MAPMATCH DRIVER
    commons.logger.info("Starting mapmatch on {} subgroups".format(
        len(presults)))

    def complete_all(presults) -> Generator:

        # Attach a relevant graph extract around the waypoints
        presults = map(mapmatch_prepare_subgraph, presults)

        # The previous step may have reduced the number of waypoints in subgroups
        presults = filter((lambda p: len(p['waypoints_used']) >= PARAM[
            'waypoints_min_number']), presults)

        # Mapmatch -- batch-parallel version
        # Note: 'Parallel' does not yield until all tasks are complete
        for presult_batch in commons.batchup(presults,
                                             5 * commons.PARALLEL_MAP_CPUS):
            yield from commons.parallel_map(mapmatch_complete_this,
                                            presult_batch)

        # # Mapmatch -- serial version
        # for result in incomplete :
        # 	try :
        # 		result = mapmatch_complete_this_2(result)
        # 	except :
        # 		commons.logger.error("Group mapmatch failed within set #{} \n{}".format(result['waypoint_setid'], traceback.format_exc()))
        # 		time.sleep(1)
        # 		continue
        #
        # 	yield result

        # # Mapmatch -- parallel version
        # yield from Parallel(n_jobs=8)(delayed(mapmatch_complete_this_2)(result) for result in incomplete)

    yield from complete_all(progressbar(presults))
Example #34
def create_di_graph(cfg: networkx.DiGraph, previous_node: int, 
                    current_node: int) -> None:
    """Creates control flow graph of the program.

    Note: This function is a recursive function.

    In a control flow graph a node represents a basic block, and an edge
    represents the connection between two basic blocks. The starting line number
    of a basic block is the id of the node that represents that basic block.

    Previous node and current node are id numbers of two nodes, i.e. the starting
    line numbers of two basic blocks. The current node is the new node that will
    be added to the cfg. This function works as follows:

    1. It adds a new node to the cfg with the current_node id number. It updates
    the information of the node with the values from start_list and end_list.
    2. It adds an edge between the current node and previous node. Current node 
    is one of the targets of previous node.
    3. It checks the targets of the current node (the newly added basic block) by
    looking at end_list. If there is target information, it calls itself again.
    In the new call, the id number of the newly added node (current_node) is
    passed as the previous_node argument, and the target of the newly added node
    is passed as the current_node argument.
    

    Parameters
    ----------
    cfg : directed graph
        This is the control flow graph of the program.
    previous_node : int
        The id number of the previous node. In other words the starting line
        number of the previous basic block.
    current_node : int
        The id number of the current node. In other words the starting line
        number of the current basic block. This node will be added to the
        control flow graph.
    """

    index = 0

    if (current_node in cfg_set) & (previous_node != -1):
        cfg.add_edge(previous_node, current_node)
        return

    for i in range(0, len(start_list)):
        if (start_list[i] == current_node):
            index = i

    cfg.add_node(current_node)
    cfg_set.add(current_node)

    if (previous_node != -1):
        cfg.add_edge(previous_node, current_node)

    cfg.nodes[current_node]['label'] = "Start: " + str(start_list[index]) + "; End: " + str(end_list[index][0])
    cfg.nodes[current_node]['start'] = start_list[index]
    cfg.nodes[current_node]['end'] = end_list[index][0]
    for i1 in range(1, len(end_list[index])):
        target = 'target' + str(i1)
        print(target)
        cfg.nodes[current_node][target] = "null"

    for i2 in range(1, len(end_list[index])):
        target = 'target' + str(i2)
        print(target)
        cfg.nodes[current_node][target] = end_list[index][i2]
        create_di_graph(cfg, current_node, end_list[index][i2])
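A hedged call sketch for the builder above; start_list, end_list, and cfg_set are the module-level globals the function relies on, filled here with an invented three-block program whose first block (lines 1-3) branches to the blocks starting at lines 4 and 7.

import networkx

start_list = [1, 4, 7]             # invented: start line of each basic block
end_list = [[3, 4, 7], [6], [9]]   # invented: [end line, target start lines...] per block
cfg_set = set()                    # nodes already added, consulted by the function

cfg = networkx.DiGraph()
create_di_graph(cfg, -1, 1)        # -1 means "no previous block"
print(list(cfg.edges()))           # [(1, 4), (1, 7)]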
Example #35
 def __init__(self) -> None:
     self._reaction_graph = DiGraph()
Example #36
def mark_input_ports_lexicographically_based_on_input_node_key(graph: nx.DiGraph):
    for node_key in graph.nodes:
        input_edges = graph.in_edges(node_key)
        sorted_input_edges = sorted(input_edges, key=lambda x: x[0])
        for idx, edge in enumerate(sorted_input_edges):
            graph.edges[edge][NNCFGraph.INPUT_PORT_ID_EDGE_ATTR] = idx
Example #37
def graph_wrapper_dependencies(config_dir: str, config_dict, graph: networkx.DiGraph, visited: List[str]):
    """
    Given a directory, recursively finds all other directories it depends on and builds a graph.
    :param config_dir: The config directory to obtain a dependency graph for
    :param config_dict: A dictionary containing wrapper config objects for each seen directory
    :param graph: The graph to add dependency info to. Empty at first, reused in recursion.
    :param visited: A list of visited nodes. Empty at first, reused in recursion
    """
    if config_dir in visited:
        return
    visited.append(config_dir)

    if config_dict.get(config_dir):  # add to dictionary so we only read the file once
        wrapper_config_obj = config_dict[config_dir].get("wrapper_config")
    else:
        wrapper_config_obj = create_wrapper_config_obj(config_dir)
        config_dict[config_dir] = {
            "wrapper_config": wrapper_config_obj
        }

    if wrapper_config_obj.config:
        graph.add_node(config_dir)

    tf_dependencies = wrapper_config_obj.depends_on

    if tf_dependencies is None:
        print("Cannot list a dependency without tf_wrapper dependency configuration:", config_dir)
        sys.exit(1)

    for dependency in tf_dependencies:
        graph.add_node(dependency)
        if config_dir in graph:
            graph.add_edge(dependency, config_dir)

    wrappers = find_wrapper_config_files(config_dir)
    wrappers.reverse()  # we want the closest wrapper file that gives inherited dependencies
    for wrapper in wrappers:
        wrapper_dir = os.path.dirname(wrapper)
        if config_dict.get(wrapper_dir):  # add to dictionary so we only read the file once
            new_wrapper_config_obj = config_dict[wrapper_dir].get("wrapper_config")
        else:
            new_wrapper_config_obj = create_wrapper_config_obj(wrapper_dir)
            config_dict[wrapper_dir] = {
                "wrapper_config": new_wrapper_config_obj
            }

        if wrapper_dir == config_dir:
            continue
        if new_wrapper_config_obj.depends_on is not None:
            inherited_dependencies = new_wrapper_config_obj.depends_on
            added = False
            for dependency in inherited_dependencies:
                if dependency == config_dir:
                    continue
                added = True
                graph.add_node(dependency)
                if config_dir in graph:
                    graph.add_edge(dependency, config_dir)
            if added:
                break  # we only need the closest, the recursion will handle anything higher

    for predecessor in list(graph.predecessors(config_dir)):
        graph_wrapper_dependencies(predecessor, config_dict, graph, visited)
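A hedged call sketch; the directory name is invented, and the helpers the function calls (create_wrapper_config_obj, find_wrapper_config_files) must exist in the surrounding module for it to run.

import networkx

dependency_graph = networkx.DiGraph()
graph_wrapper_dependencies("envs/prod/app",   # hypothetical config directory
                           config_dict={},
                           graph=dependency_graph,
                           visited=[])
# Edges point from each dependency to the directory that depends on it
print(list(dependency_graph.edges()))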
Example #38
class TestsToy:
    def setup(self):
        """
        Creates a toy graph.
        """
        self.G = DiGraph()
        for v in [1, 2, 3, 4, 5]:
            self.G.add_edge("Source", v, cost=10, time=20)
            self.G.add_edge(v, "Sink", cost=10, time=20)
            self.G.nodes[v]["demand"] = 5
            self.G.nodes[v]["upper"] = 100
            self.G.nodes[v]["lower"] = 5
            self.G.nodes[v]["service_time"] = 1
        self.G.nodes[2]["upper"] = 20
        self.G.nodes["Sink"]["upper"] = 100
        self.G.nodes["Source"]["upper"] = 100
        self.G.add_edge(1, 2, cost=10, time=20)
        self.G.add_edge(2, 3, cost=10, time=20)
        self.G.add_edge(3, 4, cost=15, time=20)
        self.G.add_edge(4, 5, cost=10, time=25)

    #################
    # subsolve cspy #
    #################

    def test_cspy_stops(self):
        """Tests column generation procedure on toy graph with stop constraints"""
        prob = VehicleRoutingProblem(self.G, num_stops=3)
        prob.solve()
        assert prob.best_value == 70
        assert prob.best_routes[1] in [
            ["Source", 1, 2, 3, "Sink"],
            ["Source", 4, 5, "Sink"],
        ]
        assert set(prob.best_routes_cost.values()) == {30, 40}
        prob.solve(exact=False)
        assert prob.best_value == 70

    def test_cspy_stops_capacity(self):
        """Tests column generation procedure on toy graph
           with stop and capacity constraints
        """
        prob = VehicleRoutingProblem(self.G, num_stops=3, load_capacity=10)
        prob.solve()
        assert prob.best_value == 80
        assert set(prob.best_routes_load.values()) == {5, 10}

    def test_cspy_stops_capacity_duration(self):
        """Tests column generation procedure on toy graph
           with stop, capacity and duration constraints
        """
        prob = VehicleRoutingProblem(self.G,
                                     num_stops=3,
                                     load_capacity=10,
                                     duration=62)
        prob.solve(exact=False)
        assert prob.best_value == 85
        assert set(prob.best_routes_duration.values()) == {41, 62}
        assert prob.node_load[1]["Sink"] in [5, 10]

    def test_cspy_stops_time_windows(self):
        """Tests column generation procedure on toy graph
           with stop, capacity and time_window constraints
        """
        prob = VehicleRoutingProblem(
            self.G,
            num_stops=3,
            time_windows=True,
        )
        prob.solve()
        assert prob.best_value == 80
        assert prob.departure_time[1]["Source"] == 0
        assert prob.arrival_time[1]["Sink"] in [41, 62]

    ###############
    # subsolve lp #
    ###############

    def test_LP_stops(self):
        """Tests column generation procedure on toy graph with stop constraints"""
        prob = VehicleRoutingProblem(self.G, num_stops=3)
        prob.solve(cspy=False)
        assert prob.best_value == 70
        prob.solve(cspy=False, pricing_strategy="BestEdges1")
        assert prob.best_value == 70

    def test_LP_stops_capacity(self):
        """Tests column generation procedure on toy graph"""
        prob = VehicleRoutingProblem(self.G, num_stops=3, load_capacity=10)
        prob.solve(cspy=False)
        assert prob.best_value == 80

    def test_LP_stops_capacity_duration(self):
        """Tests column generation procedure on toy graph"""
        prob = VehicleRoutingProblem(
            self.G,
            num_stops=3,
            load_capacity=10,
            duration=62,
        )
        prob.solve(cspy=False)
        assert prob.best_value == 85

    def test_LP_stops_time_windows(self):
        """Tests column generation procedure on toy graph"""
        prob = VehicleRoutingProblem(
            self.G,
            num_stops=3,
            time_windows=True,
        )
        prob.solve(cspy=False)
        assert prob.best_value == 80

    def test_LP_stops_elementarity(self):
        """Tests column generation procedure on toy graph"""
        self.G.add_edge(2, 1, cost=2)
        prob = VehicleRoutingProblem(
            self.G,
            num_stops=3,
        )
        prob.solve(cspy=False)
        assert prob.best_value == 67

    #########
    # other #
    #########

    def test_all(self):
        prob = VehicleRoutingProblem(self.G,
                                     num_stops=3,
                                     time_windows=True,
                                     duration=63,
                                     load_capacity=10)
        prob.solve(cspy=False)
        lp_best = prob.best_value
        prob.solve(cspy=True)
        cspy_best = prob.best_value
        assert int(lp_best) == int(cspy_best)

    def test_initial_solution(self):
        prob = VehicleRoutingProblem(self.G, num_stops=4)
        routes = [
            ["Source", 1, "Sink"],
            ["Source", 2, 3, "Sink"],
            ["Source", 4, 5, "Sink"],
        ]

        prob.solve(initial_routes=routes, cspy=False)
        assert prob.best_value == 70

    def test_knapsack(self):
        self.G.nodes["Source"]["demand"] = 0
        self.G.nodes["Sink"]["demand"] = 0
        assert get_num_stops_upper_bound(self.G, 10) == 4

    def test_pricing_strategies(self):
        sol = []
        for strategy in [
                "Exact", "BestPaths", "BestEdges1", "BestEdges2", "Hyper"
        ]:
            prob = VehicleRoutingProblem(self.G, num_stops=4)
            prob.solve(pricing_strategy=strategy)
            sol.append(prob.best_value)
        assert len(set(sol)) == 1

    def test_lock(self):
        routes = [["Source", 3, "Sink"]]
        prob = VehicleRoutingProblem(self.G, num_stops=4)
        prob.solve(preassignments=routes)
        assert prob.best_value == 80

    def test_partial_lock(self):
        routes = [["Source", 3]]
        prob = VehicleRoutingProblem(self.G, num_stops=4)
        prob.solve(preassignments=routes)
        assert prob.best_value == 75

    def test_complete_lock(self):
        routes = [
            ["Source", 1, "Sink"],
            ["Source", 2, "Sink"],
            ["Source", 3, "Sink"],
            ["Source", 4, "Sink"],
            ["Source", 5, "Sink"],
        ]
        prob = VehicleRoutingProblem(self.G)
        prob.solve(preassignments=routes)
        assert prob.best_value == 100

    def test_extend_preassignment(self):
        routes = [[2, 3]]
        prob = VehicleRoutingProblem(self.G, num_stops=4)
        prob.solve(preassignments=routes)
        assert prob.best_value == 70

    def test_pick_up_delivery(self):
        self.G.nodes[2]["request"] = 5
        self.G.nodes[2]["demand"] = 10
        self.G.nodes[3]["demand"] = 10
        self.G.nodes[3]["request"] = 4
        self.G.nodes[4]["demand"] = -10
        self.G.nodes[5]["demand"] = -10
        self.G.add_edge(2, 5, cost=10)
        self.G.remove_node(1)
        prob = VehicleRoutingProblem(
            self.G,
            load_capacity=15,
            pickup_delivery=True,
        )
        prob.solve(pricing_strategy="Exact", cspy=False)
        assert prob.best_value == 65

    def test_distribution_collection(self):
        self.G.nodes[1]["collect"] = 12
        self.G.nodes[4]["collect"] = 1
        prob = VehicleRoutingProblem(
            self.G,
            load_capacity=15,
            distribution_collection=True,
        )
        prob.solve(cspy=False)
        lp_sol = prob.best_value
        prob.solve(cspy=True)
        cspy_sol = prob.best_value
        assert lp_sol == cspy_sol
        assert lp_sol == 80

    def test_fixed_cost(self):
        prob = VehicleRoutingProblem(self.G, num_stops=3, fixed_cost=100)
        prob.solve()
        assert prob.best_value == 70 + 200
        assert set(prob.best_routes_cost.values()) == {30 + 100, 40 + 100}

    def test_drop_nodes(self):
        prob = VehicleRoutingProblem(self.G,
                                     num_stops=3,
                                     num_vehicles=1,
                                     drop_penalty=100)
        prob.solve()
        assert prob.best_value == 240
        assert prob.best_routes == {1: ["Source", 1, 2, 3, "Sink"]}

    def test_periodic(self):
        self.G.nodes[2]["frequency"] = 2
        prob = VehicleRoutingProblem(self.G, num_stops=2, periodic=2)
        prob.solve()
        assert prob.best_value == 90
        frequency = 0
        for r in prob.best_routes:
            if 2 in prob.best_routes[r]:
                frequency += 1
        assert frequency == 2
        assert prob.schedule[0] in [[1], [1, 2]]

    def test_mixed_fleet(self):
        for (i, j) in self.G.edges():
            self.G.edges[i, j]["cost"] = 2 * [self.G.edges[i, j]["cost"]]
        prob = VehicleRoutingProblem(
            self.G,
            load_capacity=[10, 15],
            fixed_cost=[10, 0],
            num_vehicles=[5, 1],
            mixed_fleet=True,
        )
        prob.solve()
        assert prob.best_value == 80
        assert set(prob.best_routes_type.values()) == {0, 1}

    def test_time_limit(self):
        prob = VehicleRoutingProblem(self.G, num_stops=3)
        start = time()
        prob.solve(cspy=False, time_limit=0.01)
        comp_time = time() - start
        assert comp_time < 0.01 + 0.15  # time_limit + time for mip
        assert prob.best_value == 70

    def test_dive(self):
        for (i, j) in self.G.edges():
            self.G.edges[i, j]["cost"] = 2 * [self.G.edges[i, j]["cost"]]
        prob = VehicleRoutingProblem(
            self.G,
            load_capacity=[10, 15],
            fixed_cost=[10, 0],
            num_vehicles=[5, 1],
            mixed_fleet=True,
        )
        prob.solve(dive=True)
        assert prob.best_value == 80
Example #39
from networkx import DiGraph, read_edgelist, hits_numpy
from sys import stdout


def get_top_keys(dictionary, top):
    items = dictionary.items()
    items.sort(reverse=True, key=lambda x: x[1])
    return items[:top]


print "Reading in Full Graph."
stdout.flush()
g = read_edgelist('data/wiki-Talk.txt', create_using=DiGraph(), nodetype=int)

print "HITS."
stdout.flush()

hubs, authorities = hits_numpy(g)

file = open("results/hubs_numpy.txt", "w+")
file.write("Top 100 Hubs by HITS\n")
for node in get_top_keys(hubs, 100):
    file.write("{} {}\n".format(node[0], node[1]))
file.close()

file = open("results/authorities_numpy.txt", "w+")
file.write("Top 100 Authorities by HITS\n")
for node in get_top_keys(authorities, 100):
    file.write("{} {}\n".format(node[0], node[1]))
file.close()
Example #40
    def Run(self, evento):
        if self.avvio == 1:
            return
        if self.dataset == 0:
            self.box.insert(END, 'YOU MUST IMPORT DATASET')
            self.box.update()
            return
        if self.iter == 0:
            self.box.delete(0, END)
        self.pulsanteavvio.config(text='Running...',
                                  bg='white',
                                  font=("Helvetica", self.font),
                                  borderwidth=0,
                                  relief="groove")
        self.pulsantestop.config(bg='red',
                                 font=("Helvetica", self.font, 'bold'),
                                 borderwidth=2,
                                 relief="groove")
        self.pulsanteclear.config(bg='white',
                                  font=("Helvetica", self.font),
                                  borderwidth=0,
                                  relief="groove")
        self.pulsanteavvio.update()
        self.box.grid(row=1, column=0, columnspan=5, rowspan=3)
        self.scrollbar.grid(row=1, ipady=267)
        self.box.config(height=29)
        self.pulsantestop.update()
        self.box.update()

        self.avvio = 1
        G = self.Graph.copy()
        dataset = split(self.dataset, [0, self.casi.get() + 1])
        dataset = dataset[1]
        '''
      Initialization. Create three collections:
          ArrayIndex[u] = index of node "u" in the dataset
          ArrayScore[u] = score factor of node "u" (given the current network and the dataset)
          ArrayRi[u] = number of values node "u" takes in the dataset
      '''
        self.box.update()
        proporzione = len(complement(G).edges()) + len(G.edges()) + 1
        tmp_score = DiGraph()
        for n in G.nodes():
            self.box.update()
            tmp_score.add_node(n, index=datasetIndex(n, dataset))

        ArrayIndex = get_node_attributes(tmp_score, 'index')
        self.box.update()
        c = 0
        for n in G.nodes():
            c = c + 1
            Ri_nodo = Ri(n, dataset)
            if self.avvio == 0:
                return
            if self.iter == 0:
                self.box.delete(END)
                self.box.insert(
                    END,
                    'Progress: ' + str(int(c * 100 / len(G.nodes()))) + '%')
            self.box.see(END)
            self.box.update()
            tmp_score.add_node(n,
                               score=Formula(G, n, dataset, Ri_nodo,
                                             ArrayIndex),
                               Ri=Ri_nodo)

        ArrayScore = get_node_attributes(tmp_score, 'score')
        ArrayRi = get_node_attributes(tmp_score, 'Ri')
        self.box.delete(END)
        self.box.update()
        '''
      Compute the network score by summing all the values in ArrayScore
      '''
        tmp_score = 0
        for n in G.nodes():
            tmp_score = tmp_score + ArrayScore[n]
        string = "score iniziale= " + str(tmp_score)
        self.box.update()
        if self.avvio == 0:
            return
        if self.iter == 0:
            self.box.insert(END, string)
            self.box.insert(END, ' ')
            self.time = 0
        else:
            self.iter = self.iter - 1
        self.box.update()
        inizio = time()
        self.tmptime = inizio
        self.box.update()
        tipo = 'tipo di operazione elementare'
        while tipo != 'not found':
            self.box.update()
            inizio_iterazione = time()
            tipo = 'not found'
            self.iter = self.iter + 1
            c = 0
            self.box.insert(END, 'Search: ' + str(c) + '%')
            self.box.see(END)
            self.box.update()
            scoreIniziale = tmp_score
            if self.avvio == 0:
                return

            for (u, v) in G.edges():
                c = c + 1
                self.box.update()
                if self.avvio == 0:
                    return
                G.remove_edge(u, v)  # remove
                tmp_score2 = scoreIniziale - ArrayScore[v] + Formula(
                    G, v, dataset, ArrayRi[v], ArrayIndex)
                if tmp_score2 < tmp_score:
                    tmp_score = tmp_score2
                    padre = u
                    figlio = v
                    tipo = 'remove'

                G.add_edge(v, u)
                self.box.update()
                if is_directed_acyclic_graph(
                        G) == True and self.avvio == 1:  # reverse
                    tmp_score2 = tmp_score2 - ArrayScore[u] + Formula(
                        G, u, dataset, ArrayRi[u], ArrayIndex)
                    if tmp_score2 < tmp_score:
                        tmp_score = tmp_score2
                        padre = u
                        figlio = v
                        tipo = 'reverse'
                G.add_edge(u, v)
                self.box.delete(END)
                self.box.insert(
                    END, 'Search: ' + str(int(c * 100 / proporzione)) + '%')
                self.box.see(END)
                self.box.update()
                G.remove_edge(v, u)

            comp = list(complement(G).edges())
            for (u, v) in complement(G).edges():
                if (v, u) in G.edges():
                    c = c + 1
                    self.box.delete(END)
                    self.box.insert(
                        END,
                        'Search: ' + str(int(c * 100 / proporzione)) + '%')
                    self.box.see(END)
                    self.box.update()
                    comp.remove((u, v))

            for (u, v) in comp:  # add
                G.add_edge(u, v)
                self.box.update()
                c = c + 1
                if self.avvio == 0:
                    return
                if is_directed_acyclic_graph(G) == True:
                    tmp_score2 = scoreIniziale - ArrayScore[v] + Formula(
                        G, v, dataset, ArrayRi[v], ArrayIndex)
                    if tmp_score2 < tmp_score:
                        tmp_score = tmp_score2
                        tipo = 'append'
                        padre = u
                        figlio = v
                self.box.delete(END)
                self.box.insert(
                    END, 'Search: ' + str(int(c * 99 / proporzione)) + '%')
                self.box.see(END)
                self.box.update()
                G.remove_edge(u, v)

            if (tipo == 'remove'):
                G.remove_edge(padre, figlio)
                ArrayScore[
                    figlio] = tmp_score - scoreIniziale + ArrayScore[figlio]

            elif (tipo == 'reverse'):
                G.add_edge(figlio, padre)
                G.remove_edge(padre, figlio)
                ArrayScore[figlio] = Formula(G, figlio, dataset,
                                             ArrayRi[figlio], ArrayIndex)
                ArrayScore[padre] = Formula(G, padre, dataset, ArrayRi[padre],
                                            ArrayIndex)

            elif (tipo == 'append'):
                G.add_edge(padre, figlio)
                ArrayScore[
                    figlio] = tmp_score - scoreIniziale + ArrayScore[figlio]

            c = c + 1
            self.box.delete(END)
            self.box.insert(END,
                            'Search: ' + str(int(c * 99 / proporzione)) + '%')
            self.box.see(END)
            self.box.update()

            if (tipo != 'not found'):
                string = str(self.iter) + ') ' + tipo + " (" + str(
                    padre) + ")→(" + str(figlio) + ")"
                self.box.delete(END)
                self.box.insert(END, string)
                string = '     score= ' + str(tmp_score)
                string = string + '      ' + tempo(time() - inizio_iterazione)
                self.box.insert(END, string)
                self.box.insert(END, '    ')
                self.box.see(END)
                self.Graph = G.copy()
                drawPng(G)
                self.updateImmagine(self)
                self.box.update()

        if (self.avvio == 1):
            self.box.delete(END)
            self.box.insert(END, '  ')
            self.time = self.time + time() - self.tmptime
            string = "SCORE FINALE=" + str(tmp_score) + "    " + str(
                tempo(self.time))
            self.box.insert(END, string)
            self.box.update()
            self.iter = 0
            self.avvio = 0
            self.updateImmagine(self)

        self.pulsanteavvio.config(text='Run')
        self.pulsantestop.config(bg='white',
                                 font=("Helvetica", self.font),
                                 borderwidth=0,
                                 relief="groove")
        self.pulsanteclear.config(bg='yellow',
                                  font=("Helvetica", self.font, 'bold'),
                                  borderwidth=2,
                                  relief="groove")
        self.box.see(END)
        self.pulsanteavvio.update()
        self.pulsantestop.update()
        self.box.see(END)
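The example above performs a greedy hill-climbing structure search over a DAG, evaluating three elementary moves per sweep (remove, reverse, add an edge) and applying the best one until no move improves the score. A GUI-free sketch of the same idea, assuming a DAG input and a hypothetical local_score(G, v) in place of Formula/ArrayScore (the score is minimized), might look like this:

from networkx import DiGraph, complement, is_directed_acyclic_graph


def hill_climb(G: DiGraph, local_score) -> DiGraph:
    # per-node score cache, playing the role of ArrayScore above
    score = {v: local_score(G, v) for v in G.nodes()}
    improved = True
    while improved:
        improved = False
        total = sum(score.values())
        best_score, best_move = total, None

        # try removing or reversing each existing edge
        for u, v in list(G.edges()):
            G.remove_edge(u, v)
            s_remove = total - score[v] + local_score(G, v)
            if s_remove < best_score:
                best_score, best_move = s_remove, ('remove', u, v)
            G.add_edge(v, u)
            if is_directed_acyclic_graph(G):
                s_reverse = s_remove - score[u] + local_score(G, u)
                if s_reverse < best_score:
                    best_score, best_move = s_reverse, ('reverse', u, v)
            G.remove_edge(v, u)
            G.add_edge(u, v)  # restore the original edge

        # try adding each missing edge
        for u, v in complement(G).edges():
            if G.has_edge(v, u):
                continue  # would create a 2-cycle
            G.add_edge(u, v)
            if is_directed_acyclic_graph(G):
                s_add = total - score[v] + local_score(G, v)
                if s_add < best_score:
                    best_score, best_move = s_add, ('add', u, v)
            G.remove_edge(u, v)  # restore

        # apply the best move found in this sweep, if any
        if best_move is not None:
            kind, u, v = best_move
            if kind == 'remove':
                G.remove_edge(u, v)
            elif kind == 'reverse':
                G.remove_edge(u, v)
                G.add_edge(v, u)
                score[u] = local_score(G, u)
            else:  # 'add'
                G.add_edge(u, v)
            score[v] = local_score(G, v)
            improved = True
    return G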
Beispiel #41
0
 def setup(self):
     G = DiGraph()
     G.add_edge("Source", 8, cost=0)
     G.add_edge("Source", 6, cost=1)
     G.add_edge("Source", 2, cost=1)
     G.add_edge("Source", 5, cost=1)
     G.add_edge(8, 6, cost=0)
     G.add_edge(6, 2, cost=0)
     G.add_edge(2, 5, cost=0)
     G.add_edge(5, "Sink", cost=0)
     G.add_edge(8, "Sink", cost=1)
     G.add_edge(6, "Sink", cost=1)
     G.add_edge(2, "Sink", cost=1)
     G.nodes[8]["demand"] = 8
     G.nodes[6]["demand"] = 4
     G.nodes[2]["demand"] = 1
     G.nodes[5]["demand"] = 2
     G.nodes[8]["collect"] = 1
     G.nodes[6]["collect"] = 1
     G.nodes[2]["collect"] = 1
     G.nodes[5]["collect"] = 2
     self.prob = VehicleRoutingProblem(G,
                                       load_capacity=15,
                                       distribution_collection=True)
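If this is vrpy's VehicleRoutingProblem, the fixture above would typically be exercised by solving the problem and inspecting the result. A hedged sketch; solve(), best_value and best_routes are assumed from vrpy's documented interface:

 def test_solve(self):
     self.prob.solve()
     # with simultaneous distribution and collection, every route must respect
     # load_capacity for both the delivered ("demand") and collected ("collect") quantities
     assert self.prob.best_value is not None
     assert self.prob.best_routes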
Beispiel #42
0
def mark_removed_circular_dep(pkgs: Dict[str, ResolvedPkg], G: DiGraph, node,
                              removed_node):
    pkgs[node].removed_circular_deps.add(removed_node)
    for pred in G.predecessors(node):
        mark_removed_circular_dep(pkgs, G, pred, removed_node)
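A minimal sketch of how the propagation behaves, using SimpleNamespace stand-ins for ResolvedPkg (names and graph are illustrative only):

from types import SimpleNamespace

from networkx import DiGraph

pkgs = {n: SimpleNamespace(removed_circular_deps=set()) for n in 'abc'}
G = DiGraph([('a', 'b'), ('b', 'c')])  # dependency edges a -> b -> c
mark_removed_circular_dep(pkgs, G, 'c', removed_node='d')
# 'd' is now recorded on 'c' and on every transitive predecessor of 'c',
# i.e. in pkgs['b'].removed_circular_deps and pkgs['a'].removed_circular_deps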
Beispiel #43
0
class GraphBuilder():
    """
    A class for building the reaction graph.
    """
    def __init__(self) -> None:
        self._reaction_graph = DiGraph()

    def _add_node(self, node_id: str, type: NodeType, label: str) -> None:
        """
        Adding a node if the node is not already in the graph.

        Args:
            node_id: The ID of a graph node e.g. a domain will have the ID A_[dom]
            type: The type of a node e.g. component, domain, residue
            label: The label of the node e.g. the domain name (dom)

        Returns:
            None

        """
        if not self._reaction_graph.has_node(node_id):
            logger.debug('Adding new node node_id: {0} label: {1}, type: {2}'.format(node_id, label, type))
            self._reaction_graph.add_node(node_id, label=label, type=type.value)

    def _add_edge(self, source: str, target: str, interaction: EdgeType, width: EdgeWith) -> None:
        """
        Adding an edge to the graph.

        Note:
             Internal edges are edges within the different levels of a specification.
             External edges are edges between specific resolution levels of two specifications,
                e.g. between component and domain, domain and domain, and so on.

        Args:
            source: The source of an edge e.g. component node, domain node, residue node.
            target: The target of an edge e.g. component node, domain node, residue node.
            interaction (EdgeType): The type of an interaction.
            width (EdgeWith): The width of an edge.

        Returns:
            None

        """
        if not self._reaction_graph.has_edge(source, target):
            logger.debug('Adding new edge source: {0} target: {1}, interaction: {2}, width: {3}'.format(source,
                                                                                                        target,
                                                                                                        interaction.value,
                                                                                                        width))
            self._reaction_graph.add_edge(source, target, interaction=interaction.value, width=width.value)
        elif width == EdgeWith.external:
            logger.debug('Replacing internal edge with external edge source: {0} target: {1}, interaction: {2}, width: {3}'.format(
                source, target, interaction.value, width))
            self._reaction_graph.add_edge(source, target, interaction=interaction.value, width=width.value)

    def add_external_edge(self, source: Spec, target: Spec, type: EdgeType) -> None:
        """
        Adding an external edge.

        Note:
            An external edge is an edge between specific resolution levels of two specifications,
                e.g. between component and domain, domain and domain, and so on.
        Args:
            source: The source specification.
            target: The target specification.
            type (EdgeType): The type of this edge, e.g. interaction, modification, etc.

        Returns:
            None

        """
        logger.info('Adding external edge source: {0} target: {1}, interaction: {2}'.format(get_node_id(source, source.resolution),
                                                                                            get_node_id(target, target.resolution),
                                                                                            type))
        self._add_edge(get_node_id(source, source.resolution), get_node_id(target, target.resolution),
                       interaction=type, width=EdgeWith.external)

    def add_spec_information(self, specification: Spec) -> None:
        """
        Adding specification information to the reaction graph.

        Args:
            specification: The specification of a reaction reactant

        Returns:
            None

        """

        def _add_spec_nodes() -> None:
            logger.info('Adding component node -> id: {0}, label: {1}'.format(get_node_id(specification, NodeType.component),
                                                                              get_node_label(specification, NodeType.component)))
            self._add_node(node_id=get_node_id(specification, NodeType.component), type=NodeType.component,
                           label=get_node_label(specification, NodeType.component))

            if specification.locus.domain:
                logger.info('Adding domain node -> id: {0}, label: {1}'.format(get_node_id(specification, NodeType.domain),
                                                                               get_node_label(specification, NodeType.domain)))
                self._add_node(get_node_id(specification, NodeType.domain), type=NodeType.domain,
                               label=get_node_label(specification, NodeType.domain))
            if specification.locus.residue:
                logger.info('Adding residue node id: {0}, label: {1}'.format(get_node_id(specification, NodeType.residue),
                                                                             get_node_label(specification, NodeType.residue)))
                self._add_node(get_node_id(specification, NodeType.residue), type=NodeType.residue,
                               label=get_node_label(specification, NodeType.residue))

        def _add_spec_edges() -> None:
            """
            Adding internal edges between nodes.

            Note:
                Internal edges are edges within the different levels of a specification.

            Returns:
                None

            """

            if specification.locus.domain:
                logger.info('Adding internal edge component -> domain source: {} target: {}'.format(get_node_id(specification, NodeType.component),
                                                                                           get_node_id(specification, NodeType.domain)))
                self._add_edge(get_node_id(specification, NodeType.component),
                               get_node_id(specification, NodeType.domain), interaction=EdgeType.interaction,
                               width=EdgeWith.internal)

                if specification.locus.residue:
                    logger.info('Adding internal edge domain -> residue source: {} target: {}'.format(get_node_id(specification, NodeType.domain),
                                                                                             get_node_id(specification, NodeType.residue)))
                    self._add_edge(get_node_id(specification, NodeType.domain), get_node_id(specification, NodeType.residue),
                                   interaction=EdgeType.interaction, width=EdgeWith.internal)
            elif specification.locus.residue:
                logger.info('Adding internal edge component -> residue source: {} target: {}'.format(get_node_id(specification, NodeType.component),
                                                                       get_node_id(specification, NodeType.residue)))
                self._add_edge(get_node_id(specification, NodeType.component), get_node_id(specification, NodeType.residue),
                               interaction=EdgeType.interaction, width=EdgeWith.internal)
        logger.info('Adding nodes for {}'.format(specification))
        _add_spec_nodes()
        logger.info('Adding internal edges for {}'.format(specification))
        _add_spec_edges()

    def get_graph(self) -> DiGraph:
        """
        Returning the reaction graph

        Returns:
            The reaction graph (DiGraph).

        """
        return self._reaction_graph
Beispiel #44
0
 def test_same_node_is_reachable(self):
     """Tests that a node is always reachable from itself."""
     # G is an arbitrary tournament on ten nodes.
     G = DiGraph(sorted(p) for p in combinations(range(10), 2))
     assert all(is_reachable(G, v, v) for v in G)
Beispiel #45
0
    def _analyze_core(self, graph: networkx.DiGraph):

        endnodes = [ node for node in graph.nodes() if graph.out_degree[node] == 0 ]
        graph_changed = False

        # to_update is keyed by the region head.
        # this is because different end nodes may lead to the same region head: consider the case of the typical "fork"
        # region where stack canary is checked in x86-64 binaries.
        to_update: Dict[Any,Tuple[List[Tuple[Any,Any]],networkx.DiGraph]] = { }

        for end_node in endnodes:
            in_edges = list(graph.in_edges(end_node))

            if len(in_edges) > 1:
                region = networkx.DiGraph()
                region.add_node(end_node)
                region_head = end_node
            elif len(in_edges) == 1:
                # back-trace until it reaches a node with two predecessors
                region, region_head = self._single_entry_region(graph, end_node)
                tmp_in_edges = graph.in_edges(region_head)
                # remove in_edges that are coming from a node inside the region
                in_edges = [ ]
                for src, dst in tmp_in_edges:
                    if src not in region:
                        in_edges.append((src, dst))
            else:  # len(in_edges) == 0
                continue

            # region and in_edge might have been updated. re-check
            if not in_edges:
                # this is a single connected component in the graph
                # no need to duplicate anything
                continue
            if len(in_edges) == 1:
                # there is no need to duplicate it
                continue
            if len(in_edges) < self.min_indegree:
                # does not meet the threshold
                continue

            to_update[region_head] = in_edges, region

        for region_head, (in_edges, region) in to_update.items():
            # update the graph
            for in_edge in in_edges:
                pred_node = in_edge[0]

                # Modify the graph and then add an edge to the copy of the region
                copies = { }
                queue = [ (pred_node, region_head) ]
                while queue:
                    pred, node = queue.pop(0)
                    if node in copies:
                        node_copy = copies[node]
                    else:
                        node_copy = node.copy()
                        node_copy.idx = next(self.node_idx)
                        copies[node] = node_copy

                    graph.add_edge(pred, node_copy)

                    for succ in region.successors(node):
                        queue.append((node_copy, succ))

            # remove all in-edges
            graph.remove_edges_from(in_edges)
            # remove the node to be copied
            graph.remove_nodes_from(region)
            graph_changed = True

        return graph_changed
Beispiel #46
0
 def test_unreachable_pair(self):
     """Tests for an unreachable pair of nodes."""
     G = DiGraph([(0, 1), (0, 2), (1, 2)])
     assert not is_reachable(G, 1, 0)
Beispiel #47
0
def arrange_graph(graph: nx.DiGraph) -> nx.DiGraph:
    """
    Arranges nodes in the given graph.

    Nodes referring to the same visit will have the same y-coordinate.

    To determine x-coordinates, a weighted sum is computed for each pair of
    clusters: the number of patients who transition between the two clusters
    at any time point, with each transition weighted by
    `2 ^ (visit number - 1)`. The pair with the highest sum establishes the
    two anchors of an ordered list. Each additional cluster C is then placed
    immediately to the right of the already-sorted cluster with which it
    shares the highest sum.

    Args:
        graph: The graph to arrange.

    Returns:
        A graph with x- and y-coordinates computed for each node.
    """

    # Attach y-coordinates to the nodes.
    for n in graph.nodes():
        graph.node[n]['y'] = (graph.node[n]['visitid'] - 1) * 72

    # Determine an ordering of clusters.
    edge_lengths = collections.Counter()
    for edge in graph.edges(data=True):
        cluster1 = get_base_classification(edge[2]['sourcecluster'])
        cluster2 = get_base_classification(edge[2]['targetcluster'])
        if cluster1 != cluster2:
            weight = edge[2]['weight'] * 2**(
                graph.node[edge[2]['sourcecluster']]['visitid'] - 1)
            edge_lengths[cluster1, cluster2] += weight
            edge_lengths[cluster2, cluster1] += weight

    cluster_order = []  # start empty so graphs without inter-cluster edges still work
    for edge, _ in edge_lengths.most_common():
        if not cluster_order:
            cluster_order = list(edge)
        else:
            cluster1, cluster2 = edge
            if cluster1 in cluster_order and cluster2 in cluster_order:
                continue
            if cluster1 in cluster_order:
                cluster_order.insert(cluster_order.index(cluster1) + 1, cluster2)
            elif cluster2 in cluster_order:
                cluster_order.insert(cluster_order.index(cluster2) + 1, cluster1)
            else:
                cluster_order += list(edge)

    # Attach unassigned clusters, i.e., those with no connections.
    all_clusters = {graph.node[node]['clusterletter'] for node in graph.nodes()}
    missing_nodes = sorted(all_clusters - set(cluster_order))
    cluster_order += missing_nodes

    # Then attach x-coordinates.
    for node in graph.nodes():
        graph.node[node]['x'] = cluster_order.index(
            graph.node[node]['clusterletter']) * 72

    return graph
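To make the visit weighting concrete, a toy calculation with invented numbers (not taken from any dataset):

import collections

edge_lengths = collections.Counter()
# 5 patients transition A -> B at visit 3: contribution 5 * 2 ** (3 - 1) = 20
edge_lengths['A', 'B'] += 5 * 2 ** (3 - 1)
edge_lengths['B', 'A'] += 5 * 2 ** (3 - 1)
# 9 patients transition A -> C at visit 1: contribution 9 * 2 ** (1 - 1) = 9
edge_lengths['A', 'C'] += 9 * 2 ** (1 - 1)
edge_lengths['C', 'A'] += 9 * 2 ** (1 - 1)
# most_common() ranks the A<->B pair first, so A and B become the two anchors;
# C is then inserted next to A, the anchor with which it shares the higher sum.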
Beispiel #48
0
 def test_is_strongly_connected(self):
     """Tests for a strongly connected tournament."""
     G = DiGraph([(0, 1), (1, 2), (2, 0)])
     assert is_strongly_connected(G)
Beispiel #49
0
def addAntonymAttribute(
    G: nx.DiGraph, attribute: str, left_right: (str, str),
    att_left_right: (Any, Any)) -> None:
    G.nodes[left_right[0]][attribute] = att_left_right[0]
    G.nodes[left_right[1]][attribute] = att_left_right[1]
Beispiel #50
0
 def test_not_strongly_connected(self):
     """Tests for a tournament that is not strongly connected."""
     G = DiGraph([(0, 1), (0, 2), (1, 2)])
     assert not is_strongly_connected(G)
Beispiel #51
0
def addAntonymInfo(
    G: nx.DiGraph, left_right: (str, str),
    left_right_terms: (list, list)) -> None:
    G.add_edges_from([(sub, left_right[0]) for sub in left_right_terms[0]])
    G.add_edges_from([(sub, left_right[1]) for sub in left_right_terms[1]])
Beispiel #52
0
 def test_is_tournament(self):
     G = DiGraph()
     G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)])
     assert is_tournament(G)
Beispiel #53
0
def iter_leaves(g: nx.DiGraph) -> Generator[Hashable, None, None]:
    for n in g.nodes():
        if not list(g.successors(n)):
            yield n
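For example, on a small branching graph the generator yields exactly the nodes without successors:

import networkx as nx

g = nx.DiGraph([(1, 2), (1, 3), (3, 4)])
assert list(iter_leaves(g)) == [2, 4]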
Beispiel #54
0
 def test_self_loops(self):
     """A tournament must have no self-loops."""
     G = DiGraph()
     G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)])
     G.add_edge(0, 0)
     assert not is_tournament(G)
Beispiel #55
0
 def test_path_is_hamiltonian(self):
     G = DiGraph()
     G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)])
     path = hamiltonian_path(G)
     assert len(path) == 4
     assert all(v in G[u] for u, v in zip(path, path[1:]))
Beispiel #56
0
def calc_total_affinity(network: nx.DiGraph) -> float:
    view = network.edges(data="support")
    affinities = [support.affinity for _, _, support in view]
    return np.sum(affinities)
Beispiel #57
0
import networkx as nx
from gurobipy import GRB, Model, quicksum


def solveMaximumMulticommodityFlow(G: nx.DiGraph, commodities: dict) -> dict:
    """
    Solves the multicommodity maximum flow problem.
    :param G: directed graph
    :param commodities: Dict of source-sink-pairs
    :return: Dict mapping each commodity to a dict of edges and their flow values
    """
    multicommodityFlow = Model('MulticommodityFlow')

    # Variable
    X = dict()
    B = dict()

    for k in commodities:
        B[k] = multicommodityFlow.addVar(vtype=GRB.CONTINUOUS,
                                         lb=0,
                                         name=f'B_{k}')
        for a in G.edges():
            X[a, k] = multicommodityFlow.addVar(
                vtype=GRB.CONTINUOUS,
                lb=0,
                ub=G.get_edge_data(*a)['capacity'],
                name=f'X_{a}_{k}')

    # Objective function
    multicommodityFlow.setObjective(quicksum(B[k] for k in commodities),
                                    sense=GRB.MAXIMIZE)

    # Constraints
    for k, val in commodities.items():
        for v in G.nodes():
            if v == val[0]:
                multicommodityFlow.addConstr(
                    quicksum(X[a, k] for a in G.out_edges(v)) -
                    quicksum(X[a, k] for a in G.in_edges(v)), GRB.EQUAL, B[k])
            elif v == val[1]:
                multicommodityFlow.addConstr(
                    quicksum(X[a, k] for a in G.out_edges(v)) -
                    quicksum(X[a, k] for a in G.in_edges(v)), GRB.EQUAL, -B[k])
            else:
                multicommodityFlow.addConstr(
                    quicksum(X[a, k] for a in G.out_edges(v)) -
                    quicksum(X[a, k] for a in G.in_edges(v)), GRB.EQUAL, 0)

    for a in G.edges():
        multicommodityFlow.addConstr(quicksum(X[a, k] for k in commodities),
                                     GRB.LESS_EQUAL,
                                     G.get_edge_data(*a)['capacity'])

    # Solve model
    multicommodityFlow.update()
    multicommodityFlow.optimize()

    if multicommodityFlow.status == GRB.OPTIMAL:
        flows = dict()
        for k in commodities:
            flows[k] = dict()
            for a in G.edges():
                if multicommodityFlow.getVarByName(f'X_{a}_{k}').x > 0:
                    flows[k][a] = multicommodityFlow.getVarByName(
                        f'X_{a}_{k}').x
        return flows

    else:
        return dict()
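A small usage sketch (assumes gurobipy is installed with a valid license; the graph and commodities below are illustrative):

import networkx as nx

G = nx.DiGraph()
G.add_edge('s1', 'a', capacity=4)
G.add_edge('a', 't1', capacity=4)
G.add_edge('s2', 'a', capacity=3)
G.add_edge('a', 't2', capacity=3)

# each commodity maps to a (source, sink) pair
commodities = {1: ('s1', 't1'), 2: ('s2', 't2')}
flows = solveMaximumMulticommodityFlow(G, commodities)
# e.g. flows[1][('s1', 'a')] holds the flow routed for commodity 1 on that arc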
Beispiel #58
0
def test_build_type_graph():
    int_source = IntSource()
    float_store = FloatStore()
    int_float = IntFloatTransformer()
    float_int = FloatIntTransformer()
    string = StringTransformer()

    sources = {int_source, float_store}
    sinks = {float_store}
    transformers = {int_float, float_int, string}

    expected = DiGraph()
    expected.add_node(str)
    expected.add_node(int, sources={int_source})
    expected.add_node(float, sources={float_store}, sinks={float_store})
    expected.add_edge(int, float, cost=1, transformer=int_float)
    expected.add_edge(float, int, cost=3, transformer=float_int)
    expected.add_edge(str, int, cost=1, transformer=string)
    expected.add_edge(str, float, cost=1, transformer=string)
    expected.add_edge(int, str, cost=1, transformer=string)
    expected.add_edge(float, str, cost=1, transformer=string)

    # noinspection PyTypeChecker
    actual = _build_type_graph(sources, sinks, transformers)

    for node in expected.nodes():
        assert expected.node[node] == actual.node[node]

    for source, target in expected.edges():
        assert expected.edge[source][target] == actual.edge[source][target]

    for node in actual.nodes():
        assert expected.node[node] == actual.node[node]

    for source, target in actual.edges():
        assert expected.edge[source][target] == actual.edge[source][target]
Beispiel #59
0
    def _construct(self):
        """
        We want to build the type of DFG that's used in "Automated Ident. of Crypto
        Primitives in Binary Code with Data Flow Graph Isomorphisms." Unlike that
        paper, however, we're building it on Vex IR instead of assembly instructions.
        """
        cfg = self._cfg
        p = self.project
        dfgs = {}
        l.debug("Building Vex DFG...")

        for node in cfg.nodes():
            try:
                if node.simprocedure_name is None:
                    irsb = p.factory.block(node.addr).vex
                else:
                    l.debug("Cannot process SimProcedures, ignoring %s" %
                            node.simprocedure_name)
                    continue
            except Exception as e:
                l.debug(e)
                continue
            tmpsnodes = {}
            storesnodes = {}
            putsnodes = {}
            statements = irsb.statements
            dfg = DiGraph()

            for stmt_idx, stmt in enumerate(statements):
                # We want to skip over certain types, such as Imarks
                if self._need_to_ignore(node.addr, stmt, stmt_idx):
                    continue

                # break statement down into sub-expressions
                exprs = stmt.expressions
                stmt_node = stmt
                dfg.add_node(stmt)

                if stmt.tag == 'Ist_WrTmp':
                    tmpsnodes[stmt.tmp] = stmt_node
                    if exprs[0].tag == 'Iex_Binop':
                        if exprs[1].tag == 'Iex_RdTmp':
                            dfg.add_edge(tmpsnodes[exprs[1].tmp], stmt_node)
                        else:
                            dfg.add_edge(exprs[1], stmt_node)
                        if exprs[2].tag == 'Iex_RdTmp':
                            dfg.add_edge(tmpsnodes[exprs[2].tmp], stmt_node)
                        else:
                            dfg.add_edge(exprs[2], stmt_node)

                    elif exprs[0].tag == 'Iex_Unop':
                        dfg.remove_node(stmt_node)
                        if exprs[1].tag == 'Iex_RdTmp':
                            tmpsnodes[stmt.tmp] = copy(tmpsnodes[exprs[1].tmp])
                            tmpsnodes[stmt.tmp].tmp = stmt.tmp
                        else:
                            tmpsnodes[stmt.tmp] = exprs[1]

                    elif exprs[0].tag == 'Iex_RdTmp':
                        tmpsnodes[stmt.tmp] = copy(tmpsnodes[exprs[0].tmp])
                        tmpsnodes[stmt.tmp].tmp = stmt.tmp

                    elif exprs[0].tag == 'Iex_Get':
                        if exprs[0].offset in putsnodes:
                            dfg.add_edge(putsnodes[exprs[0].offset], stmt_node)
                        if len(exprs) > 1 and exprs[1].tag == "Iex_RdTmp":
                            dfg.add_edge(tmpsnodes[exprs[1].tmp], stmt_node)
                        elif len(exprs) > 1:
                            dfg.add_edge(exprs[1], stmt_node)

                    elif exprs[0].tag == 'Iex_Load':
                        if exprs[1].tag == 'Iex_RdTmp':
                            dfg.add_edge(tmpsnodes[exprs[1].tmp], stmt_node)
                        else:
                            dfg.add_edge(exprs[1], stmt_node)

                    else:
                        # Take a guess by assuming exprs[0] is the op and any other expressions are args
                        for e in exprs[1:]:
                            if e.tag == 'Iex_RdTmp':
                                dfg.add_edge(tmpsnodes[e.tmp], stmt_node)
                            else:
                                dfg.add_edge(e, stmt_node)

                elif stmt.tag == 'Ist_Store':
                    if exprs[0].tag == 'Iex_RdTmp':
                        dfg.add_edge(tmpsnodes[exprs[0].tmp], stmt_node)

                    elif exprs[0].tag == 'Iex_Const':
                        dfg.add_edge(exprs[0], stmt_node)

                    if exprs[1].tag == 'Iex_RdTmp':
                        dfg.add_edge(tmpsnodes[exprs[1].tmp], stmt_node)
                    else:
                        dfg.add_edge(exprs[1], stmt_node)

                elif stmt.tag == 'Ist_Put':
                    if exprs[0].tag == 'Iex_RdTmp':
                        dfg.add_edge(tmpsnodes[exprs[0].tmp], stmt_node)
                    elif exprs[0].tag == 'Iex_Const':
                        dfg.add_edge(exprs[0], stmt_node)
                    putsnodes[stmt.offset] = stmt_node

                elif stmt.tag == 'Ist_Exit':
                    if exprs[0].tag == 'Iex_RdTmp':
                        dfg.add_edge(tmpsnodes[exprs[0].tmp], stmt_node)

                elif stmt.tag == 'Ist_Dirty':
                    tmpsnodes[stmt.tmp] = stmt_node
                elif stmt.tag == 'Ist_CAS':
                    tmpsnodes[stmt.oldLo] = stmt_node

                else:
                    for e in stmt.expressions:
                        if e.tag == 'Iex_RdTmp':
                            dfg.add_edge(tmpsnodes[e.tmp], stmt_node)
                        else:
                            dfg.add_edge(e, stmt_node)

            for vtx in list(dfg.nodes()):
                if dfg.degree(vtx) == 0:
                    dfg.remove_node(vtx)

            if dfg.size() > 0:
                dfgs[node.addr] = dfg
        return dfgs
Beispiel #60
0
"""
  Author:   --<>
  Purpose: 
  Created: 2018/7/25
"""

import sys
import unittest
from lpp import *
from networkx import DiGraph

if __name__ == '__main__':
    RAW = open(sys.argv[1], 'r')
    END = open(sys.argv[2], 'w')
    has = {}
    seq = ""
    number = 1
    network = DiGraph()
    for line in RAW:
        line_l = line.strip().split()

        if line_l[0] not in has:
            has[line_l[0]] = ""
            start = line_l[7] + line_l[8]

        else:
            end = line_l[7] + line_l[8]
            network.add_edge(start, end)
            start = end
    for start, end in network.edges():
        END.write(start + '\t' + end + '\n')
    #END.write('>scaffold%s\n'%(number)+seq+'\n')