Example #1
    def __init__(self, net):
        self._graph = pygraphviz.AGraph(strict=False,
                                        directed=True,
                                        ranksep=1.5)
        self._net = net
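Example #1 only shows the constructor; a minimal self-contained sketch of how such a graph is typically populated and rendered might look like this (the node names and output filename are illustrative, not part of the original class):

import pygraphviz

graph = pygraphviz.AGraph(strict=False, directed=True, ranksep=1.5)
graph.add_edge('parent', 'child_a')   # nodes are created implicitly
graph.add_edge('parent', 'child_b')
graph.layout(prog='dot')              # compute positions with the dot engine
graph.draw('example.png')             # render using the computed layout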
Example #2
    def layout(self, type_):

        if pygraphviz:
            G = pygraphviz.AGraph(strict=False, directed=True)

            if type_ == 'dot LR':
                G.graph_attr['rankdir'] = 'LR'

            type_ = type_.split()[0]
            G.graph_attr['ranksep'] = '0.125'

        elif pydot:
            G = pydot.Dot('graphname', graph_type='digraph')

            if type_ == 'dot LR':
                G.set_layout('dot')
                G.set('rankdir', 'LR')
            else:
                G.set_layout(type_)

            G.set('ranksep', '0.125')

        for from_, to in self.link.values():
            if pygraphviz:
                G.add_edge(
                    (from_, from_.gnx),
                    (to, to.gnx),
                )
            elif pydot:
                G.add_edge(pydot.Edge(from_.gnx, to.gnx))

        for i in self.nodeItem:
            if pygraphviz:
                G.add_node((i, i.gnx))
            elif pydot:
                gnode = pydot.Node(i.gnx)

                rect = self.nodeItem[i].boundingRect()
                G.add_node(gnode)

                for child in i.children:
                    key = (i, child)

                    if key not in self.hierarchyLinkItem or child not in self.nodeItem:
                        continue

                    G.add_edge(pydot.Edge(i.gnx, child.gnx))

        if pygraphviz:
            G.layout(prog=type_)
        elif pydot:
            tempName = tempfile.NamedTemporaryFile(dir=tempfile.gettempdir(),
                                                   delete=False)
            G.write_dot(tempName.name)
            G = pydot.graph_from_dot_file(tempName.name)

        for i in self.nodeItem:
            if pygraphviz:
                gn = G.get_node((i, i.gnx))
                x, y = map(float, gn.attr['pos'].split(','))

                i.u['_bklnk']['x'] = x
                i.u['_bklnk']['y'] = -y
                self.nodeItem[i].setPos(x, -y)
                self.nodeItem[i].do_update()

            elif pydot:
                lst = G.get_node(''.join(['"', i.gnx, '"']))
                if len(lst) > 0:
                    x, y = map(float, lst[0].get_pos().strip('"').split(','))
                    i.u['_bklnk']['x'] = x
                    i.u['_bklnk']['y'] = -y
                    self.nodeItem[i].setPos(x, -y)
                    self.nodeItem[i].do_update()

        if pydot:
            x, y, width, height = map(float, G.get_bb().strip('"').split(','))
            self.ui.canvasView.setSceneRect(
                self.ui.canvas.sceneRect().adjusted(x, y, width, height))

        self.do_update(adjust=False)

        self.center_graph()
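The core pygraphviz pattern in this method is running a layout and then reading each node's computed coordinates back from its 'pos' attribute; a stripped-down sketch of that round-trip (with placeholder node names) is:

import pygraphviz

G = pygraphviz.AGraph(strict=False, directed=True)
G.graph_attr['ranksep'] = '0.125'
G.add_edge('a', 'b')
G.add_edge('a', 'c')
G.layout(prog='dot')                      # dot writes a 'pos' attribute per node
for n in G.nodes():
    x, y = map(float, n.attr['pos'].split(','))
    print(n, x, -y)                       # e.g. feed into a scene item's setPos(x, -y), as above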
Example #3
#https://github.com/profthyagu/Python-Decision-Tree-Using-ID3

# Load libraries
import math
import pandas as pd
import numpy as np
import networkx as nx
import matplotlib.pyplot as plt
import operator
from sklearn.metrics import confusion_matrix

import pydotplus

import pygraphviz as pgv

G = pgv.AGraph(directed=True)


def entropy(attr_name):
    entropy = 0
    row_del_dict = dict()
    inner_row_del_dict = dict()
    # getting column contents of current attribute name from data extracted from csv file
    attr_list = list(data[attr_name])
    if attr_name == 'fast':
        # get number of unique classes in attribute
        class_attr_list = set(attr_list)
        all_classes = list(class_attr_list)  # convert back to list
        # print(all_classes)
        for x in all_classes:
            probability = attr_list.count(x) / len(attr_list)
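The snippet is cut off before the entropy is accumulated; the standard ID3 entropy it is building is H = -sum(p_i * log2(p_i)) over the class probabilities, which in isolation can be sketched as:

import math

def class_entropy(values):
    # Shannon entropy of a list of class labels (the quantity the loop above computes).
    total = len(values)
    result = 0.0
    for cls in set(values):
        p = values.count(cls) / total
        result -= p * math.log(p, 2)
    return result

# e.g. class_entropy(list(data['fast'])) for the target column used above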
Example #4
    def __init__(self,
                 treatment_name,
                 outcome_name,
                 graph=None,
                 common_cause_names=None,
                 instrument_names=None,
                 effect_modifier_names=None,
                 mediator_names=None,
                 observed_node_names=None,
                 missing_nodes_as_confounders=False):
        self.treatment_name = parse_state(treatment_name)
        self.outcome_name = parse_state(outcome_name)
        instrument_names = parse_state(instrument_names)
        common_cause_names = parse_state(common_cause_names)
        effect_modifier_names = parse_state(effect_modifier_names)
        mediator_names = parse_state(mediator_names)
        self.logger = logging.getLogger(__name__)

        if graph is None:
            self._graph = nx.DiGraph()
            self._graph = self.build_graph(common_cause_names,
                                           instrument_names,
                                           effect_modifier_names,
                                           mediator_names)
        elif re.match(r".*\.dot", graph):
            # load dot file
            try:
                import pygraphviz as pgv
                self._graph = nx.DiGraph(nx.drawing.nx_agraph.read_dot(graph))
            except Exception as e:
                self.logger.error("Pygraphviz cannot be loaded. " + str(e) +
                                  "\nTrying pydot...")
                try:
                    import pydot
                    self._graph = nx.DiGraph(
                        nx.drawing.nx_pydot.read_dot(graph))
                except Exception as e:
                    self.logger.error("Error: Pydot cannot be loaded. " +
                                      str(e))
                    raise e
        elif re.match(r".*\.gml", graph):
            self._graph = nx.DiGraph(nx.read_gml(graph))
        elif re.match(r".*graph\s*\{.*\}\s*", graph):
            try:
                import pygraphviz as pgv
                self._graph = pgv.AGraph(graph, strict=True, directed=True)
                self._graph = nx.drawing.nx_agraph.from_agraph(self._graph)
            except Exception as e:
                self.logger.error("Error: Pygraphviz cannot be loaded. " +
                                  str(e) + "\nTrying pydot ...")
                try:
                    import pydot
                    P_list = pydot.graph_from_dot_data(graph)
                    self._graph = nx.drawing.nx_pydot.from_pydot(P_list[0])
                except Exception as e:
                    self.logger.error("Error: Pydot cannot be loaded. " +
                                      str(e))
                    raise e
        elif re.match(".*graph\s*\[.*\]\s*", graph):
            self._graph = nx.DiGraph(nx.parse_gml(graph))
        else:
            self.logger.error(
                "Error: Please provide graph (as string or text file) in dot or gml format."
            )
            self.logger.error("Error: Incorrect graph format")
            raise ValueError
        if missing_nodes_as_confounders:
            self._graph = self.add_missing_nodes_as_common_causes(
                observed_node_names)
        # Adding node attributes
        self._graph = self.add_node_attributes(observed_node_names)
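For the DOT-string branch above, the essential conversion is pygraphviz parsing the string and networkx wrapping the result; a minimal sketch with an illustrative three-node graph:

import networkx as nx
import pygraphviz as pgv

dot_string = "digraph { Z -> X; Z -> Y; X -> Y; }"   # illustrative causal graph
agraph = pgv.AGraph(string=dot_string)               # parse the DOT text
g = nx.DiGraph(nx.drawing.nx_agraph.from_agraph(agraph))
print(sorted(g.edges()))                             # [('X', 'Y'), ('Z', 'X'), ('Z', 'Y')]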
Example #5
if __name__ == '__main__':
    dotFile_or_edgeFile = raw_input("d or e: ")

    if dotFile_or_edgeFile in ['d', 'e']:
        fp_name = raw_input("input file: ")

        if dotFile_or_edgeFile == 'e':
            fp = open(fp_name, 'r')
            G = nx.MultiGraph()
            for line in fp:
                line = line.split()
                if len(line) == 3:
                    G.add_edge(line[0], line[1], label=line[2])
            fp.close()
            nx.drawing.nx_pydot.write_dot(G, 'tempfile.dot')
            A = pgv.AGraph('tempfile.dot', strict=False)
        else:
            A = pgv.AGraph(fp_name, strict=False)

        A.layout(prog='dot')
        A.draw('file.png')
        print("done...")

        G = nx.Graph(nx.drawing.nx_pydot.read_dot('tempfile.dot'))

        # here find min distance from given position node n
        start = str(raw_input("start vertex: "))

        min_wt = 0
        weights = get_edge_attributes(G, 'label')
Example #6
def layered_callback(data):
    LG = pg.AGraph(directed=False, strict=True)
    LG.layout(prog='dot')

    rospy.loginfo("layered nodes reading starts..")

    max_layer = 0

    for lay_node_id in data.lay_nodes:
        if lay_node_id.state == "red":
            if lay_node_id.layer > max_layer:
                max_layer = lay_node_id.layer

    rospy.loginfo("Maximum layer = %d", max_layer)

    count_robot = []
    count_red = []
    count_blue = []
    count_target = []

    for i in range(0, max_layer + 1):
        count_robot.append(0)
        count_red.append(0)
        count_blue.append(0)
    for i in range(max_layer):
        count_target.append(0)

    for lay_node_id in data.lay_nodes:
        if lay_node_id.state == "robot":
            for i in range(0, max_layer + 1):
                if lay_node_id.layer == i:
                    count_robot[i] += 1
        if lay_node_id.state == "red":
            for i in range(0, max_layer + 1):
                if lay_node_id.layer == i:
                    count_red[i] += 1
        if lay_node_id.state == "blue":
            for i in range(0, max_layer + 1):
                if lay_node_id.layer == i:
                    count_blue[i] += 1
        if lay_node_id.state == "target":
            for i in range(0, max_layer):
                if lay_node_id.layer - 1 == i:
                    count_target[i] += 1

    for i in range(0, max_layer + 1):
        rospy.loginfo("Number of robot layered nodes = %d at layer %d",
                      count_robot[i], i)
        rospy.loginfo("Number of red layered nodes = %d at layer %d",
                      count_red[i], i)
        rospy.loginfo("Number of blue layered nodes = %d at layer %d",
                      count_blue[i], i)
    for i in range(0, max_layer):
        rospy.loginfo("Number of target layered nodes = %d at layer %d",
                      count_target[i], i + 1)

    interval_nodes = 150

    pos_robot_start = []
    pos_red_start = []
    pos_blue_start = []
    pos_target_start = []

    for i in range(0, max_layer + 1):
        if count_robot[i] % 2 == 0:
            pos_robot_start.append(-1 *
                                   (count_robot[i] / 2 - 1) * interval_nodes -
                                   interval_nodes / 2)
        else:
            pos_robot_start.append(-1 * (count_robot[i] - 1) / 2 *
                                   interval_nodes)
        if count_red[i] % 2 == 0:
            pos_red_start.append(-1 * (count_red[i] / 2 - 1) * interval_nodes -
                                 interval_nodes / 2)
        else:
            pos_red_start.append(-1 * (count_red[i] - 1) / 2 * interval_nodes)
        if count_blue[i] % 2 == 0:
            pos_blue_start.append(-1 *
                                  (count_blue[i] / 2 - 1) * interval_nodes -
                                  interval_nodes / 2)
        else:
            pos_blue_start.append(-1 * (count_blue[i] - 1) / 2 *
                                  interval_nodes)
    for i in range(0, max_layer):
        if count_target[i] % 2 == 0:
            pos_target_start.append(-1 * (count_target[i] / 2 - 1) *
                                    interval_nodes - interval_nodes / 2)
        else:
            pos_target_start.append(-1 * (count_target[i] - 1) / 2 *
                                    interval_nodes)

    inc_robot = []
    inc_red = []
    inc_blue = []
    inc_target = []

    for i in range(0, max_layer + 1):
        inc_robot.append(0)
        inc_red.append(0)
        inc_blue.append(0)
    for i in range(max_layer):
        inc_target.append(0)

    init_red_start = 0
    init_robot_start = -75
    init_blue_start = -150
    init_target_start = -225

    # Nodes for layered graph
    for lay_node_id in data.lay_nodes:
        for i in range(0, max_layer + 1):
            if lay_node_id.state == "robot":
                if lay_node_id.layer == i:
                    temp_node_name = "(r" + str(lay_node_id.id) + "," + str(
                        lay_node_id.layer) + ",p" + str(
                            lay_node_id.connected_id) + ")"
                    LG.add_node(temp_node_name)
                    n = LG.get_node(temp_node_name)
                    n.attr['pos'] = "%f,%f)" % (pos_robot_start[i] +
                                                inc_robot[i] * interval_nodes,
                                                init_robot_start - 300 * i)
                    rospy.loginfo(
                        "(r" + str(lay_node_id.id) + ",%d) pos=(%f,%f)",
                        lay_node_id.layer,
                        pos_robot_start[i] + inc_robot[i] * interval_nodes,
                        init_robot_start - 300 * i)
                    inc_robot[i] += 1
            if lay_node_id.state == "red":
                if lay_node_id.layer == i:
                    temp_node_name = "(p" + str(lay_node_id.id) + "," + str(
                        lay_node_id.layer) + ",red)"
                    LG.add_node(temp_node_name)
                    n = LG.get_node(temp_node_name)
                    n.attr['pos'] = "%f,%f)" % (pos_red_start[i] +
                                                inc_red[i] * interval_nodes,
                                                init_red_start - 300 * i)
                    rospy.loginfo(
                        "(red" + str(lay_node_id.id) + ",%d) pos=(%f,%f)",
                        lay_node_id.layer,
                        pos_red_start[i] + inc_red[i] * interval_nodes,
                        init_red_start - 300 * i)
                    inc_red[i] += 1
            if lay_node_id.state == "blue":
                if lay_node_id.layer == i:
                    temp_node_name = "(p" + str(lay_node_id.id) + "," + str(
                        lay_node_id.layer) + ",blue)"
                    LG.add_node(temp_node_name)
                    n = LG.get_node(temp_node_name)
                    n.attr['pos'] = "%f,%f)" % (pos_blue_start[i] +
                                                inc_blue[i] * interval_nodes,
                                                init_blue_start - 300 * i)
                    rospy.loginfo(
                        "(blue" + str(lay_node_id.id) + ",%d) pos=(%f,%f)",
                        lay_node_id.layer,
                        pos_blue_start[i] + inc_blue[i] * interval_nodes,
                        init_blue_start - 300 * i)
                    inc_blue[i] += 1
            if i > 0:
                if lay_node_id.state == "target":
                    if lay_node_id.layer == i:
                        temp_node_name = "(t" + str(
                            lay_node_id.id) + "," + str(
                                lay_node_id.layer) + ",p" + str(
                                    lay_node_id.connected_id) + ")"
                        LG.add_node(temp_node_name)
                        n = LG.get_node(temp_node_name)
                        n.attr['pos'] = "%f,%f)" % (
                            pos_target_start[i - 1] + inc_target[i - 1] *
                            interval_nodes, init_target_start - 300 * (i - 1))
                        rospy.loginfo(
                            "(target" + str(lay_node_id.id) +
                            ",%d) pos=(%f,%f)", lay_node_id.layer,
                            pos_target_start[i - 1] +
                            inc_target[i - 1] * interval_nodes,
                            init_target_start - 300 * (i - 1))
                        inc_target[i - 1] += 1

    # Edges for general graph
    for lay_node_id in data.lay_nodes:
        for i in range(0, max_layer + 1):
            if lay_node_id.state == "robot":
                if lay_node_id.layer == i:
                    for j in range(0, lay_node_id.loc_deg):
                        temp_node_name = "(r" + str(
                            lay_node_id.id) + "," + str(
                                lay_node_id.layer) + ",p" + str(
                                    lay_node_id.connected_id) + ")"
                        temp_node_neighbor_name = "(p" + str(
                            lay_node_id.loc_neighbor_id[j]) + "," + str(
                                lay_node_id.layer) + ",blue)"
                        LG.add_edge(temp_node_name, temp_node_neighbor_name)
                        e = LG.get_edge(temp_node_name,
                                        temp_node_neighbor_name)
                        e.attr['weight'] = lay_node_id.edge_weight[j]
                        temp_weight = lay_node_id.edge_weight[j]
                        temp_weight = float("{0:.2f}".format(temp_weight))
                        e.attr['label'] = str(temp_weight)
            if lay_node_id.state == "red":
                if lay_node_id.layer == i:
                    for j in range(0, lay_node_id.loc_deg):
                        temp_node_name = "(p" + str(
                            lay_node_id.id) + "," + str(
                                lay_node_id.layer) + ",red)"
                        temp_node_neighbor_name = "(r" + str(
                            lay_node_id.loc_neighbor_id[j]) + "," + str(
                                lay_node_id.layer) + ",p" + str(
                                    lay_node_id.id) + ")"
                        LG.add_edge(temp_node_name, temp_node_neighbor_name)
                        e = LG.get_edge(temp_node_name,
                                        temp_node_neighbor_name)
                        e.attr['weight'] = lay_node_id.edge_weight[j]
                        temp_weight = lay_node_id.edge_weight[j]
                        temp_weight = float("{0:.2f}".format(temp_weight))
                        e.attr['label'] = str(temp_weight)
            if i < max_layer:
                if lay_node_id.state == "blue":
                    if lay_node_id.layer == i:
                        for j in range(0, lay_node_id.loc_deg):
                            temp_node_name = "(p" + str(
                                lay_node_id.id) + "," + str(
                                    lay_node_id.layer) + ",blue)"
                            temp_node_neighbor_name = "(t" + str(
                                lay_node_id.loc_neighbor_id[j]) + "," + str(
                                    lay_node_id.layer + 1) + ",p" + str(
                                        lay_node_id.id) + ")"
                            LG.add_edge(temp_node_name,
                                        temp_node_neighbor_name)
                            e = LG.get_edge(temp_node_name,
                                            temp_node_neighbor_name)
                            e.attr['weight'] = lay_node_id.edge_weight[j]
                            temp_weight = lay_node_id.edge_weight[j]
                            temp_weight = float("{0:.2f}".format(temp_weight))
                            e.attr['label'] = str(temp_weight)
            if i > 0:
                if lay_node_id.state == "target":
                    if lay_node_id.layer == i:
                        for j in range(0, lay_node_id.loc_deg):
                            temp_node_name = "(t" + str(
                                lay_node_id.id) + "," + str(
                                    lay_node_id.layer) + ",p" + str(
                                        lay_node_id.connected_id) + ")"
                            temp_node_neighbor_name = "(p" + str(
                                lay_node_id.loc_neighbor_id[j]) + "," + str(
                                    lay_node_id.layer) + ",red)"
                            LG.add_edge(temp_node_name,
                                        temp_node_neighbor_name)
                            e = LG.get_edge(temp_node_name,
                                            temp_node_neighbor_name)
                            e.attr['weight'] = lay_node_id.edge_weight[j]
                            temp_weight = lay_node_id.edge_weight[j]
                            temp_weight = float("{0:.2f}".format(temp_weight))
                            e.attr['label'] = str(temp_weight)

    rospack = rospkg.RosPack()
    LG.write(
        rospack.get_path('max_min_lp_visualization') +
        "/log/layered_graph.dot")
    LG.draw(rospack.get_path('max_min_lp_visualization') +
            "/log/layered_graph.png",
            prog='neato',
            args='-n2')
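The callback relies on setting every node position by hand and then rendering with neato -n2, which keeps the supplied coordinates instead of computing a layout; the same pattern in isolation (ROS-free, with made-up node names and output path) looks like:

import pygraphviz as pgv

LG = pgv.AGraph(directed=False, strict=True)
for idx, name in enumerate(["(r0,0,p1)", "(p1,0,red)", "(p2,0,blue)"]):
    LG.add_node(name)
    LG.get_node(name).attr['pos'] = "%f,%f" % (idx * 150, -300 * idx)
LG.add_edge("(r0,0,p1)", "(p1,0,red)", label="0.50", weight="0.50")
LG.draw("layered_sketch.png", prog="neato", args="-n2")   # -n2 uses the given positions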
Example #7
import pygraphviz as pgv

node_list = [
    '踩点', '地址和域名', '扫描', '开放端口', '漏洞扫描', '攻击', '口令猜测', '获取权限', '生成报告',
    '拒绝服务攻击', '邮箱', '社会工程学'
]

#['授权', '踩点'],
edge_list = [['踩点', '地址和域名'], ['地址和域名', '扫描'], ['扫描', '开放端口'], ['扫描', '口令猜测'],
             ['开放端口', '漏洞扫描'], ['漏洞扫描', '攻击'], ['攻击', '获取权限'],
             ['口令猜测', '获取权限'], ['获取权限', '生成报告'], ['地址和域名', '拒绝服务攻击'],
             ['踩点', '邮箱'], ['邮箱', '社会工程学'], ['社会工程学', '获取权限']]
g = pgv.AGraph(
    encoding='UTF-8',  # so that Chinese labels render correctly
    # rankdir='LR',       # left to right; the default is TB
    directed=True  # directed graph
)

for node in node_list:
    g.add_node(node,
               fontname='SimSun',
               fontsize=10.5,
               shape='box',
               style='filled')  # SimSun font
#     g.add_node(vulneral, fontname='Times New Roman', fontsize=12, shape = 'box', style='filled')

for edge in edge_list:
    g.add_edge(edge[0], edge[1])

#layout
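The example stops at the '#layout' marker; a plausible completion of that step (the output filename is an assumption) is:

g.layout(prog='dot')          # compute the top-to-bottom layout
g.draw('attack_graph.png')    # hypothetical output file for the rendered graph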
Example #8
            f1 = open(fpItemVersionLabel, 'r')
            arrVerLabel = f1.read().strip().split('\n')
            f1.close()
            jsonPartAST = ast.literal_eval(arrVerLabel[5])
            jsonPartPseudo = ast.literal_eval(arrVerLabel[11])
            print(type(jsonPartPseudo))
            jsonMixClone = copy.deepcopy(jsonAll)
            getMixJsonDict(jsonMixClone, jsonPartAST, jsonPartPseudo)
            fpItemMixCodeJson = fopItemVersionGraph + 'jsonMix.txt'
            fpItemGraphText = fopItemVersionGraph + 'g_all.dot'
            fpItemGraphPng = fopItemVersionGraph + 'g_all.png'
            f1 = open(fpItemMixCodeJson, 'w')
            f1.write(str(jsonMixClone))
            f1.close()
            graphAll = pgv.AGraph(directed=True)
            strRootProgramId = fonameItemProgram + '-' + fnVersionName
            # print('root program {}'.format(strRootProgramId))
            strRootLabel = ''
            strCodeType = arrVerLabel[0]
            strLOC = arrVerLabel[2]
            arrItemTab = arrVerLabel[10].split('\t')
            numInImpl = int(arrItemTab[0])
            numOutImpl = int(arrItemTab[1])
            strAppearPercent = int((((numInImpl * 100.0) /
                                     (numInImpl + numOutImpl)) // 10) + 1)
            if strAppearPercent == 11:
                strAppearPercent = 10
            strRootLabel = '{}\t{}\t{}'.format(strCodeType, strLOC,
                                               strAppearPercent)
Example #9
def render_stmt_graph(statements,
                      reduce=True,
                      english=False,
                      rankdir=None,
                      agent_style=None):
    """Render the statement hierarchy as a pygraphviz graph.

    Parameters
    ----------
    statements : list of :py:class:`indra.statements.Statement`
        A list of top-level statements with associated supporting statements
        resulting from building a statement hierarchy with
        :py:meth:`combine_related`.
    reduce : bool
        Whether to perform a transitive reduction of the edges in the graph.
        Default is True.
    english : bool
        If True, the statements in the graph are represented by their
        English-assembled equivalent; otherwise they are represented as
        text-formatted Statements.
    rankdir : str or None
        Argument to pass through to the pygraphviz `AGraph` constructor
        specifying graph layout direction. In particular, a value of 'LR'
        specifies a left-to-right direction. If None, the pygraphviz default
        is used.
    agent_style : dict or None
        Dict of attributes specifying the visual properties of nodes. If None,
        the following default attributes are used::

            agent_style = {'color': 'lightgray', 'style': 'filled',
                           'fontname': 'arial'}

    Returns
    -------
    pygraphviz.AGraph
        Pygraphviz graph with nodes representing statements and edges pointing
        from supported statements to supported_by statements.

    Examples
    --------
    Pattern for getting statements and rendering as a Graphviz graph:

    >>> from indra.ontology.bio import bio_ontology
    >>> braf = Agent('BRAF')
    >>> map2k1 = Agent('MAP2K1')
    >>> st1 = Phosphorylation(braf, map2k1)
    >>> st2 = Phosphorylation(braf, map2k1, residue='S')
    >>> pa = Preassembler(bio_ontology, [st1, st2])
    >>> pa.combine_related() # doctest:+ELLIPSIS
    [Phosphorylation(BRAF(), MAP2K1(), S)]
    >>> graph = render_stmt_graph(pa.related_stmts)
    >>> graph.write('example_graph.dot') # To make the DOT file
    >>> graph.draw('example_graph.png', prog='dot') # To make an image

    Resulting graph:

    .. image:: /images/example_graph.png
        :align: center
        :alt: Example statement graph rendered by Graphviz

    """
    import networkx as nx
    import pygraphviz as pgv
    from indra.assemblers.english import EnglishAssembler
    # Set the default agent formatting properties
    if agent_style is None:
        agent_style = {
            'color': 'lightgray',
            'style': 'filled',
            'fontname': 'arial'
        }
    # Sets to store all of the nodes and edges as we recursively process all
    # of the statements
    nodes = set([])
    edges = set([])
    stmt_dict = {}

    # Recursive function for processing all statements
    def process_stmt(stmt):
        nodes.add(str(stmt.matches_key()))
        stmt_dict[str(stmt.matches_key())] = stmt
        for sby_ix, sby_stmt in enumerate(stmt.supported_by):
            edges.add((str(stmt.matches_key()), str(sby_stmt.matches_key())))
            process_stmt(sby_stmt)

    # Process all of the top-level statements, getting the supporting statements
    # recursively
    for stmt in statements:
        process_stmt(stmt)
    # Create a networkx graph from the nodes
    nx_graph = nx.DiGraph()
    nx_graph.add_edges_from(edges)
    # Perform transitive reduction if desired
    if reduce:
        nx_graph = nx.algorithms.dag.transitive_reduction(nx_graph)
    # Create a pygraphviz graph from the nx graph
    try:
        pgv_graph = pgv.AGraph(name='statements',
                               directed=True,
                               rankdir=rankdir)
    except NameError:
        logger.error('Cannot generate graph because '
                     'pygraphviz could not be imported.')
        return None
    for node in nx_graph.nodes():
        stmt = stmt_dict[node]
        if english:
            ea = EnglishAssembler([stmt])
            stmt_str = ea.make_model()
        else:
            stmt_str = str(stmt)
        pgv_graph.add_node(node,
                           label='%s (%d)' % (stmt_str, len(stmt.evidence)),
                           **agent_style)
    pgv_graph.add_edges_from(nx_graph.edges())
    return pgv_graph
Example #10
    def loadGraph(self, path):
        # G = nx.Graph(nx.drawing.nx_agraph.read_dot(path))
        B = pgv.AGraph(path)
        B.layout(prog='circo')  # layout with the circo engine
        B.draw(path[:-3] + 'png')  # render to PNG with the same basename
Example #11
        # add any attributes to the flattened graph
        attr = dict(nbg.get_edge(_src, _dest).attr)

        e = g.get_edge(s, d)
        for k, v in attr.iteritems():
            e.attr[k] = v

    return g


if __name__ == "__main__":
    ''' mini test script '''

    # load test nbg file (including layers)
    f_nbg = "graz_setup.nbg"
    nbg = pgv.AGraph(os.path.join("../", "2way_virt", f_nbg))

    # flatten /strip out layers
    g2 = flatten_AGraph(nbg)
    # pick an arbitrary casu and show info
    casu = g2.nodes()[0]
    imap = get_inmap(g2, casu)
    omap = get_outmap(g2, casu)

    print casu, imap, omap

    # also show comparisons of layered vs flattened graphs
    if 1:
        print "=" * 60
        show_inout(nbg)
        print "\n\n"
Example #12
from networkx.drawing.nx_agraph import write_dot
import networkx as nx
import pygraphviz as pgv
dotpath = '/Users/iqbal/MLGD/mlgd/datasets/topics/set2/input/Topics_Layer_8.dot'
G = nx.Graph(pgv.AGraph(dotpath))

weightededgedot = open('Layer8.js', 'w')  # as csv_file:

e = "\"source\":\"{}\", \"target\":\"{}\", \"weight\":{}"

#854 -- 3860[weight="7"];
#
#v ='''[label:"immunology", level=1, weight="2783" ,  height=0.56, width=2.33, fontsize= 30, fontname="Arial"];'''
v = '"id":"{}", "label":"{}", "level":{}, "weight":{} ,  "height":{}, "width":{}, "fontsize": {}, "fontname":"{}"'
nlist = ""
for n in G.nodes():
    nlist = nlist + "{ " + v.format(n, G.node[n]["label"], G.node[n]["level"],
                                    G.node[n]["weight"], G.node[n]["height"],
                                    G.node[n]["width"], G.node[n]["fontsize"],
                                    G.node[n]["fontname"]) + " },\n"

nlist = nlist[:len(nlist) - 3] + "}"
eid = 0
elist = ""
for edge in G.edges():
    elist = elist + "{" + e.format(edge[0], edge[1],
                                   G[edge[0]][edge[1]]["weight"]) + "},\n"
    eid = eid + 1
elist = elist[:len(elist) - 3] + "}"

weightededgedot.write("var graph={ \"nodes\":[ " + nlist +
Example #13
import pygraphviz as pgv
import os
import sys

reload(sys)
sys.setdefaultencoding("utf-8")

data_file = sys.argv[1]
G = pgv.AGraph(directed=True,
               center="true",
               overlap="false",
               len="f",
               splines="false")
fp = open("./" + data_file + "gfile.txt", "r")
lines = fp.readlines()

for line in lines:
    line = line.strip("\n")
    conf_subj_relation_object = line.split("\t")

    G.add_node(conf_subj_relation_object[0])
    G.add_node(conf_subj_relation_object[2])
    G.add_edge(conf_subj_relation_object[0], conf_subj_relation_object[2])
    edge = G.get_edge(conf_subj_relation_object[0],
                      conf_subj_relation_object[2])
    edge.attr['label'] = conf_subj_relation_object[1]
    edge.attr['weight'] = conf_subj_relation_object[3]
    #edge.attr['penwidth'] = float(conf_subj_relation_object[3])*10

    if len(conf_subj_relation_object[1].split()) < 4:
        edge.attr['len'] = len(conf_subj_relation_object[1].split()) * 2
Example #14
lstFopMixVersion = sorted(glob.glob(fopSampleGraph + '*/'))
distanceHeader = 33

for i in range(0, len(lstFopMixVersion)):
    fpItemJson = lstFopMixVersion[i] + 'a_json.txt'
    fpItemJsonDot = lstFopMixVersion[i] + 'a_json.dot'
    fpItemJsonPng = lstFopMixVersion[i] + 'a_json.png'
    fpItemCpp = lstFopMixVersion[i] + '_a_code.cpp'
    f1 = open(fpItemCpp, 'r')
    arrCodes = f1.read().strip().split('\n')
    f1.close()
    f1 = open(fpItemJson, 'r')
    strContent = f1.read()
    f1.close()
    dictJsonAll = ast.literal_eval(strContent)
    graphItCompact = pgv.AGraph(directed=True)
    generateGraphCompact(dictJsonAll, '', arrCodes, -1, True, graphItCompact)
    graphItCompact.write(fpItemJsonDot)
    graphItCompact.layout(prog='dot')
    graphItCompact.draw(fpItemJsonPng)

    lstFopItemGraphs = sorted(glob.glob(lstFopMixVersion[i] + 'v_*_graphs/'))
    lstFpItemGraph = []
    for fop in lstFopItemGraphs:
        lstFpDot = sorted(glob.glob(fop + '*.dot'))
        for fpItem in lstFpDot:
            lstFpItemGraph.append(fpItem)

    for j in range(0, len(lstFpItemGraph)):
        fpItemMixDot = lstFpItemGraph[j]
        fpItemMixPng = lstFpItemGraph[j].replace('.dot', '_compact.png')
Example #15
def processNmapFile(inputNmapXmlFile, outputDotPngFile, skipDestHost,
                    destIpToNetwork):
    nmap_start = """<?xml version="1.0" encoding="UTF-8"?>
  <!DOCTYPE nmaprun>
  <nmaprun scanner="nmap" args="nmap">
  """
    nmap_end = "</nmaprun>"
    counter = 0
    topology = pygraphviz.AGraph()
    lastaddr = ""
    # Read xml file and split into separate small xml nmap files (to process large nmap files)
    host = False  # Flag for being inside a host in the XML report
    nmap_host = ""  # String with nmap XML report for a host
    pattern_start = re.compile("^<host starttime", re.IGNORECASE)
    pattern_stop = re.compile("^</host>", re.IGNORECASE)
    pattern_down = re.compile("^<host><status state=\"down\"", re.IGNORECASE)
    skip_nmap_host = False  # Set when a host is identified in the XML file which is down

    with open(inputNmapXmlFile, 'rt') as nmap_xml_report:
        for line in nmap_xml_report:
            if skip_nmap_host:
                if pattern_stop.search(
                        line
                ) != None:  # If a match is found (=> end of host section)
                    skip_nmap_host = False
                continue
            if pattern_down.search(
                    line) != None:  # If a match is found (=> host is down)
                skip_nmap_host = True
            if pattern_start.search(
                    line
            ) != None:  # If a match is found  (=> start of host section)
                nmap_host = nmap_start
                nmap_host += line
                host = True
            if pattern_start.search(line) == None and pattern_stop.search(
                    line) == None:  # If nothing matches (=> inside host)
                nmap_host += line
            if pattern_stop.search(
                    line
            ) != None:  # If a match is found (=> end of host section)
                nmap_host += line
                nmap_host += nmap_end
                host = False
                counter = counter + 1
                print("[*] Processing document " + str(counter))
                nmap = untangle.parse(nmap_host)

                for run in nmap.nmaprun.host:
                    #print(run.address["addr"])
                    # check for traceroute info
                    if not "trace" in dir(run) or not "hop" in dir(run.trace):
                        print("    No trace information for host")
                        continue
                    hops = run.trace.hop

                    # check if last hop equals to target host (else skip due to incomplete traceroute)
                    if run.address["addr"] != hops[len(hops) - 1]["ipaddr"]:
                        print("    Incomplete traceroute for host")
                        continue

                    # Workaround: nmap only records identified hops - those that could not
                    #             be identified are not present (e.g. *). But the ttl
                    #             is stored with each identified hop (e.g. ttl="2"). Therefore,
                    #             empty hops are refilled and a counter is used for identification of
                    #             missing hops.

                    # insert unknown, but missing hops
                    ttlCounter = 1
                    emptyHop = {
                        "ttl": 0,
                        "ipaddr": "unknown",
                    }
                    # list of dictionaries (hops -> hop)
                    completeHops = []
                    for hop in hops:
                        ttl = hop["ttl"]
                        # hop is not present
                        while str(ttl) != str(ttlCounter):
                            emptyHop["ttl"] = ttlCounter
                            completeHops.append(copy.deepcopy(emptyHop))
                            ttlCounter = ttlCounter + 1
                        if ttl == str(ttlCounter):
                            completeHops.append(hop)
                            ttlCounter = ttlCounter + 1

                    # Only gather traceroutes for hosts with more than 1 hop
                    # Workaround due to nmap outputting one hop traceroute
                    # although actually no trace was identified.
                    if len(completeHops) > 1:
                        hops = completeHops
                        # Skip the last system in the traceroute as only the route
                        # is of interest not the destination system itself.
                        if skipDestHost:
                            hops = hops[:-1]
                        for hop in hops:
                            ip = str(hop["ttl"]) + "\n" + hop["ipaddr"]
                            # The hop can be a dictionary or an untangled.element
                            # due to the function that appends "unknown" hops
                            # * untangle.Element
                            # * dict
                            if (isinstance(hop,
                                           dict)) and hop.get("host") != None:
                                ip += "\n" + hop["host"]
                            # untangled.element
                            if (isinstance(hop, untangle.Element)
                                ) and hop.get_attribute("host") != None:
                                ip += "\n" + hop["host"]
                            ttl = hop["ttl"]
                            # first hop
                            if str(ttl) == str(1):
                                print("    " + str(" - ".join(ip.split("\n"))))
                                topology.add_edge("scanner", ip)
                                lastaddr = ip
                            # hop on the route
                            else:
                                # check if last hop and set to /24 network
                                if destIpToNetwork and str(ttl) == str(
                                        hops[-1]['ttl']):
                                    octets = ip.split('.')
                                    ip = str(octets[0]) + "." + str(
                                        octets[1]) + "." + str(
                                            octets[2]) + ".0/24"
                                print("    " + str(" - ".join(ip.split("\n"))))
                                topology.add_edge(lastaddr, ip)
                                lastaddr = ip

    #write our output:
    topology.write(outputDotPngFile + '.dot')
    #dot - filter for drawing directed graphs
    #neato - filter for drawing undirected graphs
    #twopi - filter for radial layouts of graphs
    #circo - filter for circular layout of graphs
    #fdp - filter for drawing undirected graphs
    #sfdp - filter for drawing large undirected graphs
    topology.layout(prog='dot')  # use which layout from the list above^
    topology.draw(outputDotPngFile + '.png')
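The commented list above enumerates the Graphviz layout engines; they can be compared quickly by rendering the same small topology once per engine (a sketch with illustrative host names and output files):

import pygraphviz

sample = pygraphviz.AGraph()
sample.add_edge("scanner", "10.0.0.1")
sample.add_edge("10.0.0.1", "10.0.0.0/24")
for prog in ("dot", "neato", "twopi", "circo", "fdp", "sfdp"):
    sample.draw("topology_" + prog + ".png", prog=prog)   # one image per layout engine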
Example #16
def main():
    # Import data
    x_train, y_train = load_split_all()[0]
    data = x_train
    labels = y_train

    num_positives = np.count_nonzero(labels)
    num_negatives = len(labels) - num_positives
    # num_positives is max false negatives
    # num_negatives is max false positives - append these to fitness so we have reliable AuC
    fn_trivial_fitness = (0, num_positives)
    fp_trivial_fitness = (num_negatives, 0)

    creator.create("FitnessMin", base.Fitness, weights=(-1.0, -1.0))
    creator.create("Individual", gp.PrimitiveTree, fitness=creator.FitnessMin)

    # Arguments
    random.seed(25)
    crossover_rate = 0.5
    mutation_rate = 0.2
    samples = 10  # set to 10 when generating submission data
    calc_area = False  # set to true when generating submission data

    input_types = []
    for i in range(x_train.shape[1]):  # multiplication op doesn't work
        input_types.append(float)
    pset = gp.PrimitiveSetTyped("MAIN", input_types, bool)

    # Essential Primitives
    pset.addPrimitive(is_greater, [float, float], bool)
    pset.addPrimitive(is_equal_to, [float, float], bool)
    pset.addPrimitive(if_then_else, [bool, float, float], float)

    pset.addPrimitive(np.logical_not, [bool], bool)
    pset.addPrimitive(
        np.logical_and, [bool, bool],
        bool)  # Demorgan's rule says all logic ops can be made with not & and

    pset.addPrimitive(np.negative, [float], float)
    pset.addPrimitive(operator.mul, [float, float], float)
    pset.addPrimitive(operator.add, [float, float], float)
    pset.addPrimitive(operator.sub, [float, float], float)

    # constants
    # pset.addTerminal(1.0, float)
    pset.addTerminal(2.0, float)
    pset.addTerminal(10.0, float)
    pset.addTerminal(25.0, float)
    pset.addTerminal(1, bool)  # Necessary for valid compilation
    pset.addTerminal(0,
                     bool)  # Though I'd like to discourage, boosts performance

    # More primitives (for fun/tinkering/reducing verbosity of tree)
    # Complex ops
    # pset.addPrimitive(equal_conditional, [bool, bool, float, float], float)
    # Logic to float

    # Float to logic
    # pset.addPrimitive(in_range, [float, float, float], bool)

    # Logic to logic
    pset.addPrimitive(operator.xor, [bool, bool], bool)

    # Float to float
    pset.addPrimitive(relu, [float], float)
    # pset.addPrimitive(absolute, [float], float)
    # pset.addPrimitive(safe_division, [float, float], float)
    pset.addPrimitive(math.floor, [float], int)

    # Visualizing aids
    pset.renameArguments(ARG0='pclass')
    pset.renameArguments(ARG1='sex')
    pset.renameArguments(ARG2='age')
    pset.renameArguments(ARG3='sibsp')
    pset.renameArguments(ARG4='parch')
    pset.renameArguments(ARG5='fare')
    # pset.renameArguments(ARG6='embarked')
    min_init = 1
    max_init = 4
    toolbox = base.Toolbox()
    toolbox.register("expr",
                     gp.genGrow,
                     pset=pset,
                     min_=min_init,
                     max_=max_init)
    toolbox.register("individual", tools.initIterate, creator.Individual,
                     toolbox.expr)
    toolbox.register("population", tools.initRepeat, list, toolbox.individual)
    toolbox.register("compile", gp.compile, pset=pset)

    toolbox.register("evaluate",
                     bloatControlEval,
                     pset=pset,
                     data=data,
                     labels=labels)
    # select
    # toolbox.register("select", tools.selTournament, tournsize=3)
    toolbox.register("select", tools.selWorst)  # added

    # crossover
    toolbox.register("mate", gp.cxOnePoint)
    # mutate
    toolbox.register("expr_mut", gp.genFull, min_=0, max_=2)
    toolbox.register("mutate", gp.mutUniform, expr=toolbox.expr_mut, pset=pset)

    toolbox.decorate(
        "mate", gp.staticLimit(key=operator.attrgetter("height"),
                               max_value=17))
    toolbox.decorate(
        "mutate",
        gp.staticLimit(key=operator.attrgetter("height"), max_value=17))

    gen = range(40)
    avg_list = []
    max_list = []
    min_list = []
    population_size = 300
    pop = toolbox.population(n=population_size)
    fitnesses = list(map(toolbox.evaluate, pop))
    for ind, fit in zip(pop, fitnesses):
        ind.fitness.values = fit

    avg_areas = [
        0 for g in gen
    ]  # contains sum of performances per generation (averaged later)
    # for i in range(samples):  # sample 10 times
    # reset population at the start of each trial
    pop = toolbox.population(n=300)
    fitnesses = list(map(toolbox.evaluate, pop))
    for ind, fit in zip(pop, fitnesses):
        ind.fitness.values = fit
    # Begin the evolution
    for g in gen:
        if g > 30:
            toolbox.register("evaluate",
                             evalSymbReg,
                             pset=pset,
                             data=data,
                             labels=labels)
        else:
            toolbox.register("evaluate",
                             bloatControlEval,
                             pset=pset,
                             data=data,
                             labels=labels)
        # Select the next generation individuals
        offspring = toolbox.select(pop, len(pop))
        # Clone the selected individuals
        offspring = list(map(toolbox.clone, offspring))

        # Apply crossover and mutation on the offspring
        for child1, child2 in zip(offspring[::2], offspring[1::2]):
            if random.random() < crossover_rate:
                toolbox.mate(child1, child2)
                del child1.fitness.values
                del child2.fitness.values

        for mutant in offspring:
            if random.random() < mutation_rate:
                toolbox.mutate(mutant)
                del mutant.fitness.values

        # Evaluate the individuals with an invalid fitness .... define invalid???
        invalid_ind = [ind for ind in offspring if not ind.fitness.valid]
        fitnesses = map(toolbox.evaluate, invalid_ind)
        for ind, fit in zip(invalid_ind, fitnesses):
            ind.fitness.values = fit

        # Replace population
        pop[:] = offspring

        # Gather all the fitnesses in one list and print the stats
        fits = [ind.fitness.values[0] for ind in pop]

        length = len(pop)
        mean = sum(fits) / length
        sum2 = sum(x * x for x in fits)
        std = abs(sum2 / length - mean**2)**0.5
        g_max = max(fits)
        g_min = min(fits)

        avg_list.append(mean)
        max_list.append(g_max)
        min_list.append(g_min)

        # print("  Min %s" % g_min)
        # print("  Max %s" % g_max)
        # print("  Avg %s" % mean)
        # print("  Std %s" % std)

        # find area under curve for population
        if calc_area:
            # Evaluate our true fitnesses (sans bloat control)
            toolbox.register("evaluate",
                             evalSymbReg,
                             pset=pset,
                             data=data,
                             labels=labels)
            fitnesses = list(map(toolbox.evaluate, pop))
            for ind, fit in zip(pop, fitnesses):
                ind.fitness.values = fit

            hof_pop = generate_min_front(pop)
            # Extract fitnesses and sort so HoF draws correctly
            hof = np.asarray([ind.fitness.values for ind in hof_pop])
            hof = np.insert(hof, 0, [fp_trivial_fitness, fn_trivial_fitness],
                            0)
            hof = hof[np.argsort(hof[:, 0])]
            area = area_under_curve(hof)
            avg_areas[g] += area
            info = "\t\tAUC: %f" % area
        else:
            info = ""
        print("-- Generation %i --%s" % (g, info))

    print("-- End of (successful) evolution --")

    if calc_area:
        # average the areas
        avg_areas = [area / samples for area in avg_areas]
        # write to csv
        file = open("results/driver_results.csv", 'w')
        header = ','
        driver_line = "Driver,"
        for g in gen:
            header += "%d," % g
            driver_line += "%f," % avg_areas[g]
        header += "\n"
        file.write(header)
        file.write(driver_line)
        file.close()

    # Evaluate our true fitnesses (sans bloat control)
    toolbox.register("evaluate",
                     evalSymbReg,
                     pset=pset,
                     data=data,
                     labels=labels)
    fitnesses = list(map(toolbox.evaluate, pop))
    for ind, fit in zip(pop, fitnesses):
        ind.fitness.values = fit
    hof_pop = generate_min_front(pop)
    # Extract fitnesses and sort so HoF draws correctly
    hof = np.asarray([ind.fitness.values for ind in hof_pop])
    hof = np.insert(hof, 0, [fp_trivial_fitness, fn_trivial_fitness], 0)
    hof = hof[np.argsort(hof[:, 0])]
    # print(hof)

    # Charts
    pop_1 = [ind.fitness.values[0] for ind in pop]
    pop_2 = [ind.fitness.values[1] for ind in pop]

    plt.scatter(pop_1, pop_2, color='b')
    plt.scatter(hof[:, 0], hof[:, 1], color='r')
    plt.plot(hof[:, 0], hof[:, 1], color='r', drawstyle='steps-post')
    plt.xlabel("False Positives")
    plt.ylabel("False Negatives")
    plt.title("Pareto Front")
    print(area_under_curve(hof))

    if calc_area:
        print(avg_areas[-1])
    else:
        print(area_under_curve(hof))
    plt.show()
    if calc_area:
        plt.plot(gen, avg_areas, color='g')
        plt.xlabel("Generation")
        plt.ylabel("Area Under Curve")
        plt.title("AUC evolution")
        plt.show()

    print("Generating individual graphs")
    for k in range(len(hof_pop)):
        best_ind = hof_pop[k]
        nodes, edges, labels = gp.graph(best_ind)
        g = pgv.AGraph()
        g.add_nodes_from(nodes)
        g.add_edges_from(edges)
        g.layout(prog="dot")

        for i in nodes:
            n = g.get_node(i)
            n.attr["label"] = labels[i]

        g.draw("graphs/tree%s.pdf" % k)
Example #17
        decode_pkt(sec + usec / 1000000., pkt)

for meter in sorted(meter_readings.keys()):
    log.info("Readings for LAN ID " + str(meter) + ":")
    if meter_first_hour[meter] > meter_last_hour[meter]:
        meter_last_hour[meter] += 65536
    meter_readings_str = ''
    for hour in range(meter_first_hour[meter], meter_last_hour[meter] + 1):
        meter_readings_str += ("{0:5.2f}".format(
            meter_readings[meter][hour % 65536] /
            100.0) if meter_readings[meter][hour % 65536] >= 0 else "   ? ")
    log.info(meter_readings_str)

import pygraphviz

G = pygraphviz.AGraph(directed=True, ranksep=2.0, rankdir="RL")

for meter, parent in meter_parents.iteritems():
    meter_name = "{0:08x}".format(meter)
    parent_name = "{0:08x}".format(parent)
    if parent & 0x80000000:
        G.add_node(parent_name, color="red", rank="max")
    G.add_edge(meter_name, parent_name)

    if (meter_levels[parent] >= 2) and (parent not in meter_parents):
        gatekeeper_name = "{0:08x}".format(meter_gatekeepers[meter])

        G.add_node(gatekeeper_name, color="red", rank="max")
        G.add_node("Level 1\n(" + gatekeeper_name + ")", color="gray")
        G.add_edge("Level 1\n(" + gatekeeper_name + ")", gatekeeper_name)
        for x in range(1, meter_levels[parent] - 1):
Example #18
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Example showing use of unicode and UTF-8 encoding
"""

from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division

import pygraphviz as pgv

A = pgv.AGraph(encoding='UTF-8')
A.add_node(1, label='plain string')
A.add_node(2, label='unicode')

hello = 'Здравствуйте!'
A.add_node(3, label=hello)

hello = '\u0417\u0434\u0440\u0430\u0432\u0441\u0442\u0432\u0443\u0439\u0442\u0435!'
A.add_node(4, label=hello)

goodbye = "До свидания"
A.add_edge(1, hello, key=goodbye)

A.add_edge("שלום", hello)
A.add_edge(1, "こんにちは")

print(A)
A.write('utf8.dot')
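To render this Unicode graph to an image rather than DOT text, a font covering the needed scripts can be set on the default node attributes (a hedged addition; the font name and output file are assumptions):

A.node_attr['fontname'] = 'DejaVu Sans'   # assumed Unicode-capable font
A.layout(prog='dot')
A.draw('utf8.png')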
Example #19
def general_callback(data):
    AG = pg.AGraph(directed=False, strict=True)
    AG.layout(prog='dot')

    rospy.loginfo("General node reading starts..")

    count_robot = 0
    count_primitive = 0
    count_target = 0

    for gen_node_id in data.gen_nodes:
        if gen_node_id.type == "robot":
            count_robot = count_robot + 1
        if gen_node_id.type == "red":
            count_primitive = count_primitive + 1
        if gen_node_id.type == "target":
            count_target = count_target + 1

    rospy.loginfo("Number of robot general nodes = %d", count_robot)
    rospy.loginfo("Number of primitive general nodes = %d", count_primitive)
    rospy.loginfo("Number of target general nodes = %d", count_target)

    interval_nodes = 150

    if count_robot % 2 == 0:
        pos_robot_start = -1 * (count_robot / 2 -
                                1) * interval_nodes - interval_nodes / 2
    else:
        pos_robot_start = -1 * (count_robot - 1) / 2 * interval_nodes
    if count_primitive % 2 == 0:
        pos_primitive_start = -1 * (count_primitive / 2 -
                                    1) * interval_nodes - interval_nodes / 2
    else:
        pos_primitive_start = -1 * (count_primitive - 1) / 2 * interval_nodes
    if count_target % 2 == 0:
        pos_target_start = -1 * (count_target / 2 -
                                 1) * interval_nodes - interval_nodes / 2
    else:
        pos_target_start = -1 * (count_target - 1) / 2 * interval_nodes

    inc_robot = 0
    inc_primitive = 0
    inc_target = 0

    # Nodes for general graph
    for gen_node_id in data.gen_nodes:
        if gen_node_id.type == "robot":
            temp_node_name = gen_node_id.type + str(gen_node_id.id)
            AG.add_node(temp_node_name)
            n = AG.get_node(temp_node_name)
            n.attr['pos'] = "%f,%f)" % (pos_robot_start +
                                        inc_robot * interval_nodes, 75)
            inc_robot += 1
        if gen_node_id.type == "red":
            temp_node_name = "primitive" + str(gen_node_id.id)
            AG.add_node(temp_node_name)
            n = AG.get_node(temp_node_name)
            n.attr['pos'] = "%f,%f)" % (pos_primitive_start +
                                        inc_primitive * interval_nodes, 0)
            inc_primitive += 1
        if gen_node_id.type == "target":
            temp_node_name = gen_node_id.type + str(gen_node_id.id)
            AG.add_node(temp_node_name)
            n = AG.get_node(temp_node_name)
            n.attr['pos'] = "%f,%f)" % (pos_target_start +
                                        inc_target * interval_nodes, -75)
            inc_target += 1

    # Edges for general graph
    for gen_node_id in data.gen_nodes:
        if gen_node_id.type == "robot":
            count_robot_loc = 0
            for loc_neighbor in gen_node_id.loc_neighbor:
                temp_node_name = "robot" + str(gen_node_id.id)
                temp_neighbor_node_name = "primitive" + str(loc_neighbor)
                AG.add_edge(temp_node_name, temp_neighbor_node_name)
                e = AG.get_edge(temp_node_name, temp_neighbor_node_name)
                e.attr['weight'] = gen_node_id.loc_edge_weight[count_robot_loc]
                temp_weight = gen_node_id.loc_edge_weight[count_robot_loc]
                temp_weight = float("{0:.2f}".format(temp_weight))
                e.attr['label'] = str(temp_weight)
                count_robot_loc += 1
        if gen_node_id.type == "target":
            count_target_loc = 0
            for loc_neighbor in gen_node_id.loc_neighbor:
                temp_node_name = "target" + str(gen_node_id.id)
                temp_neighbor_node_name = "primitive" + str(loc_neighbor)
                AG.add_edge(temp_node_name, temp_neighbor_node_name)
                e = AG.get_edge(temp_node_name, temp_neighbor_node_name)
                e.attr['weight'] = gen_node_id.loc_edge_weight[
                    count_target_loc]
                temp_weight = gen_node_id.loc_edge_weight[count_target_loc]
                temp_weight = float("{0:.2f}".format(temp_weight))
                e.attr['label'] = str(temp_weight)
                count_target_loc += 1

    rospack = rospkg.RosPack()
    AG.write(
        rospack.get_path('max_min_lp_visualization') +
        "/log/general_graph.dot")
    AG.draw(rospack.get_path('max_min_lp_visualization') +
            "/log/general_graph.png",
            prog='neato',
            args='-n2')
Example #20
def build_pvgraph(nodes, edges, nodecolordict, edgecolordict, index_true,
                  truepathway, fontsize, circles=None, circlelabels=False,
                  diamondlabels=True, directed=False, labeldict={}):
    
    G = pgv.AGraph(strict=False, directed=directed, rankdir="LR", 
                   center=1, outputorder="edgesfirst", nodesep=0.05)
    if circles is None:
        circles = nodecolordict.keys()
    if labeldict == {}:
        for node in nodes:
            labeldict[node] = node
    for node in nodes:
        la = node
        if la in circles:
            color = nodecolordict[la]
            fillcolor = color
            penwidth = 1
            if node in truepathway:
                color = 'black'
                penwidth = 3
            if circlelabels:
                xlabel = labeldict[node]
            else:
                xlabel = ''
            G.add_node(node, width=0.3, height=0.3, shape="circle", label='',
                       xlabel=xlabel, fontname="helvetica", color=color, 
                       fillcolor=fillcolor, style="filled", fontsize=fontsize,
                       penwidth=penwidth)
        else:
            if la in nodecolordict.keys():
                color = nodecolordict[la]
                fillcolor = color
            else:
                fillcolor = 'white'
                color = 'black'
            fontcolor = 'black'
            if node in truepathway:
                color = 'black'
                fontcolor = 'white'
            if diamondlabels:
                label = labeldict[node]
            else:
                label = ''
            G.add_node(node, width=0.6, height=0.6, shape="diamond",
                       label=label, fontsize=fontsize, margin=0.001, 
                       fontname="helvetica", fontcolor=fontcolor, 
                       fillcolor=fillcolor, style="filled", color=color)
   
    if index_true is None:
        defaultcolor = 'black'
    else:
        defaultcolor = '#008B8B'

    for edge in edges.keys():
        edgevals = edges[edge]
        colors = ''
        penwidth = 2
        for val in edgevals:
            if edgecolordict is not None:
                colors += '%s:' % edgecolordict[val]
            elif colors == '' and val != index_true:
                colors += '%s:' % defaultcolor

        for val in edgevals:
            if val == index_true:
                penwidth = 5
                colors += 'black:'
        colors = colors.rstrip(':')
        G.add_edge(edge, color=colors, penwidth=penwidth)
    return G
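
A minimal usage sketch for build_pvgraph (not part of the original example): the node names, colors, edge dictionary and "true pathway" below are invented purely for illustration, and pygraphviz is assumed to be importable as pgv, as in the surrounding snippets.

import pygraphviz as pgv  # build_pvgraph above expects this module as pgv

# Hypothetical inputs: circle nodes A and B, diamond node X, one edge per node pair.
nodes = ['A', 'X', 'B']
edges = {('A', 'X'): [0], ('X', 'B'): [1]}
nodecolordict = {'A': 'lightblue', 'B': 'lightblue'}  # only these are drawn as circles
edgecolordict = {0: 'gray', 1: 'gray'}

G = build_pvgraph(nodes, edges, nodecolordict, edgecolordict,
                  index_true=1, truepathway=['A', 'X'], fontsize=10)
G.layout(prog='dot')
G.draw('pathway.png')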
def test_layout():
    A = pgv.AGraph(name='test graph')
    A.add_path([1, 2, 3, 4])
    A.layout()
    assert_equal(['pos' in n.attr for n in A.nodes()],
                 [True, True, True, True])
Example #22
                            if leaf in ancestry_dict[node]]
        print(leaf, possible_parents, "MMM")
        parent = min(possible_parents, key=lambda z: z[1])[0]
        print(leaf, parent, "PARENT")
        parents[leaf] = parent
    for node in ancestry_dict:
        #lifs = [x for x in ancestry_dict[node] if x in leaves]
        #for l in lifs:
        #    parents[l] = node
        ancestry_dict[node] = [
            x for x in ancestry_dict[node] if x not in leaves
        ]

print "BEFORE GRAPH"

A = PG.AGraph(directed=True, strict=True)

for u, v in parents.items():
    A.add_edge("M" + str(v), "M" + str(u))
    print "ADDING EDGE", "M" + str(v), "M" + str(u)

for i in range(len(attachments[0])):
    for j in range(len(attachments)):
        if attachments[j][i] == 1:
            A.add_edge("M" + str(j), "S" + str(i + 1))

# add losses
loss_dict = {}
for i in range(len(losses)):
    for j in range(len(losses[0])):
        if losses[i][j] == 1:
def do_all_the_things(ram_filename, bin_filename, map_filename, print_block_contents,
                      print_unknown_types, print_block_state, print_conflicting_symbols,
                      print_heap_structure, output_directory, draw_heap_layout,
                      draw_heap_ownership, analyze_snapshots):
    with open(ram_filename, "rb") as f:
        ram_dump = f.read()

    with open(bin_filename, "rb") as f:
        rom = f.read()

    symbols = {} # name -> address, size
    symbol_lookup = {} # address -> name
    manual_symbol_map = {} # autoname -> name

    def add_symbol(name, address=None, size=None):
        if "lto_priv" in name:
            name = name.split(".")[0]
        if address:
            address = int(address, 0)
        if size:
            size = int(size, 0)
        if name in symbols:
            if address and symbols[name][0] and symbols[name][0] != address:
                if print_conflicting_symbols:
                    print("Conflicting symbol: {} at addresses 0x{:08x} and 0x{:08x}".format(name, address, symbols[name][0]))
                return
            if not address:
                address = symbols[name][0]
            if not size:
                size = symbols[name][1]
        symbols[name] = (address, size)
        if address:
            if not size:
                size = 4
            for offset in range(0, size, 4):
                symbol_lookup[address + offset] = "{}+{}".format(name, offset)

    with open(map_filename, "r") as f:
        common_symbols = False
        name = None
        for line in f:
            line = line.strip()
            parts = line.split()
            if line.startswith("Common symbol"):
                common_symbols = True
            if line == "Discarded input sections":
                common_symbols = False
            if common_symbols:
                if len(parts) == 1:
                    name = parts[0]
                elif len(parts) == 2 and name:
                    add_symbol(name, size=parts[0])
                    name = None
                elif len(parts) == 3:
                    add_symbol(parts[0], size=parts[1])
                    name = None
            else:
                if len(parts) == 1 and parts[0].startswith((".text", ".rodata", ".bss")) and parts[0].count(".") > 1 and not parts[0].isnumeric() and ".str" not in parts[0]:
                    name = parts[0].split(".")[2]
                if len(parts) == 3 and parts[0].startswith("0x") and parts[1].startswith("0x") and name:
                    add_symbol(name, parts[0], parts[1])
                    name = None
                if len(parts) == 2 and parts[0].startswith("0x") and not parts[1].startswith("0x"):
                    add_symbol(parts[1], parts[0])
                if len(parts) == 4 and parts[0] not in SKIP_SYMBOLS and parts[1].startswith("0x") and parts[2].startswith("0x"):
                    name, address, size, source = parts
                    if name.startswith((".text", ".rodata", ".bss")) and name.count(".") > 1:
                        name = name.split(".")[-1]
                        add_symbol(name, address, size)
                    name = None
                # Linker symbols
                if len(parts) >= 4 and parts[0].startswith("0x") and parts[2] == "=" and parts[1] != ".":
                    add_symbol(parts[1], parts[0])

    rom_start = symbols["_sfixed"][0]
    ram_start = symbols["_srelocate"][0]
    ram_end = symbols["_estack"][0]
    ram_length = ram_end - ram_start
    if analyze_snapshots == "all":
        snapshots = range(len(ram_dump) // ram_length - 1, -1, -1)
    elif analyze_snapshots == "last":
        snapshots = range(len(ram_dump) // ram_length - 1, len(ram_dump) // ram_length - 2, -1)
    for snapshot_num in snapshots:
        ram = ram_dump[ram_length*snapshot_num:ram_length*(snapshot_num + 1)]

        ownership_graph = pgv.AGraph(directed=True)
        def load(address, size=4):
            if size is None:
                raise ValueError("You must provide a size")
            if address > ram_start:
                ram_address = address - ram_start
                if (ram_address + size) > len(ram):
                    raise ValueError("Unable to read 0x{:08x} from ram.".format(address))
                return ram[ram_address:ram_address+size]
            elif address < len(rom):
                if (address + size) > len(rom):
                    raise ValueError("Unable to read 0x{:08x} from rom.".format(address))
                return rom[address:address+size]

        def load_pointer(address):
            return struct.unpack("<I", load(address))[0]

        heap_start, heap_size = symbols["heap"]
        heap = load(heap_start, heap_size)
        total_byte_len = len(heap)

        # These change every run so we load them from the symbol table
        mp_state_ctx = symbols["mp_state_ctx"][0]
        manual_symbol_map["mp_state_ctx+20"] = "mp_state_ctx.vm.last_pool"
        last_pool = load_pointer(mp_state_ctx + 20) # (gdb) p &mp_state_ctx.vm.last_pool
        manual_symbol_map["mp_state_ctx+88"] = "mp_state_ctx.vm.dict_main.map.table"
        dict_main_table = load_pointer(mp_state_ctx + 88) # (gdb) p &mp_state_ctx.vm.dict_main.map.table
        manual_symbol_map["mp_state_ctx+68"] = "mp_state_ctx.vm.mp_loaded_modules_dict.map.table"
        imports_table = load_pointer(mp_state_ctx + 68) # (gdb) p &mp_state_ctx.vm.mp_loaded_modules_dict.map.table

        manual_symbol_map["mp_state_ctx+104"] = "mp_state_ctx.vm.mp_sys_path_obj.items"
        manual_symbol_map["mp_state_ctx+120"] = "mp_state_ctx.vm.mp_sys_argv_obj.items"

        for i in range(READLINE_HIST_SIZE):
            manual_symbol_map["mp_state_ctx+{}".format(128 + i * 4)] = "mp_state_ctx.vm.readline_hist[{}]".format(i)

        tuple_type = symbols["mp_type_tuple"][0]
        type_type = symbols["mp_type_type"][0]
        map_type = symbols["mp_type_map"][0]
        dict_type = symbols["mp_type_dict"][0]
        property_type = symbols["mp_type_property"][0]
        str_type = symbols["mp_type_str"][0]
        function_types = [symbols["mp_type_fun_" + x][0] for x in ["bc", "builtin_0", "builtin_1", "builtin_2", "builtin_3", "builtin_var"]]
        bytearray_type = symbols["mp_type_bytearray"][0]

        dynamic_type = 0x40000000 # placeholder, doesn't match any memory

        type_colors = {
            dict_type: "red",
            property_type: "yellow",
            map_type: "blue",
            type_type: "orange",
            tuple_type: "skyblue",
            str_type: "pink",
            bytearray_type: "purple"
            }

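        # The heap is carved into an allocation table (ATB, two bits per block), a
        # finaliser table (FTB, one bit per block) and the block pool itself; the
        # arithmetic below recovers those region sizes from the total heap length.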
        pool_shift = heap_start % BYTES_PER_BLOCK
        atb_length = total_byte_len * BITS_PER_BYTE // (BITS_PER_BYTE + BITS_PER_BYTE * BLOCKS_PER_ATB // BLOCKS_PER_FTB + BITS_PER_BYTE * BLOCKS_PER_ATB * BYTES_PER_BLOCK)
        pool_length = atb_length * BLOCKS_PER_ATB * BYTES_PER_BLOCK
        gc_finaliser_table_byte_len = (atb_length * BLOCKS_PER_ATB + BLOCKS_PER_FTB - 1) // BLOCKS_PER_FTB

        if print_heap_structure:
            print("mp_state_ctx at 0x{:08x} and length {}".format(*symbols["mp_state_ctx"]))
            print("Total heap length:", total_byte_len)
            print("ATB length:", atb_length)
            print("Total allocatable:", pool_length)
            print("FTB length:", gc_finaliser_table_byte_len)

        pool_start = heap_start + total_byte_len - pool_length - pool_shift
        pool = heap[-pool_length-pool_shift:]

        total_height = 65 * 18
        total_width = (pool_length // (64 * 16)) * 90

        map_element_blocks = [dict_main_table, imports_table]
        string_blocks = []
        bytecode_blocks = []
        qstr_pools = []
        qstr_chunks = []
        block_data = {}

        # Find all the qstr pool addresses.
        prev_pool = last_pool
        while prev_pool > ram_start:
            qstr_pools.append(prev_pool)
            prev_pool = load_pointer(prev_pool)

        def save_allocated_block(end, current_allocation):
            allocation_length = current_allocation * BYTES_PER_BLOCK
            start = end - allocation_length
            address = pool_start + start
            data = pool[start:end]
            if print_block_state:
                print("0x{:x} {} bytes allocated".format(address, allocation_length))
            if print_block_contents:
                print(data)

            rows = ""
            for k in range(current_allocation - 1):
                rows += "<tr>"
                for l in range(4):
                    rows += "<td port=\"{}\" height=\"18\" width=\"20\"></td>".format(4 * (k + 1) + l)
                rows += "</tr>"
            table = "<<table bgcolor=\"gray\" border=\"1\" cellpadding=\"0\" cellspacing=\"0\"><tr><td colspan=\"4\" port=\"0\" height=\"18\" width=\"80\">0x{:08x}</td></tr>{}</table>>".format(address, rows)

            ownership_graph.add_node(address, label=table, style="invisible", shape="plaintext")
            potential_type = None
            node = ownership_graph.get_node(address)
            node.attr["height"] = 0.25 * current_allocation
            block_data[address] = data
            for k in range(len(data) // 4):
                word = struct.unpack_from("<I", data, offset=(k * 4))[0]
                if (word < 0x00040000 and k == 0) or address in qstr_pools:
                    potential_type = word
                    bgcolor = "gray"
                    if address in qstr_pools:
                        bgcolor = "tomato"
                    elif potential_type in function_types:
                        bgcolor = "green"
                    elif potential_type in type_colors:
                        bgcolor = type_colors[potential_type]
                    elif print_unknown_types:
                        print("unknown type", hex(potential_type))
                    node.attr["label"] = "<" + node.attr["label"].replace("\"gray\"", "\"" + bgcolor + "\"") + ">"

                if potential_type == str_type and k == 3:
                    string_blocks.append(word)


                if potential_type == dict_type:
                    if k == 3:
                        map_element_blocks.append(word)

                if ram_start < word < (ram_start + len(ram)) and word % 16 == 0:
                    port = k
                    if k < 4:
                        port = 0
                    ownership_graph.add_edge(address, word, tailport=str(port)+":_")
                    #print("  0x{:08x}".format(word))
                    if address in qstr_pools:
                        if k > 0:
                            qstr_chunks.append(word)
                    if k == 0:
                        potential_type = dynamic_type


                if potential_type == dynamic_type:
                    if k == 0:
                        node.attr["fillcolor"] = "plum"
                    if k == 3 and 0x20000000 < word < 0x20040000:
                        map_element_blocks.append(word)

                if potential_type in function_types:
                    if k == 2 and 0x20000000 < word < 0x20040000:
                        bytecode_blocks.append(word)


        longest_free = 0
        current_free = 0
        current_allocation = 0
        total_free = 0
        for i in range(atb_length):
            # Each atb byte is four blocks worth of info
            atb = heap[i]
            for j in range(4):
                block_state = (atb >> (j * 2)) & 0x3
                if block_state != AT_FREE and current_free > 0:
                    if print_block_state:
                        print("{} bytes free".format(current_free * BYTES_PER_BLOCK))
                    current_free = 0
                if block_state != AT_TAIL and current_allocation > 0:
                    save_allocated_block((i * BLOCKS_PER_ATB + j) * BYTES_PER_BLOCK, current_allocation)
                    current_allocation = 0
                if block_state == AT_FREE:
                    current_free += 1
                    total_free += 1
                elif block_state == AT_HEAD or block_state == AT_MARK:
                    current_allocation = 1
                elif block_state == AT_TAIL and current_allocation > 0:
                    # In gc_free the logging happens before the tail is freed. So checking
                    # current_allocation > 0 ensures we only extend an allocation that's started.
                    current_allocation += 1
                longest_free = max(longest_free, current_free)
        #if current_free > 0:
        #    print("{} bytes free".format(current_free * BYTES_PER_BLOCK))
        if current_allocation > 0:
            save_allocated_block(pool_length, current_allocation)

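        # MicroPython packs interned strings (qstrs) into tagged words; the mask and
        # shift below follow the object representation of the port this dump came
        # from, so adjust them if another port encodes qstrs differently.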
        def is_qstr(obj):
            return obj & 0xff800007 == 0x00000006

        def find_qstr(qstr_index):
            pool_ptr = last_pool
            if not is_qstr(qstr_index):
                return "object"
            qstr_index >>= 3
            while pool_ptr != 0:
                if pool_ptr > ram_start:
                    if pool_ptr in block_data:
                        pool = block_data[pool_ptr]
                        prev, total_prev_len, alloc, length = struct.unpack_from("<IIII", pool)
                    else:
                        print("missing qstr pool: {:08x}".format(pool_ptr))
                        return "missing"
                else:
                    rom_offset = pool_ptr - rom_start
                    prev, total_prev_len, alloc, length = struct.unpack_from("<IIII", rom[rom_offset:rom_offset+32])
                    pool = rom[rom_offset:rom_offset+length*4]

                if qstr_index >= total_prev_len:
                    offset = (qstr_index - total_prev_len) * 4 + 16
                    start = struct.unpack_from("<I", pool, offset=offset)[0]
                    if start < heap_start:
                        start -= rom_start
                        if start > len(rom):
                            return "more than rom: {:x}".format(start + rom_start)
                        qstr_hash, qstr_len = struct.unpack("<BB", rom[start:start+2])
                        return rom[start+2:start+2+qstr_len].decode("utf-8")
                    else:
                        if start > heap_start + len(heap):
                            return "out of range: {:x}".format(start)
                        local = start - heap_start
                        qstr_hash, qstr_len = struct.unpack("<BB", heap[local:local+2])
                        return heap[local+2:local+2+qstr_len].decode("utf-8")

                pool_ptr = prev
            return "unknown"

        def format(obj):
            if obj & 1 != 0:
                return obj >> 1
            if is_qstr(obj):
                return find_qstr(obj)
            else:
                return "0x{:08x}".format(obj)

        for block in sorted(map_element_blocks):
            if block == 0:
                continue
            try:
                node = ownership_graph.get_node(block)
            except KeyError:
                print("Unable to find memory block for 0x{:08x}. Is there something running?".format(block))
                continue
            if block not in block_data:
                continue
            data = block_data[block]
            cells = []
            for i in range(len(data) // 8):
                key, value = struct.unpack_from("<II", data, offset=(i * 8))
                if key == MP_OBJ_NULL or key == MP_OBJ_SENTINEL:
                    cells.append(("", " "))
                else:
                    cells.append((key, format(key)))
                    if value in block_data:
                        edge = ownership_graph.get_edge(block, value)
                        edge.attr["tailport"] = str(key)
            rows = ""
            for i in range(len(cells) // 2):
                rows += "<tr><td port=\"{}\">{}</td><td port=\"{}\">{}</td></tr>".format(
                    cells[2*i][0],
                    cells[2*i][1],
                    cells[2*i+1][0],
                    cells[2*i+1][1])
            node.attr["shape"] = "plaintext"
            node.attr["style"] = "invisible"
            node.attr["label"] = "<<table bgcolor=\"gold\" border=\"1\" cellpadding=\"0\" cellspacing=\"0\"><tr><td colspan=\"2\">0x{:08x}</td></tr>{}</table>>".format(block, rows)

        for node, degree in ownership_graph.in_degree_iter():
            if degree == 0:
                address_bytes = struct.pack("<I", int(node))
                location = -1
                for _ in range(ram.count(address_bytes)):
                    location = ram.find(address_bytes, location + 1)
                    pointer_location = ram_start + location
                    source = "0x{:08x}".format(pointer_location)
                    if pointer_location in symbol_lookup:
                        source = symbol_lookup[pointer_location]
                    if source in manual_symbol_map:
                        source = manual_symbol_map[source]
                    if "readline_hist" in source:
                        string_blocks.append(int(node))
                    ownership_graph.add_edge(source, node)

        for block in string_blocks:
            if block == 0:
                continue
            node = ownership_graph.get_node(block)
            node.attr["fillcolor"] = "hotpink"
            if block in block_data:
                raw_string = block_data[block]
            else:
                print("Unable to find memory block for string at 0x{:08x}.".format(block))
                continue
            try:
                raw_string = block_data[block].decode('utf-8')
            except UnicodeDecodeError:
                raw_string = str(block_data[block])
            wrapped = []
            for i in range(0, len(raw_string), 16):
                wrapped.append(raw_string[i:i+16])
            node.attr["label"] = "\n".join(wrapped)
            node.attr["style"] = "filled"
            node.attr["fontname"] = "FiraCode-Medium"
            node.attr["fontpath"] = "/Users/tannewt/Library/Fonts/"
            node.attr["fontsize"] = 8
            node.attr["height"] = len(wrapped) * 0.25

        for block in bytecode_blocks:
            node = ownership_graph.get_node(block)
            node.attr["fillcolor"] = "lightseagreen"
            if block in block_data:
                data = block_data[block]
            else:
                print("Unable to find memory block for bytecode at 0x{:08x}.".format(block))
                continue
            prelude = Prelude(io.BufferedReader(io.BytesIO(data)))
            node.attr["shape"] = "plaintext"
            node.attr["style"] = "invisible"
            code_info_size = prelude.code_info_size
            rows = ""
            remaining_bytecode = len(data) - 16
            while code_info_size >= 16:
                rows += "<tr><td colspan=\"16\" bgcolor=\"palegreen\" height=\"18\" width=\"80\"></td></tr>"
                code_info_size -= 16
                remaining_bytecode -= 16
            if code_info_size > 0:
                rows += ("<tr><td colspan=\"{}\" bgcolor=\"palegreen\" height=\"18\" width=\"{}\"></td>"
                         "<td colspan=\"{}\" bgcolor=\"seagreen\" height=\"18\" width=\"{}\"></td></tr>"
                        ).format(code_info_size, code_info_size * (80 / 16), (16 - code_info_size), (80 / 16) * (16 - code_info_size))
                remaining_bytecode -= 16
            for i in range(remaining_bytecode // 16):
                rows += "<tr><td colspan=\"16\" bgcolor=\"seagreen\" height=\"18\" width=\"80\"></td></tr>"
            node.attr["label"] = "<<table border=\"1\" cellspacing=\"0\"><tr><td colspan=\"16\" bgcolor=\"lightseagreen\" height=\"18\" width=\"80\">0x{:08x}</td></tr>{}</table>>".format(block, rows)

        for block in qstr_chunks:
            if block not in block_data:
                ownership_graph.delete_node(block)
                continue
            data = block_data[block]
            qstrs_in_chunk = ""
            offset = 0
            while offset < len(data) - 1:
                qstr_hash, qstr_len = struct.unpack_from("<BB", data, offset=offset)
                if qstr_hash == 0:
                    qstrs_in_chunk += " " * (len(data) - offset)
                    offset = len(data)
                    continue
                offset += 2 + qstr_len + 1
                qstrs_in_chunk += "  " + data[offset - qstr_len - 1: offset - 1].decode("utf-8")
            printable_qstrs = ""
            for i in range(len(qstrs_in_chunk)):
                c = qstrs_in_chunk[i]
                if c not in string.printable or c in "\v\f":
                    printable_qstrs += "░"
                else:
                    printable_qstrs += qstrs_in_chunk[i]
            wrapped = []
            for i in range(0, len(printable_qstrs), 16):
                wrapped.append(html.escape(printable_qstrs[i:i+16]))
            node = ownership_graph.get_node(block)
            node.attr["label"] = "<<table border=\"1\" cellspacing=\"0\" bgcolor=\"lightsalmon\" width=\"80\"><tr><td height=\"18\" >0x{:08x}</td></tr><tr><td height=\"{}\" >{}</td></tr></table>>".format(block, 18 * (len(wrapped) - 1), "<br/>".join(wrapped))
            node.attr["fontname"] = "FiraCode-Medium"
            node.attr["fontpath"] = "/Users/tannewt/Library/Fonts/"
            node.attr["fontsize"] = 8

        print("Total free space:", BYTES_PER_BLOCK * total_free)
        print("Longest free space:", BYTES_PER_BLOCK * longest_free)

        # First render the graph of objects on the heap.
        if draw_heap_ownership:
            ownership_graph.layout(prog="dot")
            fn = os.path.join(output_directory, "heap_ownership{:04d}.png".format(snapshot_num))
            print(fn)
            ownership_graph.draw(fn)

        # Second, render the heap layout in memory order.
        for node in ownership_graph:
            try:
                address = int(node.name)
            except ValueError:
                ownership_graph.remove_node(node)
                continue
            block = (address - pool_start) // 16
            x = block // 64
            y = 64 - block % 64
            try:
                height = float(node.attr["height"])
            except (KeyError, ValueError):
                height = 0.25
            #print(hex(address), "height", height, y)
            #if address in block_data:
            #    print(hex(address), block, len(block_data[address]), x, y, height)
            node.attr["pos"] = "{},{}".format(x * 80, (y - (height - 0.25) * 2) * 18) # in inches

        # Clear edge positioning from ownership graph layout.
        if draw_heap_ownership:
            for edge in ownership_graph.iteredges():
                del edge.attr["pos"]

        # Reformat block nodes so they are the correct size and do not have keys in them.
        for block in sorted(map_element_blocks):
            try:
                node = ownership_graph.get_node(block)
            except KeyError:
                if block != 0:
                    print("Unable to find memory block for 0x{:08x}. Is there something running?".format(block))
                continue
            #node.attr["fillcolor"] = "gold"
            if block not in block_data:
                continue
            data = block_data[block]
            #print("0x{:08x}".format(block))
            cells = []
            for i in range(len(data) // 8):
                key, value = struct.unpack_from("<II", data, offset=(i * 8))
                if key == MP_OBJ_NULL or key == MP_OBJ_SENTINEL:
                    #print("  <empty slot>")
                    cells.append(("", " "))
                else:
                    #print("  {}, {}".format(format(key), format(value)))
                    cells.append((key, ""))
                    if value in block_data:
                        edge = ownership_graph.get_edge(block, value)
                        edge.attr["tailport"] = str(key)
            rows = ""
            for i in range(len(cells) // 2):
                rows += "<tr><td port=\"{}\" height=\"18\" width=\"40\">{}</td><td port=\"{}\" height=\"18\" width=\"40\">{}</td></tr>".format(
                    cells[2*i][0],
                    cells[2*i][1],
                    cells[2*i+1][0],
                    cells[2*i+1][1])
            node.attr["label"] = "<<table bgcolor=\"gold\" border=\"1\" cellpadding=\"0\" cellspacing=\"0\">{}</table>>".format(rows)


        ownership_graph.add_node("center", pos="{},{}".format(total_width // 2 - 40, total_height // 2), shape="plaintext", label=" ")
        ownership_graph.graph_attr["viewport"] = "{},{},1,{}".format(total_width, total_height, "center")

        ownership_graph.has_layout = True

        if draw_heap_layout:
            fn = os.path.join(output_directory, "heap_layout{:04d}.png".format(snapshot_num))
            print(fn)
            ownership_graph.draw(fn)
Example #24
 def get_nxgraph_from_gviz(gviz):
     graph = pgv.AGraph(gviz.source)
     nxgraph = nx.nx_agraph.from_agraph(graph)
     return nxgraph
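
A minimal sketch of how the helper above might be called (not from the original source): it assumes gviz is any object exposing DOT text through a .source attribute, for instance a graphviz.Digraph, which is used here only as an illustrative stand-in.

import graphviz
import networkx as nx
import pygraphviz as pgv

gviz = graphviz.Digraph()        # hypothetical producer of the DOT source
gviz.edge("a", "b")
gviz.edge("b", "c")

nxgraph = get_nxgraph_from_gviz(gviz)
print(list(nxgraph.nodes()))     # expected: ['a', 'b', 'c']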
import networkx as nx 
from networkx.drawing.nx_agraph import write_dot
import pygraphviz as pgv 
import os 

G = nx.Graph(pgv.AGraph("network.dot"))

color = {"METABOLIC": "red", "OTHER_RNA": "green", "TXNFACTOR": "blue", "PRE_TRNA": "yellow"}

for n in G.nodes():
    if G.nodes[n]['type'] in color:
        G.nodes[n]['color']=color[G.nodes[n]['type']]
    else:
        G.nodes[n]['color']='black'
    G.nodes[n]['width']=min( G.degree(n)/15, 1.5)
# color an edge the same as its nodes when both endpoints share the same type
for e in G.edges():
    if G.nodes[e[0]]['type'] == G.nodes[e[1]]['type']:
        G.edges[e]['color']=G.nodes[e[0]]['color']

out_file_name="network5.dot"
write_dot(G, out_file_name)
os.system("sfdp -Goverlap=prism -Nshape=point -Goutputorder=edgesfirst -Tsvg "+out_file_name+" -O")
Example #26
def test_node_attribute_update():
    A = pgv.AGraph()
    A.add_node(1, label="test", spam="eggs")
    A.add_node(1, label="updated")
    ans = """strict graph { node [label="\\N"]; 1 [label=updated, spam=eggs]; }"""
    assert stringify(A) == ans
Example #27
from lst26 import *

import pygraphviz as pgv

G = pgv.AGraph(strict=False, directed=False)

G.graph_attr['rankdir'] = 'LR'
G.node_attr['shape'] = 'circle'

U = sorted(UA.keys())
for i in range(0, len(U)-1):
    for j in range(i+1, len(U)):
        ui = U[i]
        uj = U[j]
        x = len(UA[ui] & UA[uj])
        if x > 0:
            G.add_edge(ui, uj, label=x)

G.draw('graph.png', prog='dot')
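
UA is imported from lst26, which is not shown; the stand-in below is hypothetical and only assumes what the loop above relies on, namely that UA maps each key to a set.

# Purely illustrative replacement for `from lst26 import *` (lst26 is not shown):
UA = {
    'alice':   {'jazz', 'rock'},
    'bob':     {'jazz', 'blues'},
    'charlie': {'metal'},
}
# With this data the loop above adds a single edge alice -- bob labelled 1,
# because their sets share exactly one element ('jazz').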
Example #28
File: parse.py  Project: Roboy/pysdf
 def plot_to_file(self, plot_filename, prefix=''):
   import pygraphviz as pgv
   graph = pgv.AGraph(directed=True)
   self.plot(graph, prefix)
   graph.draw(plot_filename, prog='dot')
Example #29
 def test_neighbors(self):
     A = pgv.AGraph()
     A.add_edges_from([(1, 2), (2, 3)])
     assert_equal(sorted(A.neighbors(2)), ['1', '3'])
     assert_equal(sorted(A.neighbors_iter(2)), ['1', '3'])
     assert_equal(sorted(A.iterneighbors(2)), ['1', '3'])
Example #30
        val = getattr(node, attr)
        node_label += '\n{}: {}'.format(attr, val)

    if isinstance(node, c_ast.Assignment):
        color = 'green'
    elif isinstance(node, c_ast.Return):
        color = 'red'
    elif isinstance(node, c_ast.FuncDef):
        if node.decl.name == 'main':
            color = 'yellow'

    add_node(node_id, node_label, parent_id, edge_label, color)

    for child in node.children():
        parse_node(child[1], node_id, child[0], color)


filename = sys.argv[1]

ast = pycparser.parse_file(filename, use_cpp=True)

graph = pgv.AGraph()

counter = 0

parse_node(ast)

graph.layout('dot')
graph.write(filename + '.dot')
graph.draw(filename + '.png')
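
The opening of this last example (its import block and the add_node helper that parse_node calls) is cut off above. A hypothetical stand-in, consistent with how those names are used but not taken from the original code, could look like this:

import sys                      # assumed imports; the snippet's own import block is missing
import pycparser
from pycparser import c_ast
import pygraphviz as pgv

def add_node(node_id, node_label, parent_id=None, edge_label=None, color=None):
    # Hypothetical helper: add one AST node to the global graph and, when a parent
    # id is given, draw an edge from that parent to this node.
    graph.add_node(node_id, label=node_label, style='filled',
                   fillcolor=color if color else 'white')
    if parent_id is not None:
        graph.add_edge(parent_id, node_id, label=edge_label if edge_label else '')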