Example #1
# Note: this snippet targets the NetworkX 1.x API, where G.in_degree() and
# G.out_degree() return plain dicts mapping node -> degree.
import random

from networkx import MultiDiGraph


def scale_free_graph(n, alpha=0.41, beta=0.54, delta_in=0.2, delta_out=0):
    def _choose_node(G, distribution, delta):
        cumsum = 0.0
        psum = float(sum(
            distribution.values())) + float(delta) * len(distribution)
        r = random.random()
        for i in range(0, len(distribution)):
            cumsum += (distribution[i] + delta) / psum
            if r < cumsum:
                break
        return i

    G = MultiDiGraph()
    G.add_edges_from([(0, 1), (1, 2), (2, 0)])
    gamma = 1 - alpha - beta

    while len(G) < n:
        r = random.random()
        if r < alpha:
            v = len(G)
            w = _choose_node(G, G.in_degree(), delta_in)
        elif r < alpha + beta:
            v = _choose_node(G, G.out_degree(), delta_out)
            w = _choose_node(G, G.in_degree(), delta_in)
        else:
            v = _choose_node(G, G.out_degree(), delta_out)
            w = len(G)
        G.add_edge(v, w)
    return G
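The snippet above is a trimmed-down copy of NetworkX's own directed scale-free generator (the full library version, including the gamma and seed parameters, appears in Example #34 below). As a minimal usage sketch, shown against the maintained nx.scale_free_graph rather than the excerpt itself:

import networkx as nx

# Build a 100-node directed scale-free multigraph with the library generator.
G = nx.scale_free_graph(100, alpha=0.41, beta=0.54, gamma=0.05,
                        delta_in=0.2, delta_out=0, seed=42)

# The result is a MultiDiGraph; in-degrees are expected to be heavy-tailed.
print(type(G).__name__, G.number_of_nodes(), G.number_of_edges())
print(sorted(dict(G.in_degree()).values(), reverse=True)[:5])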
Example #3
def _insert_pooling(graph: nx.MultiDiGraph, first_node: Node,
                    second_node: Node, spatial_dims):
    """
    This function inserts a point-wise pooling layer between two nodes.
    """
    log.debug("STRIDE PROP: Insert pooling between {} and {}".format(
        first_node.name, second_node.name))
    stride_prop = second_node.stride_prop
    assert len(graph.get_edge_data(first_node.id, second_node.id)) == 1
    eattrs = graph.get_edge_data(first_node.id, second_node.id)[0]
    graph.remove_edge(first_node.id, second_node.id)

    pooling = Pooling(
        graph,
        dict(name='Pooling_',
             spatial_dims=spatial_dims,
             window=np.array([1, 1, 1, 1]),
             output_spatial_shape=None,
             stride=np.array(stride_prop),
             pad_spatial_shape=np.array([[0, 0], [0, 0]]),
             pad=np.array([[0, 0], [0, 0], [0, 0], [0, 0]]),
             pool_method='max',
             is_partial_inferred=False))
    pooling_data = pooling.create_node_with_data([first_node])

    _clean_fw_tensor_attrs(pooling_data)

    graph.add_edges_from([(pooling_data.id, second_node.id, eattrs)])
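The edge bookkeeping in _insert_pooling (save the edge data, remove the edge, wire the new node in with the saved attributes) is a generic MultiDiGraph splicing pattern; a stripped-down sketch without any of the Model Optimizer classes:

import networkx as nx

G = nx.MultiDiGraph()
G.add_edge("first", "second", weight=3)

# Save the attributes of the single first->second edge, then cut it.
eattrs = G.get_edge_data("first", "second")[0]
G.remove_edge("first", "second")

# Splice a new node in between, re-attaching the saved attributes on the outgoing side.
G.add_edge("first", "pool")
G.add_edges_from([("pool", "second", eattrs)])
print(list(G.edges(data=True)))
# [('first', 'pool', {}), ('pool', 'second', {'weight': 3})]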
Example #4
def duplicate_shared_weights(graph: nx.MultiDiGraph):
    """
    This function finds all const data nodes that have more than one consumer and then duplicates them.
    """
    data_nodes = [
        Node(graph, id) for id in graph.nodes()
        if Node(graph, id).soft_get('kind') == 'data'
    ]
    for node in data_nodes:
        # Check that node has const values and more than one consumer
        if len(node.out_nodes()) > 1 and node.value is not None:
            # Here we delete all edges between the base node and its consumers (except the first),
            # and then duplicate the node to connect it to the other consumers
            while len(node.out_nodes()) > 1:
                out_node = node.out_node(1)

                if len(graph.get_edge_data(node.id, out_node.id)) != 1:
                    raise Error(
                        'There is more than one edge from {} node to {} node.'.
                        format(node.id, out_node.id))
                e_attrs = graph.get_edge_data(node.id, out_node.id)[0]

                graph.remove_edge(node.id, out_node.id)
                data = Op.create_input_data_node(graph,
                                                 "Copy_{}".format(node.id),
                                                 np.array(node.value),
                                                 graph.node[node.id])

                graph.add_edges_from([(data.id, out_node.id, e_attrs)])
Example #5
    def replace_pattern(self, graph: nx.MultiDiGraph, match: dict):
        relu = match['relu']
        reshape1 = match['reshape1']
        reshape2_data = match['reshape2_data']
        conv = match['conv']

        if np.max(conv.pad) == 0:
            return

        relu_input = relu.in_node()

        # Disconnect InputData-x->ReLU->Data-x->Reshape1
        edge_attrs = graph.get_edge_data(relu.out_node().id, reshape1.id)[0]
        graph.remove_edge(relu_input.id, relu.id)
        graph.remove_edge(relu.out_node().id, reshape1.id)

        # Connect InputData-->Reshape1
        graph.add_edges_from([(relu_input.id, reshape1.id, edge_attrs)])

        # Insert ReLU:  Reshape2Data->ReLU->Data->Convolution
        edge_attrs = graph.get_edge_data(reshape2_data.id, conv.id)[0]
        graph.remove_edge(reshape2_data.id, conv.id)
        graph.add_edges_from([(reshape2_data.id, relu.id, {
            'in': 0
        }), (relu.out_node().id, conv.id, edge_attrs)])
Example #6
    def __init__(self, F, arg):
        """
        If arg is a list:
        Generate the core graph corresponding to the group generated by group_gens.
        If arg is a graph:
        Check for validity and do all folds.
        """
        assert is_FreeGroup(F), "F must be a free group"
        self.F = F
        self.F_rank = F.rank()
        self.letter_types = range(1, self.F_rank + 1)
        self.letters = list(range(-self.F_rank, 0)) + list(range(1, self.F_rank + 1))
        # -r, ..., -1, 1, ..., r

        if isinstance(arg, list):
            group_gens = arg
            assert all([gen in F for gen in group_gens]), "The generators must be elements of F."
            self.group_gens = group_gens

            G = MultiDiGraph()
            G.add_node((0,))  # the marked vertex (id)
            for i, gen in enumerate(self.group_gens):
                word = gen.Tietze()
                word_len = len(word)
                # new_nodes = [(i, j) for j in range(1, word_len)]
                # G.add_nodes_from(new_nodes)
                get_node = lambda j: (0,) if (j % word_len == 0) else (i, j)
                for j in range(word_len):
                    G.add_edge(get_node(j), get_node(j + 1), label=word[j])
                    G.add_edge(get_node(j + 1), get_node(j), label=-word[j])

        elif isinstance(arg, MultiDiGraph):
            # We are going to copy the graph, add reverse edges when needed,
            # and sort the edges.
            # The reason we sort the edges is to get a "canonical" version
            # of the object, so subgroup_gens would be the same in different
            # objects with the same graph.
            G = MultiDiGraph()
            G.add_nodes_from(arg.nodes())
            edges = arg.edges(data='label')
            G_edges = [e for e in edges]
            assert len(edges) == len(arg.edges()), "Every edge has to be labelled."
            for src, dst, letter in edges:
                assert letter in self.letters, \
                    f"The edge betwen {src} and {dst} has an invalid label"
                if (dst, src, -letter) not in G.edges(data='label'):
                    G_edges.append((dst, src, -letter))
            G.add_weighted_edges_from(sorted(G_edges), weight='label')

        else:
            raise ValueError("arg must be a list of words or a MultiDiGraph.")

        self.G = do_all_folds(G)

        # The subgraph of positive edges
        G_pos = MultiDiGraph()
        G_pos.add_edges_from([e for e in self.G.edges(data=True) if e[2]['label'] > 0])
        self.G_pos = G_pos

        self.subgroup_gens = tuple(sorted(self.get_subgroup()))
Example #7
def build_graph(nodes: List[Node], edges: List[Edge],
                **kwargs) -> MultiDiGraph:
    force_rebuild = kwargs.get("force_rebuild", False)
    save_checkpoint = kwargs.get("save_checkpoint", True)

    if not force_rebuild:
        if os.path.exists(GRAPH_CHECKPOINT):
            return load_graph()
        log.info("Graph checkpoint does not exist, building graph.")

    graph = MultiDiGraph()
    graph.add_nodes_from(nodes)

    ebunch = list(map(lambda x: (x.source, x.destination, x), edges))
    graph.add_edges_from(ebunch)

    assert graph.number_of_nodes() == len(nodes)
    assert graph.number_of_edges() == len(edges)

    if save_checkpoint:
        log.info("Checkpointing graph...")
        with open(GRAPH_CHECKPOINT, "wb") as file:
            pickle.dump(graph, file)

    return graph
Example #8
 def __add__(self, o):
     if not isinstance(o, MossNet):
         raise TypeError("unsupported operand type(s) for +: 'MossNet' and '%s'" % type(o).__name__)
     g = MultiDiGraph()
     g.add_edges_from(list(self.graph.edges(data=True)) + list(o.graph.edges(data=True)))
     g.add_nodes_from(list(self.graph.nodes(data=True)) + list(o.graph.nodes(data=True)))
     return MossNet(g)
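The same node-and-edge union pattern works for any two MultiDiGraphs; a self-contained sketch with plain networkx graphs (no MossNet involved):

import networkx as nx

g1 = nx.MultiDiGraph()
g1.add_edge("a", "b", weight=1)
g2 = nx.MultiDiGraph()
g2.add_edge("b", "c", weight=2)

# Union of the two graphs: copy every edge (with data) and every node (with data).
merged = nx.MultiDiGraph()
merged.add_edges_from(list(g1.edges(data=True)) + list(g2.edges(data=True)))
merged.add_nodes_from(list(g1.nodes(data=True)) + list(g2.nodes(data=True)))
print(sorted(merged.nodes()), merged.number_of_edges())  # ['a', 'b', 'c'] 2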
Example #9
File: op.py Project: pc2/CustoNN2
    def create_data_node(graph: nx.MultiDiGraph,
                         op_node: Node,
                         attrs: dict = None,
                         edge_attrs: dict = None):
        assert op_node is not None and op_node.kind == 'op'
        assert len(op_node.out_nodes()) == 0
        if attrs is None:
            attrs = {}

        data_node = unique_id(graph, op_node.id)
        defaul_attrs = dict(kind='data',
                            precision="FP32",
                            name=data_node,
                            value=None,
                            shape=None,
                            data_type=None,
                            infer=None)
        defaul_attrs.update(attrs)
        graph.add_node(data_node, **add_attrs_props(defaul_attrs))
        data_node = Node(graph, data_node)
        if edge_attrs is not None:
            graph.add_edges_from([(op_node.id, data_node.id, {
                'out': 0,
                **edge_attrs
            })])
        else:
            graph.add_edges_from([(op_node.id, data_node.id, {'out': 0})])
        return data_node
Example #10
    def replace_sub_graph(self, graph: nx.MultiDiGraph, match: dict):
        node = match['op']

        if not node.has_valid('bias') or (node.has_valid('bias')
                                          and node.bias == 1):
            return

        # Calculate scale value & create Const op
        scale_value = np.array(1. / (pow(node.bias, node.beta)))
        node.alpha /= node.bias
        const_node = Const(graph,
                           dict(value=scale_value, shape=scale_value.shape))

        # Get all outputs for LRN layer
        out_nodes = [node for node in node.out_nodes().values()]

        # Create Mul node with inputs
        mul_node = Mul(graph, dict(name=node.id + "/Mul_"))
        mnode = mul_node.create_node(inputs=[node, const_node.create_node()])

        # Move edges from LRN to Mul node
        for out_node in out_nodes:
            edge_attrs = graph.get_edge_data(node.id, out_node.id)[0]
            graph.remove_edge(node.id, out_node.id)
            graph.add_edges_from([(mnode.id, out_node.id, edge_attrs)])
Example #11
    def do_interprocedural_analyze_without_slice_criteria(
            self, to_graph: nx.MultiDiGraph):
        """
        The graph contains calls to external functions that contain no slicing
        criteria; the called function's PDG is grafted directly into the
        original graph representation.
        function A {
            call function B  -- expanded inline
        }

        """

        for node_id in to_graph.nodes:

            # Note: the current node is a function call and needs to be expanded inline
            node_info = to_graph.nodes[node_id]
            if "called" in node_info:

                print("internal call: {}".format(node_info))

                fid = node_info["called"][0]
                function_info = self.contract_info.get_function_info_by_fid(
                    fid)
                if function_info is None:
                    called_function = self.contract_info.get_function_by_fid(
                        fid)
                    function_info = FunctionInfo(self.contract_info,
                                                 called_function)

                # name of the called function
                called_function_name = function_info.name

                # run the full analysis pipeline; this could be optimized
                control_flow_analyzer = ControlFlowAnalyzer(
                    self.contract_info, function_info)
                data_flow_analyzer = DataFlowAnalyzer(self.contract_info,
                                                      function_info)
                inter_analyzer = InterproceduralAnalyzer(
                    self.contract_info, function_info)
                graph_constructor = CodeGraphConstructor(
                    self.contract_info, function_info)

                control_flow_analyzer.do_control_dependency_analyze()  # control-flow analysis
                data_flow_analyzer.do_data_semantic_analyze()  # data-semantics analysis
                inter_analyzer.do_interprocedural_analyze_for_state_def(
                )  # interprocedural data-flow analysis of state variables
                function_info.construct_dependency_graph(
                )  # after semantic analysis, enrich the data in preparation for slicing

                graph = graph_constructor.do_code_create_without_slice()  # build the graph

                print("merging internal function: merge {} to {}".format(
                    graph.graph["name"], to_graph.graph["name"]))
                graph, removed_semantic_edges = do_prepare_before_merge(
                    graph, called_function_name)
                to_graph = do_merge_graph1_to_graph2(graph, to_graph,
                                                     node_id)  # expand the internal function call
                to_graph.add_edges_from(removed_semantic_edges)

        return to_graph
Example #12
def add_reshape_before_op_node(graph: nx.MultiDiGraph, data_node_name: str,
                               op_node_name: str, edge_attrs: dict):
    """
    Adds a Reshape operation which expands the dimension of the specified data tensor to 4D.
    :param graph: graph to operate on.
    :param data_node_name: the name of the data node to be reshaped to a 4D tensor.
    :param op_node_name: name of the TFCustomSubgraphCall node which consumes the tensor.
    :param edge_attrs: edge attributes which should be preserved.
    :return: None
    """
    data_node = Node(graph, data_node_name)

    graph.remove_edge(data_node_name, op_node_name)

    assert data_node['shape'] is not None

    new_shape = make_shape_4d(data_node['shape'])

    # reshape shape data node
    reshape_shape_data_node_name = unique_id(graph, "Reshape_shape_")
    graph.add_node(reshape_shape_data_node_name,
                   kind='data',
                   precision="FP32",
                   name=reshape_shape_data_node_name,
                   value=new_shape,
                   shape=[1])

    # reshape operation node
    reshape_node_name = unique_id(graph, "Reshape_")
    graph.add_node(reshape_node_name,
                   kind='op',
                   precision="FP32",
                   type='Reshape',
                   name=reshape_node_name,
                   op='Reshape',
                   data_type=data_node['data_type'])
    update_ie_fields(graph.node[reshape_node_name])

    # reshaped data node
    reshaped_value = None
    if data_node['value'] is not None:
        reshaped_value = np.reshape(data_node['value'], new_shape)
    reshaped_data_node_name = unique_id(graph, "reshaped_data_")
    graph.add_node(reshaped_data_node_name,
                   kind='data',
                   precision="FP32",
                   name=reshaped_data_node_name,
                   shape=new_shape,
                   value=reshaped_value,
                   nchw_layout=True)

    graph.add_edges_from([(data_node_name, reshape_node_name, {
        'in': 0
    }), (reshape_shape_data_node_name, reshape_node_name, {
        'in': 1
    }), (reshape_node_name, reshaped_data_node_name, {
        'out': 0
    }), (reshaped_data_node_name, op_node_name, edge_attrs)])
Example #13
def query_inductions(sample: nx.MultiDiGraph, edge_queries: List[List[Edge]],
                     ownership: Ownership):
    remote_queries = mpi.comm.alltoall(edge_queries)
    answers = [
        list(filter(lambda e: e[1] in ownership, q)) for q in remote_queries
    ]
    remote_answers = mpi.comm.alltoall(answers)
    for remote_answer in remote_answers:
        sample.add_edges_from(remote_answer)
Example #14
def remove_useless_split_action(graph: nx.MultiDiGraph, matches: dict):
    split_node = matches['split']
    input = split_node.in_node(1)
    output = split_node.out_node()
    graph.remove_edge(input.id, split_node.id)

    for u, v, d in list(graph.out_edges(output.id, data=True)):
        graph.add_edges_from([(input.id, v, d)])
        graph.remove_edge(u, v)
Example #15
def combine_dots(dotlist,sciid):
    g1=drawing.nx_agraph.read_dot(TMP+dotlist[-1])
    g = MultiDiGraph()
    for d in dotlist:
        g2=drawing.nx_agraph.read_dot(TMP + d)
        g1=compose(g1,g2)
    g.add_nodes_from(g1.nodes(data=True))
    g.add_edges_from(g1.edges(data=True))
    g.to_directed()
    g = nx_agraph.to_agraph(g)
    g.write(TMP+'-'.join(sciid)+'.dot')
Example #16
def combine_dots(dotlist, mathid):
    g1 = drawing.nx_agraph.read_dot(TMP + dotlist[-1])
    g = MultiDiGraph()
    for d in dotlist:
        g2 = drawing.nx_agraph.read_dot(TMP + d)
        g1 = compose(g1, g2)
    g.add_nodes_from(g1.nodes(data=True))
    g.add_edges_from(g1.edges(data=True))
    g.to_directed()
    g = nx_agraph.to_agraph(g)
    g.write(TMP + '-'.join(mathid) + '.dot')
Example #17
def sst(n):
    S = MultiDiGraph()
    morphisms = [('X' + str(m + 1), 'X' + str(m), 'd' + str(m) + str(i))
                 for m in range(n) for i in range(m + 2)]
    relations = []
    if n > 1:
        relations = [(('X'+str(m+2),'X'+str(m),'d'+str(m+1)+str(j)+'d'+str(m)+str(i)),\
                      ('X'+str(m+2),'X'+str(m),'d'+str(m+1)+str(i)+'d'+str(m)+str(j-1)))
                     for m in range(n-1) for j in range(1,m+3) for i in range(j)]
    S.add_edges_from(morphisms)
    return fic(S, relations)
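In the morphism tuples above the third element is a plain string such as 'd00'; MultiDiGraph.add_edges_from treats a non-dict third element as the edge key, which is what keeps the parallel face maps distinguishable. A two-edge sketch of that behavior:

import networkx as nx

S = nx.MultiDiGraph()
# A non-dict third element becomes the edge key, so the two parallel
# X1 -> X0 morphisms stay separate, keyed by their labels.
S.add_edges_from([('X1', 'X0', 'd00'), ('X1', 'X0', 'd01')])
print(list(S.edges(keys=True)))  # [('X1', 'X0', 'd00'), ('X1', 'X0', 'd01')]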
Example #18
def combine_dots(dotlist):
    g1=drawing.nx_agraph.read_dot('dots/'+dotlist[-1])
    g = MultiDiGraph()
    for i in xrange(len(dotlist)):
        g2=drawing.nx_agraph.read_dot('dots/'+dotlist[i])
        g1=compose(g1,g2)
    g.add_nodes_from(g1.nodes(data=True))
    g.add_edges_from(g1.edges(data=True))
    g.to_directed()
    g = nx_agraph.to_agraph(g)
    g.write('dots/combined.dot')
Example #19
def convert_add_to_scaleshift(graph: nx.MultiDiGraph):
    for n in list(graph.nodes()):
        node = Node(graph, n)
        if node.has('op') and (node.op == 'BiasAdd' or node.op
                               == 'Add') and len(node.in_nodes()) == 2:
            tensor_id, value_id = get_tensor_id(node), get_value_id(node)
            if tensor_id is not None and value_id is not None and node.soft_get(
                    'can_be_scaleshift') is not False:
                node['type'] = 'ScaleShift'
                node['op'] = 'ScaleShift'
                node.in_node(value_id).value = np.squeeze(
                    node.in_node(value_id).value)
                node.in_node(value_id).shape = node.in_node(
                    value_id).value.shape

                # if the node was created with eltwise then it has attribute 'operation' which should be removed from
                # the IR
                if node.has('operation'):
                    del graph.node[n]['operation']

                bias_data = node.in_node(value_id)
                graph[bias_data.node][node.node][0]['in'] = 2
                graph[bias_data.node][node.node][0]['bin'] = 'biases'

                input_data = node.in_node(tensor_id)
                graph[input_data.node][node.node][0]['in'] = 0

                update_ie_fields(graph.node[node.id])

                weights_id = unique_id(graph, 'weights_')
                graph.add_node(
                    weights_id,
                    **add_attrs_props(
                        dict(kind='data',
                             precision="FP32",
                             name=weights_id,
                             value=None,
                             shape=None,
                             data_type=None,
                             infer=None)))
                wnode = Node(graph, weights_id)

                wnode['value'] = np.full_like(bias_data.value,
                                              1,
                                              dtype=np.float32)
                wnode['shape'] = np.array(wnode['value'].shape)

                graph.add_edges_from([
                    (weights_id, node.node, {
                        'in': 1,
                        'bin': 'weights'
                    }),
                ])
Example #20
    def generate_edges(graph: nx.MultiDiGraph, transfers_df: pd.DataFrame, period_df: pd.DataFrame) -> None:
        transfers_df = transfers_df[[
            'route_id', 'start_stop_id', 'end_stop_id', 'duration']]
        avg_transfers_df = transfers_df.groupby(['route_id', 'start_stop_id', 'end_stop_id']).mean().reset_index()

        def edge_generator():
            for _, route_id, start_node, end_node, duration in avg_transfers_df.itertuples():
                period = period_df.at[(route_id, 'period')]
                yield start_node, int(end_node), int(route_id), \
                    {'route_id': int(route_id), 'duration': int(duration), 'period': int(period), 'path': []}

        graph.add_edges_from(edge_generator())
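The generator above yields 4-tuples (start, end, key, attrs); on a MultiDiGraph, add_edges_from uses the third element as the edge key and the final dict as the edge data, so parallel routes between the same pair of stops remain distinguishable. A minimal sketch of that form with made-up stop and route ids:

import networkx as nx

graph = nx.MultiDiGraph()
graph.add_edges_from([
    # (u, v, key, data): the route_id doubles as the edge key.
    (100, 101, 7, {"route_id": 7, "duration": 120, "period": 600, "path": []}),
    (100, 101, 9, {"route_id": 9, "duration": 90, "period": 900, "path": []}),
])
print(graph.number_of_edges(100, 101))  # 2
print(graph[100][101][7]["duration"])   # 120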
Example #21
    def to_directed(self):
        """Return a directed representation of the graph.
 
        A new multidigraph is returned with the same name, same nodes and
        with each edge (u,v,data) replaced by two directed edges
        (u,v,data) and (v,u,data).
        
        """
        from networkx import MultiDiGraph 
        G=MultiDiGraph()
        G.add_nodes_from(self)
        G.add_edges_from( ((u,v,data) for u,nbrs in self.adjacency_iter() \
                for v,datalist in nbrs.iteritems() for data in datalist) )
        return G
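This excerpt is the NetworkX 1.x internal implementation (adjacency_iter and dict.iteritems no longer exist under Python 3 / NetworkX 2+, where MultiGraph.to_directed() is provided directly). An equivalent sketch against the current API, assuming a MultiGraph input:

import networkx as nx

def multigraph_to_directed(MG: nx.MultiGraph) -> nx.MultiDiGraph:
    # Replace each undirected edge (u, v, data) with the two arcs (u, v, data) and (v, u, data).
    G = nx.MultiDiGraph()
    G.add_nodes_from(MG.nodes(data=True))
    G.add_edges_from((u, v, data) for u, v, data in MG.edges(data=True))
    G.add_edges_from((v, u, data) for u, v, data in MG.edges(data=True))
    return G

MG = nx.MultiGraph([("a", "b"), ("a", "b")])
print(multigraph_to_directed(MG).number_of_edges())  # 4: each parallel edge, in both directions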
Example #22
    def replace_op(self, graph: nx.MultiDiGraph, node: Node):
        in_node = node.in_node()
        out_nodes = [node for node in node.out_nodes().values()]
        graph.remove_edge(node.in_node().id, node.id)

        scalar_value_op = Const(graph, dict(value=node.scalar, shape=node.scalar.shape, symbol_dict={'name': node.id + '/const'}))
        add_op = Add(graph, dict(name=node.id + '/add_', symbol_dict={'name': node.id + '/add_'}))
        add_node = add_op.create_node(inputs=[in_node, scalar_value_op.create_node()])

        for out_node in out_nodes:
            edge_attrs = graph.get_edge_data(node.id, out_node.id)[0]
            graph.remove_edge(node.id, out_node.id)
            graph.add_edges_from([(add_node.id, out_node.id, edge_attrs)])

        return [add_node.id]
Example #23
def nebula2networkx(client: GraphClient, nebula_space: str,
                    graph: nx.MultiDiGraph, vertex_list, edge_types):
    do_simple_execute(client, 'use ' + nebula_space)
    yield_statement = ",".join(
        [edge_type + "._dst" for edge_type in edge_types])
    seen_vertex = set()
    queue_vertex = []
    all_edges = {}
    for v in vertex_list:
        queue_vertex.insert(0, v)
    while len(queue_vertex):
        vertex = queue_vertex.pop()
        seen_vertex.add(vertex)
        get_edge_go_statement = "GO FROM {} OVER {} YIELD ".format(
            vertex, ','.join(edge_types)) + yield_statement
        edges_resp = client.execute_query(get_edge_go_statement)
        edges = [[] for _ in edge_types]
        if edges_resp.rows is not None:
            for row in edges_resp.rows:
                for ids, col in enumerate(row.columns):
                    if (col.getType()
                            == ttypes.ColumnValue.ID) and col.get_id() != 0:
                        edges[ids].append(col.get_id())
                        if col.get_id() not in seen_vertex:
                            seen_vertex.add(col.get_id())
                            queue_vertex.insert(0, col.get_id())
        all_edges[vertex] = edges
        # build networkX graph Node
        vertex_info_resp = fetch_info(client, "* " + str(vertex))
        vertex_info = handle_fetch_resp(vertex_info_resp)
        graph.add_node(vertex,
                       **vertex_info[0] if len(vertex_info) > 0 else {})

    # build networkX graph Edge
    for vertex_src, edges in all_edges.items():
        for edge_type_ids, vertexs_dst in enumerate(edges):
            if len(vertexs_dst) != 0:
                edge_info_fetch_statement = edge_types[
                    edge_type_ids] + ' ' + ','.join([
                        str(vertex_src) + "->" + str(dst)
                        for dst in vertexs_dst
                    ])

                edges_info = handle_fetch_resp(
                    fetch_info(client, edge_info_fetch_statement))
                graph.add_edges_from([(vertex_src, vertexs_dst[i],
                                       edges_info[i])
                                      for i in range(len(edges_info))])
Example #24
 def replace_sub_graph(self, graph: nx.MultiDiGraph, match: dict):
     ph = match['placeholder']
     if ph.name in self.replacement_dict:
         name = ph.name
         if ph.has_and_set('data_type'):
             data_type = ph.data_type
         else:
             data_type = SUPPORTED_DATA_TYPES[
                 graph.graph['cmd_params'].data_type][0]
         string_value = self.replacement_dict[name]
         try:
             if data_type != np.bool:
                 value = np.array(string_value, dtype=data_type)
             elif data_type == np.bool and graph.graph['fw'] == 'tf':
                 from mo.front.tf.common import tf_data_type_cast
                 if isinstance(string_value, list):
                     casted_list = list()
                     for v in np.array(string_value):
                         casted_list.append(
                             tf_data_type_cast[ph.data_type](v))
                     value = np.array(string_value, dtype=data_type)
                 else:
                     value = tf_data_type_cast[ph.data_type](string_value)
             else:
                 raise Error("Can not cast value {} to {} data_type".format(
                     string_value, data_type))
         except:
             raise Error("Can not cast value {} to {} data_type".format(
                 string_value, data_type))
         try:
             value = np.reshape(a=value, newshape=ph.shape)
         except:
             raise Error("Can not reshape value {} to shape {}".format(
                 value, ph.shape))
         out_edges = list(graph.out_edges(ph.id, data=True))
         new_node = Const(graph).create_node(
             attrs={
                 'value': value,
                 'data_type': type(value),
                 'name': name + '/const_placeholder',
                 'shape': ph.shape
             })
         erase_node(ph)
         graph.add_edges_from([(new_node.id, v, attrs)
                               for u, v, attrs in out_edges])
         log.info(
             "Placeholder node \"{}\" was replaced with Const node \"{}\" with value \"{}\""
             .format(name, new_node.name, value))
Example #25
    def add_to_graph(self, G: nx.MultiDiGraph) -> None:
        """
        Adds nodes for the given relative dating to the graph.

        Args:
            G: the graph to work on
        """
        G.add_nodes_from(self.items)
        for source in self.sources:
            G.add_edges_from(pairwise(self.items),
                             kind=self.kind,
                             source=source,
                             comments=self.comments,
                             dating=self,
                             xml=self.xmlsource,
                             ignore=self.ignore)
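Keyword arguments passed to add_edges_from (kind, source, comments, ... above) are attached to every edge in the bunch, which is what lets the whole pairwise chain of items share one set of attributes. A small illustration with made-up values, using itertools.pairwise (Python 3.10+) in place of whatever pairwise helper the project imports:

import networkx as nx
from itertools import pairwise  # pairwise("wxyz") -> wx, xy, yz

G = nx.MultiDiGraph()
items = ["w1", "w2", "w3"]
# Every edge of the chain w1 -> w2 -> w3 receives the same attribute dict.
G.add_edges_from(pairwise(items), kind="temp-pre", source="witness-A")
print(list(G.edges(data=True)))
# [('w1', 'w2', {'kind': 'temp-pre', 'source': 'witness-A'}),
#  ('w2', 'w3', {'kind': 'temp-pre', 'source': 'witness-A'})]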
Example #26
def simplify_state(state: SDFGState,
                   remove_views: bool = False) -> MultiDiGraph:
    """
    Returns a networkx MultiDiGraph object that contains all the access nodes
    and corresponding edges of an SDFG state. The removed code nodes and map
    scopes are replaced by edges that connect their ancestor and successor access
    nodes.
    :param state: The input SDFG state.
    :return: The MultiDiGraph object.
    """

    sdfg = state.parent

    # Copy the whole state
    G = MultiDiGraph()
    for n in state.nodes():
        G.add_node(n)
    for n in state.nodes():
        for e in state.all_edges(n):
            G.add_edge(e.src, e.dst)
    # Collapse all mappings and their scopes into one node
    scope_children = state.scope_children()
    for n in scope_children[None]:
        if isinstance(n, nodes.EntryNode):
            G.add_edges_from([(n, x)
                              for (y, x) in G.out_edges(state.exit_node(n))])
            G.remove_nodes_from(scope_children[n])
    # Remove all nodes that are not AccessNodes or have incoming
    # wcr edges and connect their predecessors and successors
    for n in state.nodes():
        if n in G.nodes():
            if (not isinstance(n, nodes.AccessNode) or
                (remove_views and isinstance(sdfg.arrays[n.data], data.View))):
                for p in G.predecessors(n):
                    for c in G.successors(n):
                        G.add_edge(p, c)
                G.remove_node(n)
            else:
                for e in state.all_edges(n):
                    if e.data.wcr is not None:
                        for p in G.predecessors(n):
                            for s in G.successors(n):
                                G.add_edge(p, s)
                        G.remove_node(n)
                        break

    return G
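The core contraction step used here (drop a node but keep connectivity by linking every predecessor to every successor) can be shown on a plain MultiDiGraph, independent of the SDFG machinery:

import networkx as nx

G = nx.MultiDiGraph([("A", "tasklet"), ("tasklet", "B"), ("tasklet", "C")])
n = "tasklet"
# Connect every predecessor of n to every successor of n, then drop n itself.
for p in G.predecessors(n):
    for c in G.successors(n):
        G.add_edge(p, c)
G.remove_node(n)
print(sorted(G.edges()))  # [('A', 'B'), ('A', 'C')]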
Example #27
def dataToGraphCSV(data,
                   graph_type,
                   save_filename=None,
                   graph_lib='graph_tool'):
    """
    Take our pandas dataframe containing all of our edge data and create the desired graph output.
    :param data:
        pandas data containing three columns, tweet id, author id, influenced user id
    :param graph_type:
    :param save_filename:
    :param graph_lib:
    :return:
    """
    # now that we have our data, we can output
    if graph_lib == 'graph_tool':
        G = gt.Graph(directed=True)
        G.vertex_properties['user_id'] = G.new_vertex_property('int64_t')
        G.edge_properties['tweet_id'] = G.new_edge_property('int64_t')

        edge_list = np.array(
            [(infl_uid, auth_uid, tweet_id)
             for tweet_id, infl_uid, auth_uid in data.iterrows()],
            dtype=np.int64)

        G.vp.user_id = G.add_edge_list(edge_list,
                                       hashed=True,
                                       eprops=[G.ep.tweet_id])

        if save_filename is not None:
            G.save(save_filename)

    elif graph_lib == 'networkx':
        G = MultiDiGraph(graph_type=', '.join(graph_type))

        G.add_edges_from([(infl_uid, auth_uid, {
            'tweet_id': tweet_id
        }) for tweet_id, infl_uid, auth_uid in data.iterrows()])

        if save_filename is not None:
            write_graphml(G, save_filename)

    elif graph_lib == 'edge_list':
        G = np.array([(infl_uid, auth_uid, tweet_id)
                      for idx, (tweet_id, created, auth_uid,
                                infl_uid) in data.iterrows()],
                     dtype=np.int64)
    return G
Example #28
def to_networkx(network: Network) -> Any:

    from networkx import Graph, DiGraph, MultiDiGraph, MultiGraph

    if network.directed and network.multiedges:
        G = MultiDiGraph()
    elif not network.directed and network.multiedges:
        G = MultiGraph()
    elif network.directed and not network.multiedges:
        G = DiGraph()
    else:
        G = Graph()
    G.add_nodes_from([(v, network.nodes[v].attributes)
                      for v in network.nodes.uids])
    G.add_edges_from([(network.edges[e].v.uid, network.edges[e].w.uid,
                       network.edges[e].attributes)
                      for e in network.edges.uids])
    return G
Example #29
def get_graph(model_spec, debug=False):
    '''
        Builds a networkx representation of the graph.
    '''

    graph = MultiDiGraph()

    graph.add_nodes_from(
        get_nodes(model_spec['edges']) + model_spec['legs']['in'].keys() +
        model_spec['legs']['out'].keys())

    graph.add_edges_from([(edge['source'], edge['target'], {
        'operator': edge['operator']
    }) for edge in model_spec['edges']])

    if debug:
        draw_graph(graph)

    return graph
Example #30
    def extend_graph(self, graph: nx.MultiDiGraph, stops_df: pd.DataFrame) -> nx.MultiDiGraph:
        first_stops = stops_df[['is_first']].reset_index('stop_id')
        first_stops = list(first_stops.drop_duplicates('stop_id')['stop_id'])
        start_routes = {}
        for first, second, route in graph.edges:
            if first not in start_routes:
                start_routes[first] = set()
            start_routes[first].add(route)

        for start_node in first_stops:
            if start_node not in start_routes:
                # TODO temporary fix, nodes with names PH and PT aren't in the dict
                continue
            for route_id in start_routes[start_node]:
                edges = self.get_edges_data_from(graph, start_node, route_id)
                for edge in edges:
                    _, _, first_neighbour, first_duration, period = edge
                    graph.add_edges_from(self.extended_edges_generator(
                        graph, start_node, first_neighbour, first_duration, route_id, period))
        return graph
Example #31
 def replace_sub_graph(self, graph: nx.MultiDiGraph, match: dict):
     node = match['softmax']
     if 'temperature' in node and node['temperature'] != 1.0:
         in_node = node.in_node()
         out_nodes = [node for node in node.out_nodes().values()]
         graph.remove_edge(node.in_node().id, node.id)
         temperature = np.array([1.0 / node.temperature])
         scalar_value_op = Const(
             graph,
             dict(value=temperature,
                  shape=temperature.shape,
                  symbol_dict={'name': node.id + '/const'}))
         mul_op = Mul(
             graph,
             dict(name=node.id + '/mul_',
                  symbol_dict={'name': node.id + '/mul_'}))
         mul_node = mul_op.create_node(
             inputs=[in_node, scalar_value_op.create_node()])
         edge_attrs = graph.get_edge_data(node.id, out_nodes[0].id)[0]
         graph.add_edges_from([(mul_node.id, node.id, edge_attrs)])
Example #32
def Glob(n):
    Glob = MultiDiGraph()
    s_morphisms = [('X' + str(m + 1), 'X' + str(m), 's' + str(m))
                   for m in range(n)]
    t_morphisms = [('X' + str(m + 1), 'X' + str(m), 't' + str(m))
                   for m in range(n)]
    morphisms = s_morphisms + t_morphisms
    relations = []
    if n > 1:
        ssts_relations = [
            (('X' + str(m + 2), 'X' + str(m), 's' + str(m + 1) + 's' + str(m)),
             ('X' + str(m + 2), 'X' + str(m), 't' + str(m + 1) + 's' + str(m)))
            for m in range(n - 1)
        ]
        sttt_relations = [
            (('X' + str(m + 2), 'X' + str(m), 's' + str(m + 1) + 't' + str(m)),
             ('X' + str(m + 2), 'X' + str(m), 't' + str(m + 1) + 't' + str(m)))
            for m in range(n - 1)
        ]
        relations = ssts_relations + sttt_relations
    Glob.add_edges_from(morphisms)
    return fic(Glob, relations)
Example #33
def merge_data_nodes(graph: nx.MultiDiGraph, survived: Node, removed: Node):
    if survived.has_and_set('is_output'):
        graph.node[removed.id].update({'is_output': True})

    for u, v, d in list(graph.in_edges(removed.id, data=True)):
        graph.add_edges_from([(u, survived.id, d)])
        graph.remove_edge(u, v)

    for u, v, d in list(graph.out_edges(removed.id, data=True)):
        graph.add_edges_from([(survived.id, v, d)])
        graph.remove_edge(u, v)

    for attr in graph.node[removed.id]:
        if not attr in ['name']:
            # We need to save debug info from removed data node
            if attr == 'fw_tensor_debug_info':
                if not survived.has_valid(attr):
                    survived[attr] = []
                for fw_tensor_debug_info in removed[attr]:
                    survived[attr].append(fw_tensor_debug_info)
            else:
                survived[attr] = removed[attr]
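The two rewiring loops above are a general pattern for folding one node into another while preserving edge data; a stripped-down sketch on a bare MultiDiGraph, with the node names purely illustrative:

import networkx as nx

def reroute_edges(graph: nx.MultiDiGraph, survived, removed):
    # Move every edge touching `removed` over to `survived`, keeping the edge data,
    # then drop the emptied node.
    for u, v, d in list(graph.in_edges(removed, data=True)):
        graph.add_edges_from([(u, survived, d)])
        graph.remove_edge(u, v)
    for u, v, d in list(graph.out_edges(removed, data=True)):
        graph.add_edges_from([(survived, v, d)])
        graph.remove_edge(u, v)
    graph.remove_node(removed)

G = nx.MultiDiGraph([("a", "x"), ("x", "b"), ("x", "b")])
reroute_edges(G, "y", "x")
print(sorted(G.edges()))  # [('a', 'y'), ('y', 'b'), ('y', 'b')]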
Example #34
def scale_free_graph(n, G=None,
                     alpha=0.41,
                     beta=0.54,
                     gamma=0.05,
                     delta_in=0.2,
                     delta_out=0,
                     seed=None):
    """Return a scale free directed graph

    Parameters
    ----------
    n : integer
       Number of nodes in graph

    G : NetworkX graph (optional)
       Use as starting graph in algorithm

    alpha : float
       Probability for adding a new node connected to an existing node
       chosen randomly according to the in-degree distribution.

    beta : float
       Probability for adding an edge between two existing nodes.
       One existing node is chosen randomly according to the in-degree
       distribution and the other chosen randomly according to the out-degree
       distribution.

    gamma : float
       Probability for adding a new node connected to an existing node
       chosen randomly according to the out-degree distribution.

    delta_in : float
       Bias for choosing nodes from the in-degree distribution.

    delta_out : float
       Bias for choosing nodes from the out-degree distribution.

    seed : integer (optional)
       Seed for random number generator

    Examples
    --------
    >>> G=nx.scale_free_graph(100)

    
    Notes
    -----
    The sum of alpha, beta, and gamma must be 1.

    Algorithm from
    
    @article{bollobas2003dsf,
    title={{Directed scale-free graphs}},
    author={Bollob{\'a}s, B. and Borgs, C. and Chayes, J. and Riordan, O.},
    journal={Proceedings of the fourteenth annual ACM-SIAM symposium on Discrete algorithms},
    pages={132--139},
    year={2003},
    publisher={Society for Industrial and Applied Mathematics Philadelphia, PA, USA}
    }

"""

    def _choose_node(G,distribution,delta):
        cumsum=0.0
        # normalization 
        psum=float(sum(distribution.values()))+float(delta)*len(distribution)
        r=random.random()
        for i in range(0,len(distribution)):
            cumsum+=(distribution[i]+delta)/psum
            if r < cumsum:  
                break
        return i

    if G is None:
        # start with 3-cycle
        G=MultiDiGraph()
        G.add_edges_from([(0,1),(1,2),(2,0)])

    if alpha <= 0:
        raise ValueError('alpha must be > 0.')
    if beta <= 0:
        raise ValueError('beta must be > 0.')
    if gamma <= 0:
        raise ValueError('gamma must be > 0.')

    if alpha+beta+gamma !=1.0:
        raise ValueError('alpha+beta+gamma must equal 1.')
        
    G.name="directed_scale_free_graph(%s,alpha=%s,beta=%s,gamma=%s,delta_in=%s,delta_out=%s)"%(n,alpha,beta,gamma,delta_in,delta_out)

    # seed random number generator (uses None as default)
    random.seed(seed)

    while len(G)<n:
        r = random.random()
        # random choice in alpha,beta,gamma ranges
        if r<alpha:
            # alpha
            # add new node v
            v = len(G) 
            # choose w according to in-degree and delta_in
            w = _choose_node(G, G.in_degree(with_labels=True),delta_in)
        elif r < alpha+beta:
            # beta
            # choose v according to out-degree and delta_out
            v = _choose_node(G, G.out_degree(with_labels=True),delta_out)
            # choose w according to in-degree and delta_in
            w = _choose_node(G, G.in_degree(with_labels=True),delta_in)
        else:
            # gamma
            # choose v according to out-degree and delta_out
            v = _choose_node(G, G.out_degree(with_labels=True),delta_out)
            # add new node w
            w = len(G) 
        G.add_edge(v,w)
        
    return G
Example #35
def parseArnetminerDataset():
    """
      Parse the four area dataset, and use only barebones structures to keep everything efficient.

        Skips papers that:
            (1)

        The final parsed network
    """

    inputFile = open(os.path.join(projectRoot, 'data','DBLP-citation-Feb21.txt'))
    graph = MultiDiGraph()

    # Sets for authors, papers, conferences, and terms found so far
    indexToPaperIdMap = {}
    citationCountMap = {}
    indexSet = set()

    beginning = inputFile.tell()

    print "Parsing nodes for graph..."

    # Counts for statistics
    VALID_PAPERS = 1566322 # 99.62% of total papers in DBLP dataset
    papersProcessed = 0
    skippedPaperIndices = set()
    invalidPaperIndices = set()

    # Add each paper to graph (adding missing associated terms, authors, and conferences)
    for title, authors, conference, terms, citationCount, index in __papersFromFile(inputFile, skippedPaperIndices, invalidPaperIndices):

        # Check that index is unique, and record it
        assert index not in indexSet
        indexSet.add(index)

        # Create unique identifier with paper index & title
        paperId = '%d----%s' % (index, title)
        citationCountMap[paperId] = citationCount
        indexToPaperIdMap[index] = paperId

        # Add symmetric edges & nodes (if they don't already exist in the network)
        for author in authors:
            graph.add_edges_from([(author, paperId), (paperId, author)])
        graph.add_edges_from([(conference, paperId), (paperId, conference)])
        for term in terms:
            graph.add_edges_from([(term, paperId), (paperId, term)])

        # Output progress
        papersProcessed += 1
        sys.stdout.write("\r Processed %d / %d papers..." % (papersProcessed, VALID_PAPERS))

    # Rewind file
    inputFile.seek(beginning)

    print "Parsing citations for graph..."

    # Counts for statistics
    papersProcessed = 0
    successfulCitations = 0
    omittedPaperCitations = 0
    invalidPaperCitations = 0
    invalidCitations = 0

    # Add citations to the graph
    for title, index, citations in __citationsFromFile(inputFile):
        citingId = '%d----%s' % (index, title)
        for citationIndex in citations:

            # Add citation edge if it was found
            if citationIndex in indexToPaperIdMap:
                successfulCitations += 1
                graph.add_edge(citingId, indexToPaperIdMap[citationIndex])

            # Tally missing citation appropriately
            elif citationIndex in skippedPaperIndices:
                omittedPaperCitations += 1
            elif citationIndex in invalidPaperIndices:
                invalidPaperCitations += 1
            else:
                print "\nCitation '%d' not found for '%s'" % (citationIndex, title)
                invalidCitations += 1

        # Output progress
        papersProcessed += 1
        sys.stdout.write("\r Processed Citations for %d / %d papers..." % (papersProcessed, VALID_PAPERS))

    # Basic statistics about cleanliness of citations
    totalCitations = invalidCitations + successfulCitations
    successfulCitationsPercent = 100 * float(successfulCitations) / totalCitations
    omittedPaperCitationsPercent = 100 * float(omittedPaperCitations) / totalCitations
    invalidPaperCitationsPercent = 100 * float(invalidPaperCitations) / totalCitations
    invalidCitationsPercent = 100 * float(invalidCitations) / totalCitations
    print "\n\nTotal Citations: %d" % totalCitations
    print "  Citations Added (Successful): %d (%2.2f%%)" % (successfulCitations, successfulCitationsPercent)
    print "  Citations Skipped (Skipped Paper): %d (%2.2f%%)" % (omittedPaperCitations, omittedPaperCitationsPercent)
    print "  Citations Skipped (Invalid Paper): %d (%2.2f%%)" % (invalidPaperCitations, invalidPaperCitationsPercent)
    print "  Citations Invalid (Unknown): %d (%2.2f%%)" % (invalidCitations, invalidCitationsPercent)

    return graph