Example #1
def create_feed_dict(rand, batch_size, num_nodes_min_max, theta, input_ph,
                     target_ph):
    """Creates placeholders for the model training and evaluation.

  Args:
    rand: A random seed (np.RandomState instance).
    batch_size: Total number of graphs per batch.
    num_nodes_min_max: A 2-tuple with the [lower, upper) number of nodes per
      graph. The number of nodes for a graph is uniformly sampled within this
      range.
    theta: A `float` threshold parameter for the geographic threshold graph.
      Default: the number of nodes.
    input_ph: The input graph's placeholders, as a graph namedtuple.
    target_ph: The target graph's placeholders, as a graph namedtuple.

  Returns:
    feed_dict: The feed `dict` of input and target placeholders and data.
    raw_graphs: The list of raw networkx graphs.
  """
    inputs, targets, raw_graphs = generate_networkx_graphs(
        rand, batch_size, num_nodes_min_max, theta)
    input_graphs = utils_np.networkxs_to_graphs_tuple(inputs)
    target_graphs = utils_np.networkxs_to_graphs_tuple(targets)
    feed_dict = {input_ph: input_graphs, target_ph: target_graphs}
    return feed_dict, raw_graphs
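A minimal usage sketch for this helper, assuming the surrounding demo already defines the placeholders (`input_ph`, `target_ph`), the training ops (`step_op`, `loss_op`, `output_ops`) and a `tf.Session` named `sess`; all of those names are illustrative:

# Illustrative only; every name other than create_feed_dict is assumed to be
# defined by the surrounding training script.
rand = np.random.RandomState(seed=1)
feed_dict, raw_graphs = create_feed_dict(
    rand, batch_size=32, num_nodes_min_max=(8, 17), theta=20,
    input_ph=input_ph, target_ph=target_ph)
train_values = sess.run(
    {"step": step_op, "loss": loss_op, "outputs": output_ops},
    feed_dict=feed_dict)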
Example #2
def make_x_y(ll, yll, token, tokeny):  # ll = x list, yll = y list, token = x_str2id, tokeny = y_str2id
    g = nx.Graph()
    gy=nx.Graph()
    n=len(ll)
    # add nodes with their features
    for ni,char in enumerate(ll):
        g.add_node(ni,features=[token[char]],char=char)
        y=yll[ni]
        if y=='unlabel':
            y='PAD'
        gy.add_node(ni,features=[tokeny[y]],char=y)

    # add edges (both directions)
    for ii in range(n - 1):
        g.add_edge(ii,ii+1,features=[0])
        g.add_edge(ii+1, ii,features=[0])
        #
        gy.add_edge(ii,ii+1,features=[0])
        gy.add_edge(ii+1, ii, features=[0])

    # global
    g.graph['features']=0
    gy.graph['features']=0

    ### add 4 triggerRoot nodes
    g.add_node(n,features=[token['triggerRoot']],char='triggerRoot')
    g.add_node(n+1,features=[token['triggerRoot']],char='triggerRoot')
    g.add_node(n + 2, features=[token['triggerRoot']], char='triggerRoot')
    g.add_node(n + 3, features=[token['triggerRoot']], char='triggerRoot')
    #### add 4 PAD nodes to the y-graph
    gy.add_node(n,features=[0],char='PAD')
    gy.add_node(n+1,features=[0],char='PAD')
    gy.add_node(n + 2, features=[0], char='PAD')
    gy.add_node(n + 3, features=[0], char='PAD')

    for nod in g.nodes(data=True):
        nid,feadic=nod
        if nid in [n, n+1, n+2, n+3]: continue  # skip the triggerRoot nodes themselves
        ###
        if feadic['char'] not in event_mention_typ: continue  # the extra triggerRoot nodes only link to event mentions
        g.add_edge(nid,n,features=[0])
        g.add_edge(n, nid, features=[0])
        #
        g.add_edge(n+1, nid, features=[0])
        g.add_edge(nid,n+1, features=[0])
        g.add_edge(n + 2, nid, features=[0])
        g.add_edge(nid, n + 2, features=[0])
        g.add_edge(n + 3, nid, features=[0])
        g.add_edge(nid, n + 3, features=[0])

    gtx = utils_np.networkxs_to_graphs_tuple([g])
    gty=utils_np.networkxs_to_graphs_tuple([gy])

    return g,gy,gtx,gty
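A hypothetical call, assuming the module-level `event_mention_typ` collection and the two string-to-id vocabularies have already been built during preprocessing (the toy values below are illustrative only):

# Toy vocabularies for illustration; the real ones come from preprocessing.
x_str2id = {'triggerRoot': 0, 'a': 1, 'b': 2, 'c': 3}
y_str2id = {'PAD': 0, 'B-Event': 1, 'I-Event': 2}
chars = ['a', 'b', 'c']
labels = ['B-Event', 'I-Event', 'unlabel']  # 'unlabel' is mapped to 'PAD' inside make_x_y
g, gy, gtx, gty = make_x_y(chars, labels, x_str2id, y_str2id)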
Example #3
    def test_nested_features(self):
        graph_0 = utils_np.networkxs_to_graphs_tuple(
            [_generate_graph(0, 3),
             _generate_graph(1, 2)])
        graph_1 = utils_np.networkxs_to_graphs_tuple([_generate_graph(2, 2)])
        graph_2 = utils_np.networkxs_to_graphs_tuple([_generate_graph(3, 3)])
        graphs_ = [
            gr.map(tf.convert_to_tensor, graphs.ALL_FIELDS)
            for gr in [graph_0, graph_1, graph_2]
        ]

        def _create_nested_fields(graphs_tuple):
            new_nodes = ({
                "a": graphs_tuple.nodes,
                "b": [graphs_tuple.nodes + 1, graphs_tuple.nodes + 2]
            }, )

            new_edges = [{
                "c": graphs_tuple.edges + 5,
                "d": (graphs_tuple.edges + 1, graphs_tuple.edges + 3),
            }]
            new_globals = []

            return graphs_tuple.replace(nodes=new_nodes,
                                        edges=new_edges,
                                        globals=new_globals)

        graphs_ = [_create_nested_fields(gr) for gr in graphs_]
        concat_graph = utils_tf.concat(graphs_, axis=0)

        actual_nodes = concat_graph.nodes
        actual_edges = concat_graph.edges
        actual_globals = concat_graph.globals

        expected_nodes = tree.map_structure(lambda *x: tf.concat(x, axis=0),
                                            *[gr.nodes for gr in graphs_])
        expected_edges = tree.map_structure(lambda *x: tf.concat(x, axis=0),
                                            *[gr.edges for gr in graphs_])
        expected_globals = tree.map_structure(lambda *x: tf.concat(x, axis=0),
                                              *[gr.globals for gr in graphs_])

        tree.assert_same_structure(expected_nodes, actual_nodes)
        tree.assert_same_structure(expected_edges, actual_edges)
        tree.assert_same_structure(expected_globals, actual_globals)

        tree.map_structure(self.assertAllEqual, expected_nodes, actual_nodes)
        tree.map_structure(self.assertAllEqual, expected_edges, actual_edges)
        tree.map_structure(self.assertAllEqual, expected_globals,
                           actual_globals)

        # Borrowed from `test_concat_first_axis`:
        self.assertAllEqual(np.array([3, 2, 2, 3]), concat_graph.n_node)
        self.assertAllEqual(np.array([2, 1, 1, 2]), concat_graph.n_edge)
        self.assertAllEqual(np.array([1, 2, 4, 6, 8, 9]), concat_graph.senders)
        self.assertAllEqual(np.array([0, 0, 3, 5, 7, 7]),
                            concat_graph.receivers)
Example #4
def test_graph_network(tf_session):
    logdir = os.path.join(TEST_FOLDER, 'test_logdir/with_trace')
    os.makedirs(logdir, exist_ok=True)

    starcluster_graphs_nx, gaia_graphs_nx = make_example_graphs(
        2,
        num_stars=10,
        sc_edge_size=2,
        sc_node_size=12,
        sc_global_size=5,
        g_edge_size=3,
        g_node_size=13,
        g_global_size=6)

    with tf_session.graph.as_default():
        sc_pl = placeholders_from_networkxs(starcluster_graphs_nx,
                                            force_dynamic_num_graphs=False)
        g_pl = placeholders_from_networkxs(gaia_graphs_nx,
                                           force_dynamic_num_graphs=False)

        sc_graphtuple = networkxs_to_graphs_tuple(starcluster_graphs_nx)
        g_graphtuple = networkxs_to_graphs_tuple(gaia_graphs_nx)

        encoded_size = 7
        t_network = StarClusterTNetwork(encoded_size,
                                        sc_encoder_latent_size=16,
                                        sc_encoder_num_layers=2,
                                        sc_decoder_latent_size=16,
                                        sc_decoder_num_layers=2,
                                        g_encoder_latent_size=16,
                                        g_encoder_num_layers=2)

        t_network_output = t_network(g_pl,
                                     sc_pl,
                                     num_samples=2,
                                     num_processing_steps=1)

        summary = tf.summary.merge_all()
        writer = tf.compat.v1.summary.FileWriter(logdir,
                                                 tf_session.graph,
                                                 session=tf_session)
        tf_session.run(tf.global_variables_initializer())

        run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
        run_metadata = tf.RunMetadata()

        t_network_output_res, summary_eval = tf_session.run(
            [t_network_output, summary],
            feed_dict={
                sc_pl: sc_graphtuple,
                g_pl: g_graphtuple
            },
            options=run_options,
            run_metadata=run_metadata)
        writer.add_run_metadata(run_metadata, 'step%d' % 0)
        writer.add_summary(summary_eval, 0)
Example #5
def create_feed_dict_yr(rand, batch_size, num_nodes_min_max, theta, input_ph,
                        target_ph):
    """Creates placeholders for the model training and evaluation.

  Args:
    rand: A random seed (np.RandomState instance).
    batch_size: Total number of graphs per batch.
    num_nodes_min_max: A 2-tuple with the [lower, upper) number of nodes per
      graph. The number of nodes for a graph is uniformly sampled within this
      range.
    theta: A `float` threshold parameters for the geographic threshold graph's
      threshold. Default= the number of nodes.
    input_ph: The input graph's placeholders, as a graph namedtuple.
    target_ph: The target graph's placeholders, as a graph namedtuple.

  Returns:
    feed_dict: The feed `dict` of input and target placeholders and data.
    raw_graphs: The `dict` of raw networkx graphs.
  """
    inputs, targets, raw_graphs = generate_networkx_graphs(
        rand, batch_size, num_nodes_min_max, theta)
    input_graphs = utils_np.networkxs_to_graphs_tuple(inputs)
    target_graphs = utils_np.networkxs_to_graphs_tuple(targets)
    # for k in all_fields:
    #   input_graphs.replace(k=tf.constant(input_graphs.k))
    input_graphs = input_graphs.replace(edges=tf.constant(input_graphs.edges))
    input_graphs = input_graphs.replace(
        globals=tf.constant(input_graphs.globals))
    input_graphs = input_graphs.replace(nodes=tf.constant(input_graphs.nodes))
    input_graphs = input_graphs.replace(
        n_edge=tf.constant(input_graphs.n_edge))
    input_graphs = input_graphs.replace(
        n_node=tf.constant(input_graphs.n_node))
    input_graphs = input_graphs.replace(
        receivers=tf.constant(input_graphs.receivers))
    input_graphs = input_graphs.replace(
        senders=tf.constant(input_graphs.senders))

    target_graphs = target_graphs.replace(
        edges=tf.constant(target_graphs.edges))
    target_graphs = target_graphs.replace(
        globals=tf.constant(target_graphs.globals))
    target_graphs = target_graphs.replace(
        nodes=tf.constant(target_graphs.nodes))
    target_graphs = target_graphs.replace(
        n_edge=tf.constant(target_graphs.n_edge))
    target_graphs = target_graphs.replace(
        n_node=tf.constant(target_graphs.n_node))
    target_graphs = target_graphs.replace(
        receivers=tf.constant(target_graphs.receivers))
    target_graphs = target_graphs.replace(
        senders=tf.constant(target_graphs.senders))

    return [input_graphs, target_graphs]
Example #6
def create_feed_dict(generator,
                     batch_size,
                     input_ph,
                     target_ph,
                     is_trained=True):
    inputs, targets = generator(batch_size, is_trained)
    input_graphs = utils_np.networkxs_to_graphs_tuple(inputs)
    target_graphs = utils_np.networkxs_to_graphs_tuple(targets)
    feed_dict = {input_ph: input_graphs, target_ph: target_graphs}

    return feed_dict
Example #7
 def test_networkxs_to_graphs_tuple_raises_key_error(self):
   """If the "features" field is not present in the nodes or edges."""
   graph_nx = _single_data_dict_to_networkx(self.graphs_dicts_in[-1])
   first_node = list(graph_nx.nodes(data=True))[0]
   del first_node[1]["features"]
   with self.assertRaisesRegexp(
       KeyError, "This could be due to the node having been silently added"):
     utils_np.networkxs_to_graphs_tuple([graph_nx])
   graph_nx = _single_data_dict_to_networkx(self.graphs_dicts_in[-1])
   first_edge = list(graph_nx.edges(data=True))[0]
   del first_edge[2]["features"]
   with self.assertRaises(KeyError):
     utils_np.networkxs_to_graphs_tuple([graph_nx])

def test_networkxs_to_graphs_tuple_raises_assertion_error(self):
     """Either all nodes (resp. edges) should have features, or none of them."""
     graph_nx = _single_data_dict_to_networkx(self.graphs_dicts_in[-1])
     first_node = list(graph_nx.nodes(data=True))[0]
     first_node[1]["features"] = None
     with self.assertRaisesRegexp(
             ValueError, "Either all the nodes should have features"):
         utils_np.networkxs_to_graphs_tuple([graph_nx])
     graph_nx = _single_data_dict_to_networkx(self.graphs_dicts_in[-1])
     first_edge = list(graph_nx.edges(data=True))[0]
     first_edge[2]["features"] = None
     with self.assertRaisesRegexp(
             ValueError, "Either all the edges should have features"):
         utils_np.networkxs_to_graphs_tuple([graph_nx])
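For reference, a minimal sketch of the convention these tests exercise: every node and edge (and the graph itself) either carries a "features" attribute or none of them do; deleting or `None`-ing a single one triggers the errors asserted above (names below are illustrative only):

import networkx as nx
import numpy as np
from graph_nets import utils_np

# Sketch: all nodes, all edges and the graph carry "features", so the
# conversion succeeds.
graph_nx = nx.MultiDiGraph()  # OrderedMultiDiGraph on older networkx versions
graph_nx.add_node(0, features=np.array([1.0]))
graph_nx.add_node(1, features=np.array([2.0]))
graph_nx.add_edge(0, 1, features=np.array([0.5]))
graph_nx.graph["features"] = np.array([0.0])
graphs_tuple = utils_np.networkxs_to_graphs_tuple([graph_nx])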
Example #9
	def act(self,defState, defNode, eps):
		"""
		Parse defState into the input format for feed_dict and
		evaluate the network to choose an action.
		"""
		if (defState.nodes[defNode]["isDef"] != 1):
			raise ValueError("def location doesn't match")

		""" if episod is low or random value is les than epsilon
			take random action
		"""
		if (eps < OBSERVEEPS) or (random.random() < EPSILON):
			validActions = list(defState.out_edges([defNode]))
			a = self.random.randint(0, len(validActions),size=1)[0]
			return validActions[a]

		gin = self._gtmp2intmp(defState)
		test_values = self.sess.run({
			"outputs":self.output_ops_tr
			}, feed_dict={self.inputPh: utils_np.networkxs_to_graphs_tuple([gin])})

		outg = utils_np.graphs_tuple_to_networkxs(test_values["outputs"][-1])[0]
		outg = nx.DiGraph(outg)
		self.outg = outg

		validActions = list(defState.out_edges([defNode]))

		qdict = dict()

		for e in validActions:
			qdict[e] = outg.get_edge_data(*e)["features"][0]

		return max(qdict, key=qdict.get)
Example #10
 def test_feed_data(self):
   networkx = [_generate_graph(batch_index) for batch_index in range(16)]
   placeholders = utils_tf.placeholders_from_networkxs(
       networkx, force_dynamic_num_graphs=True)
   # Does not need to be the same size
   networkxs = [_generate_graph(batch_index) for batch_index in range(2)]
   with self.test_session() as sess:
     output = sess.run(
         placeholders,
         utils_tf.get_feed_dict(placeholders,
                                utils_np.networkxs_to_graphs_tuple(networkxs)))
   self.assertAllEqual(
       np.array([[0, 0], [1, 0], [2, 0], [3, 0], [0, 1], [1, 1], [2, 1],
                 [3, 1]]), output.nodes)
   self.assertEqual(np.float32, output.nodes.dtype)
   self.assertAllEqual(np.array([[0], [1]]), output.globals)
   self.assertEqual(np.float32, output.globals.dtype)
   sorted_edges_content = sorted(
       [(x, y, z)
        for x, y, z in zip(output.receivers, output.senders, output.edges)])
   self.assertAllEqual([0, 0, 1, 4, 4, 5],
                       [x[0] for x in sorted_edges_content])
   self.assertAllEqual([1, 2, 3, 5, 6, 7],
                       [x[1] for x in sorted_edges_content])
   self.assertEqual(np.float64, output.edges.dtype)
   self.assertAllEqual(
       np.array([[0, 1, 0], [1, 2, 0], [2, 3, 0], [0, 1, 1], [1, 2, 1],
                 [2, 3, 1]]), [x[2] for x in sorted_edges_content])
Example #11
def create_feed_dict(input_ph, target_ph, inputs, targets):
    """Creates the feed dict for the placeholders for the model training and evaluation.

    Args:
        input_ph: The input graph's placeholders, as a graph namedtuple.
        target_ph: The target graph's placeholders, as a graph namedtuple.
        inputs: The input graphs, as networkx graphs.
        targets: The target graphs, as networkx graphs.

    Returns:
        feed_dict: The feed `dict` of input and target placeholders and data.
    """
    input_graphs = utils_np.networkxs_to_graphs_tuple(inputs)
    target_graphs = utils_np.networkxs_to_graphs_tuple(targets)
    feed_dict = {input_ph: input_graphs, target_ph: target_graphs}
    return feed_dict
Example #12
 def test_raise_all_or_no_nones(self, none_field):
     graph_0 = utils_np.networkxs_to_graphs_tuple(
         [_generate_graph(0, 3),
          _generate_graph(1, 2)])
     graph_1 = utils_np.networkxs_to_graphs_tuple([_generate_graph(2, 2)])
     graph_2 = utils_np.networkxs_to_graphs_tuple([_generate_graph(3, 3)])
     graphs_ = [
         gr.map(tf.convert_to_tensor, graphs.ALL_FIELDS)
         for gr in [graph_0, graph_1, graph_2]
     ]
     graphs_[1] = graphs_[1].replace(**{none_field: None})
     with self.assertRaisesRegex(
             ValueError,
             "Different set of keys found when iterating over data dictionaries."
     ):
         utils_tf.concat(graphs_, axis=0)
Example #13
def generate_example_random_choice(positions, properties, k=26, plot=False):
    print('choice nn')
    idx_list = np.arange(len(positions))
    virtual_node_positions = positions[np.random.choice(idx_list, 1000, replace=False)]

    kdtree = cKDTree(virtual_node_positions)
    dist, indices = kdtree.query(positions)

    virtual_properties = np.zeros((len(np.bincount(indices)), len(properties[0])))

    mean_sum = [lambda x: np.bincount(indices, weights=x) / np.maximum(1., np.bincount(indices)),  # mean
                lambda x: np.bincount(indices, weights=x)]  # sum

    mean_sum_enc = [0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1]

    for p, enc in zip(np.arange(len(properties[0])), mean_sum_enc):
        virtual_properties[:, p] = mean_sum[enc](properties[:, p])
    virtual_positions = virtual_properties[:, :3]

    graph = nx.DiGraph()
    kdtree = cKDTree(virtual_positions)
    dist, idx = kdtree.query(virtual_positions, k=k + 1)
    receivers = idx[:, 1:]  # N,k
    senders = np.arange(virtual_positions.shape[0])  # N
    senders = np.tile(senders[:, None], [1, k])  # N,k
    receivers = receivers.flatten()
    senders = senders.flatten()

    n_nodes = virtual_positions.shape[0]

    pos = dict()  # for plotting node positions.
    edgelist = []
    for node, feature, position in zip(np.arange(n_nodes), virtual_properties, virtual_positions):
        graph.add_node(node, features=feature)
        pos[node] = position[:2]

    # edges = np.stack([senders, receivers], axis=-1) + sibling_node_offset
    for u, v in zip(senders, receivers):
        graph.add_edge(u, v, features=np.array([1., 0.]))
        graph.add_edge(v, u, features=np.array([1., 0.]))
        edgelist.append((u, v))
        edgelist.append((v, u))

    graph.graph["features"] = np.array([0.])
    # plotting

    print('len(pos) = {}\nlen(edgelist) = {}'.format(len(pos), len(edgelist)))
    if plot:
        fig, ax = plt.subplots(1, 1, figsize=(20, 20))
        draw(graph, ax=ax, pos=pos, node_color='blue', edge_color='red', node_size=10, width=0.1)

        image_dir = '/data2/hendrix/images/'
        graph_image_idx = len(glob.glob(os.path.join(image_dir, 'graph_image_*')))
        plt.savefig(os.path.join(image_dir, 'graph_image_{}'.format(graph_image_idx)))

    return networkxs_to_graphs_tuple([graph],
                                     node_shape_hint=[virtual_positions.shape[1] + virtual_properties.shape[1]],
                                     edge_shape_hint=[2])
Example #14
 def test_concat_last_axis(self):
   graph0 = utils_np.networkxs_to_graphs_tuple(
       [_generate_graph(0, 3), _generate_graph(1, 2)])
   graph1 = utils_np.networkxs_to_graphs_tuple(
       [_generate_graph(2, 3), _generate_graph(3, 2)])
   graph0 = graph0.map(tf.convert_to_tensor, graphs.ALL_FIELDS)
   graph1 = graph1.map(tf.convert_to_tensor, graphs.ALL_FIELDS)
   concat_graph = utils_tf.concat([graph0, graph1], axis=-1)
   self.assertAllEqual(
       np.array([[0, 0, 0, 2], [1, 0, 1, 2], [2, 0, 2, 2], [0, 1, 0, 3],
                 [1, 1, 1, 3]]), concat_graph.nodes)
   self.assertAllEqual(
       np.array([[0, 1, 0, 0, 1, 2], [1, 2, 0, 1, 2, 2], [0, 1, 1, 0, 1, 3]]),
       concat_graph.edges)
   self.assertAllEqual(np.array([3, 2]), concat_graph.n_node)
   self.assertAllEqual(np.array([2, 1]), concat_graph.n_edge)
   self.assertAllEqual(np.array([1, 2, 4]), concat_graph.senders)
   self.assertAllEqual(np.array([0, 0, 3]), concat_graph.receivers)
   self.assertAllEqual(np.array([[0, 2], [1, 3]]), concat_graph.globals)
Example #15
def create_feed_dict(all_db, rand, batch_size, input_ph, target_ph, dataset, graphcache):
    """Creates placeholders for the model training and evaluation.

    Args:
        rand: A random seed (np.RandomState instance).
        batch_size: Total number of graphs per batch.
        input_ph: The input graph's placeholders, as a graph namedtuple.
        target_ph: The target graph's placeholders, as a graph namedtuple.
        dataset: 'train', 'val', 'test'
    Returns:
        feed_dict: The feed `dict` of input and target placeholders and data.
        raw_graphs: The `dict` of raw networkx graphs.
        
    """
    inputs, targets, raw_graphs, selids = generate_networkx_graphs(graphcache, all_db, rand, batch_size, dataset)
    input_graphs = utils_np.networkxs_to_graphs_tuple(inputs)
    target_graphs = utils_np.networkxs_to_graphs_tuple(targets)
    feed_dict = {input_ph: input_graphs, target_ph: target_graphs}
    return feed_dict, raw_graphs, selids
Example #16
 def test_get_feed_dict_raises(self, none_fields):
   networkxs = [_generate_graph(batch_index) for batch_index in range(16)]
   placeholders = utils_tf.placeholders_from_networkxs(networkxs)
   feed_values = utils_np.networkxs_to_graphs_tuple(networkxs)
   with self.assertRaisesRegexp(ValueError, ""):
     utils_tf.get_feed_dict(
         placeholders.map(lambda _: None, none_fields), feed_values)
   with self.assertRaisesRegexp(ValueError, ""):
     utils_tf.get_feed_dict(placeholders,
                            feed_values.map(lambda _: None, none_fields))

 def test_concat_first_axis(self, none_fields):
     graph_0 = utils_np.networkxs_to_graphs_tuple(
         [_generate_graph(0, 3),
          _generate_graph(1, 2)])
     graph_1 = utils_np.networkxs_to_graphs_tuple([_generate_graph(2, 2)])
     graph_2 = utils_np.networkxs_to_graphs_tuple([_generate_graph(3, 3)])
     graphs_ = [
         gr.map(tf.convert_to_tensor, graphs.ALL_FIELDS)
         for gr in [graph_0, graph_1, graph_2]
     ]
     graphs_ = [gr.map(lambda _: None, none_fields) for gr in graphs_]
     concat_graph = utils_tf.concat(graphs_, axis=0)
     for none_field in none_fields:
         self.assertEqual(None, getattr(concat_graph, none_field))
     concat_graph = concat_graph.map(tf.no_op, none_fields)
     with self.test_session() as sess:
         concat_graph = sess.run(concat_graph)
     if "nodes" not in none_fields:
         self.assertAllEqual(np.array([0, 1, 2, 0, 1, 0, 1, 0, 1, 2]),
                             [x[0] for x in concat_graph.nodes])
         self.assertAllEqual(np.array([0, 0, 0, 1, 1, 2, 2, 3, 3, 3]),
                             [x[1] for x in concat_graph.nodes])
     if "edges" not in none_fields:
         self.assertAllEqual(np.array([0, 1, 0, 0, 0, 1]),
                             [x[0] for x in concat_graph.edges])
         self.assertAllEqual(np.array([0, 0, 1, 2, 3, 3]),
                             [x[2] for x in concat_graph.edges])
     self.assertAllEqual(np.array([3, 2, 2, 3]), concat_graph.n_node)
     self.assertAllEqual(np.array([2, 1, 1, 2]), concat_graph.n_edge)
     if "senders" not in none_fields:
         # [1, 2], [1], [1], [1, 2] and 3, 2, 2, 3 nodes
         # So we are summing [1, 2, 1, 1, 2] with [0, 0, 3, 5, 7, 7]
         self.assertAllEqual(np.array([1, 2, 4, 6, 8, 9]),
                             concat_graph.senders)
     if "receivers" not in none_fields:
         # [0, 0], [0], [0], [0, 0] and 3, 2, 2, 3 nodes
         # So we are summing [0, 0, 0, 0, 0, 0] with [0, 0, 3, 5, 7, 7]
         self.assertAllEqual(np.array([0, 0, 3, 5, 7, 7]),
                             concat_graph.receivers)
     if "globals" not in none_fields:
         self.assertAllEqual(np.array([[0], [1], [2], [3]]),
                             concat_graph.globals)
Example #18
    def next(self, raw_graphs=False):
        if self.indx + self.batch_size > self.len:
            self.indx = 0
            if self.shuffle:
                np.random.shuffle(self.indices)
        indx_range = self.indices[range(self.indx, self.indx + self.batch_size)]

        graphs = [self.graphs[i] for i in indx_range]
        ys = [self.y_matrices[i] for i in indx_range]
        targets = copy.deepcopy(graphs)
        for i in range(len(ys)):
            graphs[i].graph['features'] = np.array([0, 0]).astype(np.float)
            targets[i].graph['features'] = np.array(ys[i]).astype(np.float)

        self.indx += self.batch_size

        if raw_graphs:
            return graphs, targets
        else:
            return utils_np.networkxs_to_graphs_tuple(graphs), utils_np.networkxs_to_graphs_tuple(targets)
Example #19
    def infer(self, input_graphs, target_graphs):

        reload_file = Path(self._reload_fle)

        input_ph, target_ph = create_placeholders(input_graphs, target_graphs)
        input_ph, target_ph = make_all_runnable_in_session(input_ph, target_ph)
        output_ops_ge = self._model(input_ph, self._num_processing_steps_ge)
        saver = tf.train.import_meta_graph(reload_file.as_posix() + '.meta')

        sess = tf.Session()
        sess.run(tf.global_variables_initializer())
        tf.reset_default_graph()
        with sess.as_default():
            if not reload_file.is_dir():
                saver.restore(sess, reload_file.as_posix())
            else:
                print("no file found, restoring failed")

            input_graphs_tuple = utils_np.networkxs_to_graphs_tuple(
                input_graphs)
            target_graphs_tuple = utils_np.networkxs_to_graphs_tuple(
                target_graphs)
            feed_dict = {
                input_ph: input_graphs_tuple,
                target_ph: target_graphs_tuple,
            }
            test_values = sess.run(
                {
                    "target": target_ph,
                    "outputs": output_ops_ge,
                },
                feed_dict=feed_dict)

            correct_ge, solved_ge = existence_accuracy(
                test_values["target"],
                test_values["outputs"][-1],
                use_edges=False)

            testing_info = 0, 0, 0, 0, [correct_ge], 0, [solved_ge]

        return test_values, testing_info

 def test_networkxs_to_graphs_tuple(self):
     graph0 = utils_np.data_dicts_to_graphs_tuple(self.graphs_dicts_in)
     graph_nxs = []
     for data_dict in self.graphs_dicts_in:
         graph_nxs.append(_single_data_dict_to_networkx(data_dict))
         hints = {
             "edge_shape_hint": data_dict["edges"].shape[1:],
             "node_shape_hint": data_dict["nodes"].shape[1:],
             "data_type_hint": data_dict["nodes"].dtype,
         }
     graph = utils_np.networkxs_to_graphs_tuple(graph_nxs, **hints)
     self._assert_graph_equals_np(graph0, graph, force_edges_ordering=True)
Example #21
def create_feed_dict(sources, targets, source_ph, target_ph):
    """Creates placeholders for the model training and evaluation.

    Args:
        rand: A random seed (np.RandomState instance).
        batch_size: Total number of graphs per batch.
        min_max_nodes: A 2-tuple with the [lower, upper) number of nodes per
            graph. The number of nodes for a graph is uniformly sampled within this
            range.
        geo_density: A `float` threshold parameters for the geographic threshold graph's
            threshold. Default= the number of nodes.
        source_ph: The source graph's placeholders, as a graph namedtuple.
        target_ph: The target graph's placeholders, as a graph namedtuple.

    Returns:
        feed_dict: The feed `dict` of source and target placeholders and data.
    """
    source_graphs = utils_np.networkxs_to_graphs_tuple(sources)
    target_graphs = utils_np.networkxs_to_graphs_tuple(targets)
    feed_dict = {source_ph: source_graphs, target_ph: target_graphs}
    return feed_dict
Example #22
def create_feed_dict(input_ph,
                     target_ph,
                     input_graphs,
                     target_graphs,
                     batch_processing=True):
    if not batch_processing:
        input_graphs = [input_graphs]
        target_graphs = [target_graphs]

    input_tuple = utils_np.networkxs_to_graphs_tuple(input_graphs)
    target_tuple = utils_np.networkxs_to_graphs_tuple(target_graphs)

    input_dct = utils_tf.get_feed_dict(input_ph, input_tuple)
    target_dct = utils_tf.get_feed_dict(target_ph, target_tuple)

    input_ph_runnable, target_ph_runnable = make_all_runnable_in_session(
        input_ph, target_ph)

    return input_ph_runnable, target_ph_runnable, {**input_dct, **target_dct}
Example #23
def create_feed_dict(rand, batch_size, raw_input_graphs, raw_target_graphs,
                     edge_permutations, input_ph, target_ph, weight_ph,
                     edge_rel_weights, sample_ids, replace):
    """Creates placeholders for the model training and evaluation."""
    # Create some example data for inspecting the vector sizes.
    inputs, targets = generate_networkx_graphs(rand, batch_size,
                                               raw_input_graphs,
                                               raw_target_graphs,
                                               edge_permutations, sample_ids,
                                               replace)
    input_graphs = utils_np.networkxs_to_graphs_tuple(inputs)
    target_graphs = utils_np.networkxs_to_graphs_tuple(targets)
    graph_edge_weights = edge_rel_weights[target_graphs[1][:,
                                                           -1].astype(np.int)]
    norm_graph_edge_weights = graph_edge_weights / (
        graph_edge_weights[graph_edge_weights > 0].mean())
    feed_dict = {
        input_ph: input_graphs,
        target_ph: target_graphs,
        weight_ph: norm_graph_edge_weights
    }
    return feed_dict

def generate_example_nn(positions, properties, k=1, plot=False):
    """
    Generate a k-nn graph from positions.

    Args:
        positions: [num_points, 3] positions used for graph construction.
        properties: [num_points, F0,...,Fd] each node will have these properties of shape [F0,...,Fd]
        k: int, k nearest neighbours are connected.
        plot: whether to plot graph.

    Returns: GraphTuple
    """
    graph = nx.OrderedMultiDiGraph()

    kdtree = cKDTree(positions)
    dist, idx = kdtree.query(positions, k=k + 1)
    receivers = idx[:, 1:]  #N,k
    senders = np.arange(positions.shape[0])  #N
    senders = np.tile(senders[:, None], [1, k])  #N,k
    receivers = receivers.flatten()
    senders = senders.flatten()

    n_nodes = positions.shape[0]

    pos = dict()  # for plotting node positions.
    edgelist = []

    for node, feature, position in zip(np.arange(n_nodes), properties,
                                       positions):
        graph.add_node(node, features=feature)
        pos[node] = position[:2]

    # edges = np.stack([senders, receivers], axis=-1) + sibling_node_offset
    for u, v in zip(senders, receivers):
        graph.add_edge(u, v, features=None)
        graph.add_edge(v, u, features=None)
        edgelist.append((u, v))
        edgelist.append((v, u))

    graph.graph['features'] = None

    # plotting

    if plot:
        fig, ax = plt.subplots(1, 1, figsize=(12, 12))
        draw(graph, ax=ax, pos=pos, node_color='green', edge_color='red')
        plt.show()

    return networkxs_to_graphs_tuple(
        [graph], node_shape_hint=[positions.shape[1] + properties.shape[1]])
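An illustrative call (random data, shapes as described in the docstring above):

# Illustrative only: 50 random 3D positions, each with 5 random properties,
# connected to their 3 nearest neighbours.
positions = np.random.uniform(0., 1., size=(50, 3))
properties = np.random.uniform(0., 1., size=(50, 5))
graphs_tuple = generate_example_nn(positions, properties, k=3, plot=False)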
Example #25
def placeholders_from_networkxs(graph_nxs,
                                node_shape_hint=None,
                                edge_shape_hint=None,
                                data_type_hint=tf.float32,
                                force_dynamic_num_graphs=True,
                                name="placeholders_from_networkxs"):
    """Constructs placeholders compatible with a list of networkx instances.

  Given a list of networkx instances, constructs placeholders compatible with
  the shape of those graphs.

  The networkx graph should be set up such that, for fixed shapes `node_shape`,
   `edge_shape` and `global_shape`:
    - `graph_nx.nodes(data=True)[i][-1]["features"]` is, for any node index i, a
      tensor of shape `node_shape`, or `None`;
    - `graph_nx.edges(data=True)[i][-1]["features"]` is, for any edge index i, a
      tensor of shape `edge_shape`, or `None`;
    - `graph_nx.edges(data=True)[i][-1]["index"]`, if present, defines the order
      in which the edges will be sorted in the resulting `data_dict`;
    - `graph_nx.graph["features"]` is a tensor of shape `global_shape` or `None`.

  Args:
    graph_nxs: A container of `networkx.MultiDiGraph`s.
    node_shape_hint: (iterable of `int` or `None`, default=`None`) If the graph
      does not contain nodes, the trailing shape for the created `NODES` field.
      If `None` (the default), this field is left `None`. This is not used if
      `graph_nx` contains at least one node.
    edge_shape_hint: (iterable of `int` or `None`, default=`None`) If the graph
      does not contain edges, the trailing shape for the created `EDGES` field.
      If `None` (the default), this field is left `None`. This is not used if
      `graph_nx` contains at least one edge.
    data_type_hint: (tensorflow dtype, default=`tf.float32`) If the `NODES` or
      `EDGES` fields are autocompleted, their type.
    force_dynamic_num_graphs: A `bool` that forces the batch dimension to be
      dynamic. Defaults to `True`.
    name: (string, optional) A name for the operation.

  Returns:
    An instance of `graphs.GraphsTuple` placeholders compatible with the
      dimensions of the graph_nxs.
  """
    with tf.name_scope(name):
        graph = utils_np.networkxs_to_graphs_tuple(
            graph_nxs, node_shape_hint, edge_shape_hint,
            data_type_hint.as_numpy_dtype())
        return _placeholders_from_graphs_tuple(
            graph, force_dynamic_num_graphs=force_dynamic_num_graphs)
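A minimal sketch of how these placeholders are typically paired with `utils_np.networkxs_to_graphs_tuple` and `utils_tf.get_feed_dict`, assuming `graph_nxs` is a list of networkx graphs following the "features" convention described above:

# Sketch only: with force_dynamic_num_graphs=True, placeholders built from one
# batch can be fed batches of a different number of graphs later.
placeholders = placeholders_from_networkxs(graph_nxs, force_dynamic_num_graphs=True)
feed_values = utils_np.networkxs_to_graphs_tuple(graph_nxs)
feed_dict = utils_tf.get_feed_dict(placeholders, feed_values)
# feed_dict can now be passed to session.run(..., feed_dict=feed_dict)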
Example #26
 def test_feed_data_no_nodes(self):
   networkx = [
       _generate_graph(batch_index, n_nodes=0, add_edges=False)
       for batch_index in range(16)
   ]
   placeholders = utils_tf.placeholders_from_networkxs(
       networkx, force_dynamic_num_graphs=True)
   # Does not need to be the same size
   networkxs = [
       _generate_graph(batch_index, n_nodes=0, add_edges=False)
       for batch_index in range(2)
   ]
   self.assertEqual(None, placeholders.nodes)
   self.assertEqual(None, placeholders.edges)
   with self.test_session() as sess:
     output = sess.run(
         placeholders.replace(nodes=tf.no_op(), edges=tf.no_op()),
         utils_tf.get_feed_dict(placeholders,
                                utils_np.networkxs_to_graphs_tuple(networkxs)))
   self.assertAllEqual(np.array([[0], [1]]), output.globals)
   self.assertEqual(np.float32, output.globals.dtype)
Example #27
    def _get_observation(self):
        graph = ObservationBuilder(features=self._features).get_observation(
            self.env)

        gt = utils_np.networkxs_to_graphs_tuple([graph])

        # build action indices here to make sure they match the ones
        # the network is seeing
        self.actions = []
        edges = gt.edges if gt.edges is not None else []  # may be None
        for (u, v, d) in zip(gt.senders, gt.receivers, edges):
            possible = d[POSSIBLE_IDX] == 1
            if not possible:
                continue
            else:
                source = graph.node[u]["represents"]
                target = graph.node[v]["represents"]
                timeslot = int(d[TIMESLOT_IDX])
                self.actions.append((source, target, timeslot))

        return gt
Example #28
    def next_batch(self, index):
        graph_dicts = []
        labels = []
        for k, path in enumerate(
                self.second_image_paths[self.batch_size *
                                        index:self.batch_size * (index + 1)]):

            temp_str = path.split('\\')[-1]
            begin = temp_str.find('_')
            end = temp_str.find('.')
            label = self.alpha.index(temp_str[begin + 2:end])

            img = cv2.imread(path)
            img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)

            img = img / 255.0

            graph_dicts.append(self.deal_image(img))
            labels.append(label)

        return utils_np.networkxs_to_graphs_tuple(graph_dicts), np.eye(
            len(self.alpha))[labels]

def test_batch_gen():
    #['caveman_2', 'caveman_4']
    gen_graph = GenerateDataGraph(type_dataset='caveman_4', num_perm=10)
    epochs = 1
    batch_size = 40
    for epoch in range(epochs):
        print("\n########## epoch " + str(epoch + 1) + " ##########")
        gen_trainig = gen_graph.train_generator(batch_size=batch_size)
        counter = 0
        for gt_graph, set_feature, in_graph in gen_trainig:
            print("---- batch ----")
            #print("gt_graph.shape: ", gt_graph.shape)
            print("set_feature.shape: ", set_feature.shape)
            print("in_graph.shape: ", in_graph.shape)
            #draw_graph(G_arr=gt_graph, row=2, col=2, pos=set_feature, fname='comm/comm_'+str(counter))
            for g in gt_graph:
                nx.set_node_attributes(G=g, name="features", values=0)
                nx.set_edge_attributes(G=g, name="features", values=0)
            graph_tuple = utils_np.networkxs_to_graphs_tuple(gt_graph)
            print(graph_tuple)
            #for k in range(len(gt_graph)):
            #  counter += 1
            #  save_graph(name='comm/comm_'+str(counter), points_coord=set_feature[k], adj=gt_graph[k], dim=2)
            break
Example #30
def generate_example(positions, properties, k_mean=26, plot=False):
    """
    Generate a geometric graph from positions.

    Args:
        positions: [num_points, 3] positions used for graph construction.
        properties: [num_points, F0,...,Fd] each node will have these properties of shape [F0,...,Fd]
        k_mean: float
        plot: whether to plot graph.

    Returns: GraphTuple
    """
    graph = nx.DiGraph()
    sibling_edgelist = []
    parent_edgelist = []
    pos = dict()  # for plotting node positions.
    real_nodes = list(np.arange(positions.shape[0]))
    while positions.shape[0] > 1:
        # n_nodes, n_nodes
        dist = np.linalg.norm(positions[:, None, :] - positions[None, :, :], axis=-1)
        opt_screen_length = find_screen_length(dist, k_mean)
        print("Found optimal screening length {}".format(opt_screen_length))

        distance_matrix_no_loops = np.where(dist == 0., np.inf, dist)
        A = distance_matrix_no_loops < opt_screen_length

        senders, receivers = np.where(A)
        n_edge = senders.size
        # [1,0] for siblings, [0,1] for parent-child
        sibling_edges = np.tile([[1., 0.]], [n_edge, 1])

        # num_points, F0,...Fd
        # if positions is to be part of features then this should already be set in properties.
        # We don't concatenate here. Mainly because properties could be an image, etc.
        sibling_nodes = properties
        n_nodes = sibling_nodes.shape[0]

        sibling_node_offset = len(graph.nodes)
        for node, feature, position in zip(np.arange(sibling_node_offset, sibling_node_offset + n_nodes), sibling_nodes,
                                           positions):
            graph.add_node(node, features=feature)
            pos[node] = position[:2]

        # edges = np.stack([senders, receivers], axis=-1) + sibling_node_offset
        for u, v in zip(senders + sibling_node_offset, receivers + sibling_node_offset):
            graph.add_edge(u, v, features=np.array([1., 0.]))
            graph.add_edge(v, u, features=np.array([1., 0.]))
            sibling_edgelist.append((u, v))
            sibling_edgelist.append((v, u))

        # for virtual nodes
        sibling_graph = GraphsTuple(nodes=None,  # sibling_nodes,
                                    edges=None,
                                    senders=senders,
                                    receivers=receivers,
                                    globals=None,
                                    n_node=np.array([n_nodes]),
                                    n_edge=np.array([n_edge]))

        sibling_graph = graphs_tuple_to_networkxs(sibling_graph)[0]
        # completely connect
        connected_components = sorted(nx.connected_components(nx.Graph(sibling_graph)), key=len)
        _positions = []
        _properties = []
        for connected_component in connected_components:
            print("Found connected component {}".format(connected_component))
            indices = list(sorted(list(connected_component)))
            virtual_position, virtual_property = make_virtual_node(positions[indices, :], properties[indices, ...])
            _positions.append(virtual_position)
            _properties.append(virtual_property)

        virtual_positions = np.stack(_positions, axis=0)
        virtual_properties = np.stack(_properties, axis=0)

        ###
        # add virtual nodes
        # num_parents, 3+F
        parent_nodes = virtual_properties
        n_nodes = parent_nodes.shape[0]
        parent_node_offset = len(graph.nodes)
        parent_indices = np.arange(parent_node_offset, parent_node_offset + n_nodes)
        # adding the nodes to global graph
        for node, feature, virtual_position in zip(parent_indices, parent_nodes, virtual_positions):
            graph.add_node(node, features=feature)
            print("new virtual {}".format(node))
            pos[node] = virtual_position[:2]

        for parent_idx, connected_component in zip(parent_indices, connected_components):

            child_node_indices = [idx + sibling_node_offset for idx in list(sorted(list(connected_component)))]
            for child_node_idx in child_node_indices:
                graph.add_edge(parent_idx, child_node_idx, features=np.array([0., 1.]))
                graph.add_edge(child_node_idx, parent_idx, features=np.array([0., 1.]))
                parent_edgelist.append((parent_idx, child_node_idx))
                parent_edgelist.append((child_node_idx, parent_idx))
                print("connecting {}<->{}".format(parent_idx, child_node_idx))

        positions = virtual_positions
        properties = virtual_properties

    # plotting

    virtual_nodes = list(set(graph.nodes) - set(real_nodes))
    if plot:
        fig, ax = plt.subplots(1, 1, figsize=(12, 12))
        draw(graph, ax=ax, pos=pos, node_color='green', edgelist=[], nodelist=real_nodes)
        draw(graph, ax=ax, pos=pos, node_color='purple', edgelist=[], nodelist=virtual_nodes)
        draw(graph, ax=ax, pos=pos, edge_color='blue', edgelist=sibling_edgelist, nodelist=[])
        draw(graph, ax=ax, pos=pos, edge_color='red', edgelist=parent_edgelist, nodelist=[])
        plt.show()

    return networkxs_to_graphs_tuple([graph],
                                     node_shape_hint=[positions.shape[1] + properties.shape[1]],
                                     edge_shape_hint=[2])