def test_combined_loss(self):
    nodes = [
        Node(Attribute(Tensor([-4., -8.]))),
        Node(Attribute(Tensor([1., 5.]))),
        Node(Attribute(Tensor([4., 4.]))),
        Node(Attribute(Tensor([0., 1., 5.])))
    ]
    edges = [
        Edge(nodes[0], nodes[1], Attribute(Tensor([1., 2., 3.]))),
        Edge(nodes[1], nodes[2], Attribute(Tensor([1., 2.]))),
        Edge(nodes[2], nodes[1], Attribute(Tensor([5.]))),
        Edge(nodes[1], nodes[3], Attribute(Tensor([1., 2., 3., 4.])))
    ]
    u = Attribute(
        Tensor([[1., 2., 4., 3.], [8., 3., 0., 3.], [1., 7., 5., 3.]]))
    g1 = Graph(nodes, edges, attr=u)
    g2 = deepcopy(g1)
    g2.ordered_nodes[0].attr.val = Tensor([-4., -8.1])
    g2.ordered_nodes[1].attr.val = Tensor([2., 6.])
    g2.ordered_nodes[3].attr.val = Tensor([1., 1.5, 5.])
    g2.ordered_edges[0].attr.val = Tensor([2., 3., 4.])
    g2.ordered_edges[1].attr.val = Tensor([5., 10.])
    g2.attr.val = Tensor([[2., 2., 4., 3.], [100, 3., 1., 3.],
                          [1., 14., 5., 3.]])

    loss = GraphLoss(e_fn=MSELoss(), v_fn=L1Loss(), u_fn=MSELoss())
    loss_val = loss(g1, g2).detach().numpy()

    # per-edge MSE, averaged over the four edges
    e_loss = (1. + (4**2 + 8**2) / 2) / 4
    # per-node L1 loss, averaged over the four nodes
    v_loss = (.1 / 2 + 2. / 2 + (1 + .5) / 3) / 4
    # MSE over the 12 entries of the global state
    u_loss = (1 + (8 - 100)**2 + 1 + 7**2) / 12 / 1
    target_loss_val = v_loss + e_loss + u_loss
    self.assertTrue(np.isclose(loss_val, target_loss_val))
def test_vector_edge_loss(self):
    nodes = [Node(), Node(), Node()]
    edges = [
        Edge(nodes[0], nodes[1], Attribute(Tensor([-50., -10., -5.]))),
        Edge(nodes[1], nodes[0], Attribute(Tensor([-40., 100., 120.]))),
        Edge(nodes[0], nodes[2], Attribute(Tensor([1., 3., 4.]))),
        Edge(nodes[0], nodes[0], Attribute(Tensor([2., 2., 2.])))
    ]
    g1 = Graph(nodes, edges)
    g2 = deepcopy(g1)
    g2.ordered_edges[0].attr.val = Tensor([-45., -11., -5.])
    g2.ordered_edges[1].attr.val = Tensor([-40., 200., 121.])
    g2.ordered_edges[2].attr.val = Tensor([1.1, 3., 3.9])
    g2.ordered_edges[3].attr.val = Tensor([2., 2., 2.1])

    loss = GraphLoss(e_fn=MSELoss())
    loss_val = loss(g1, g2).detach().numpy()

    # division by 3 because there are three entries per vector
    # division by 4 because there are four edges
    target_loss_val = ((-50. + 45.)**2 + (-10. + 11.)**2 + (-40. + 40.)**2 +
                       (100. - 200.)**2 + (120. - 121.)**2 + (1 - 1.1)**2 +
                       (4. - 3.9)**2 + (2 - 2.1)**2) / 3 / 4
    self.assertTrue(np.isclose(loss_val, target_loss_val))
def test_graph_equality(self):
    v_0, v_1, v_2 = Node(Attribute(0.)), Node(Attribute(1.)), Node(
        Attribute(2.))
    vs = [v_0, v_1, v_2]  # nodes
    es = [Edge(v_0, v_1), Edge(v_0, v_2)]  # edges
    g_0 = Graph(nodes=vs, edges=es)

    v_0, v_1, v_2 = Node(Attribute(0.)), Node(Attribute(1.)), Node(
        Attribute(2.))
    vs = [v_0, v_1, v_2]  # nodes
    es = [Edge(v_0, v_1), Edge(v_0, v_2)]  # edges
    g_1 = Graph(nodes=vs, edges=es)

    self.assertTrue(g_0 == g_1)
def test_integrity_check(self):
    """
    Create one valid and one invalid graph; the invalid graph has an edge
    that points to a node which is not contained in the set of nodes.
    """
    v_0, v_1, v_2 = Node(), Node(), Node()
    v_3 = Node()
    vs = [v_0, v_1, v_2]  # nodes
    es = [Edge(v_0, v_1), Edge(v_0, v_2)]
    Graph(nodes=vs, edges=es)  # passes for the valid edge set

    es.append(Edge(v_1, v_3))  # v_3 is not part of the node set
    with self.assertRaises(ValueError):
        Graph(nodes=vs, edges=es)
def add_reflexive_edges(
        self,
        attribute_generator: Optional[Callable[[Node], Attribute]] = None):
    """Add a self-loop edge to every node. `attribute_generator` creates the
    attribute of each new edge; by default an empty Attribute is used."""
    if attribute_generator is None:
        attribute_generator = lambda _: Attribute()
    for n in self.nodes:
        e = Edge(n, n, attribute_generator(n))
        self.add_edge(e)
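# Illustrative usage sketch for `add_reflexive_edges` (not part of the original
# test suite). It assumes the Graph/Node/Edge/Attribute API used in the tests
# above and that a graph may be constructed with an empty edge list.
def _sketch_add_reflexive_edges():
    nodes = [Node(Attribute(1.)), Node(Attribute(2.))]
    g = Graph(nodes=nodes, edges=[])
    # give every self-loop a zero-valued attribute
    g.add_reflexive_edges(lambda n: Attribute(0.))
    assert len(g.ordered_edges) == len(nodes)  # one self-loop per node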
def test_basic(self):
    """
    Basic test w/o PyTorch: all attributes are scalars, edges do not have
    attributes. Feeds a graph through a basic graph block twice and compares
    to the target values after both passes.
    """
    # create data structure
    v_0, v_1, v_2 = Node(Attribute(1)), Node(Attribute(10)), Node(
        Attribute(20))
    vs = [v_0, v_1, v_2]  # nodes
    es = [Edge(v_0, v_1), Edge(v_0, v_2), Edge(v_1, v_2)]
    g_0 = Graph(nodes=vs, edges=es, attr=Attribute(0))

    # create block w/ functions
    block = GNBlock(phi_e=SenderIdentityEdgeUpdate(),
                    phi_v=EdgeNodeSumNodeUpdate(),
                    phi_u=MixedGlobalStateUpdate(),
                    rho_ev=ScalarSumAggregation(),
                    rho_vu=ScalarSumAggregation(),
                    rho_eu=ScalarSumAggregation())

    g_1 = block(g_0)

    v_0, v_1, v_2 = Node(Attribute(1)), Node(Attribute(10 + 1)), Node(
        Attribute(20 + 11))
    vs = [v_0, v_1, v_2]  # nodes
    es = [
        Edge(v_0, v_1, Attribute(1)),
        Edge(v_0, v_2, Attribute(1)),
        Edge(v_1, v_2, Attribute(10))
    ]
    g_1_target = Graph(nodes=vs, edges=es, attr=Attribute(35))
    self.assertTrue(g_1 == g_1_target)

    g_2 = block(g_1)

    v_0, v_1, v_2 = Node(Attribute(1)), Node(Attribute(10 + 2)), Node(
        Attribute(20 + 11 + 12))
    vs = [v_0, v_1, v_2]  # nodes
    es = [
        Edge(v_0, v_1, Attribute(1)),
        Edge(v_0, v_2, Attribute(1)),
        Edge(v_1, v_2, Attribute(11))
    ]
    g_2_target = Graph(nodes=vs, edges=es, attr=Attribute(1 + 12 + 43 - 35))
    self.assertTrue(g_2 == g_2_target)
def test_identity_property(self):
    nodes = [
        Node(Attribute([1, 23, 4])),
        Node(Attribute("stringattr")),
        Node(Attribute([1])),
        Node(Attribute(5))
    ]
    edges = [
        Edge(nodes[0], nodes[1], Attribute({'dict': 1234})),
        Edge(nodes[0], nodes[1], Attribute([1, 2, 3])),
        Edge(nodes[0], nodes[2], Attribute(5)),
        Edge(nodes[1], nodes[2], Attribute([3, 4, 5])),
        Edge(nodes[1], nodes[1], Attribute())
    ]
    global_state = Attribute([[1, 2, 3], [5, 6, 7]])
    g1 = Graph(nodes, edges, global_state)

    g1_prime = Graph.from_dict(g1.asdict())
    self.assertTrue(g1 == g1_prime)
def test_scalar_edge_loss(self):
    nodes = [Node(), Node(), Node()]
    edges = [
        Edge(nodes[0], nodes[1], Attribute(Tensor([-50.]))),
        Edge(nodes[1], nodes[0], Attribute(Tensor([-40.]))),
        Edge(nodes[0], nodes[2], Attribute(Tensor([1.])))
    ]
    g1 = Graph(nodes, edges)
    g2 = deepcopy(g1)
    g2.ordered_edges[0].attr.val = Tensor([-45.])
    g2.ordered_edges[1].attr.val = Tensor([-40.])
    g2.ordered_edges[2].attr.val = Tensor([1.1])

    loss = GraphLoss(e_fn=MSELoss())
    loss_val = loss(g1, g2).detach().numpy()

    target_loss_val = ((-50. + 45.)**2 + (-40. + 40.)**2 + (1 - 1.1)**2) / 3
    self.assertTrue(np.isclose(loss_val, target_loss_val))
def bi_directional(g: Graph) -> Graph:
    """Return a copy of `g` with a reversed counterpart for every edge and a
    reflexive edge for every node."""
    g = deepcopy(g)
    new_edges = set()
    for e in g.edges:
        # the reversed edge shares the attribute of the original edge
        new_edges.add(Edge(sender=e.receiver, receiver=e.sender, attr=e.attr))
    g.add_reflexive_edges()
    for e in new_edges:
        # use add_edge so that `edges` and `ordered_edges` stay in sync
        g.add_edge(e)
    return g
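# Illustrative usage sketch for `bi_directional` (not part of the original
# code). It assumes the Graph/Node/Edge/Attribute API used elsewhere in this
# file; the result contains the reversed edges plus one self-loop per node,
# since `bi_directional` also calls `add_reflexive_edges`.
def _sketch_bi_directional():
    nodes = [Node(Attribute(0.)), Node(Attribute(1.))]
    g = Graph(nodes=nodes, edges=[Edge(nodes[0], nodes[1], Attribute(5.))])
    g_bi = bi_directional(g)
    # original edge + reversed edge + two reflexive edges
    assert len(g_bi.edges) == 4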
def test_forward_pass(self):
    linear_block = LinearIndependentGNBlock(e_config=(4, 8, True),
                                            v_config=(1, 1, False),
                                            u_config=(16, 16, True))

    nodes = Node.from_vals(
        [torch.randn(1), torch.randn(1), torch.randn(1)])
    edges = [
        Edge(nodes[0], nodes[1], Attribute(torch.randn(4))),
        Edge(nodes[1], nodes[2], Attribute(torch.randn(4))),
        Edge(nodes[2], nodes[1], Attribute(torch.randn(4)))
    ]
    g_in = Graph(nodes, edges, Attribute(torch.randn(16)))

    # noinspection PyUnusedLocal
    g_out = linear_block(g_in)
    # the assertion is that the forward pass works without errors
    self.assertTrue(True)
def load_graph(self, path: str) -> Graph:
    # read JSON from the page description file
    json_file_path = glob(os.path.join(path, '*.json'))
    assert len(json_file_path) == 1, \
        "Number of json files in '{}' must be exactly one.".format(path)
    json_file_path = json_file_path[0]
    with open(json_file_path, encoding='utf-8') as json_file:
        pages_json: List = json.load(json_file)

    # read screenshot paths
    imgs = self.load_images(os.path.join(path, 'image'))
    assert len(pages_json) == len(imgs), \
        "Number of pages and number of screenshots mismatch in '{}'.".format(path)

    # extract nodes
    nodes = {}
    for page_json in pages_json:
        desktop_img, mobile_img = imgs[page_json['id'] - 1]
        node_attribute = PageAttribute.from_json(page_json, desktop_img,
                                                 mobile_img)
        url = page_json['base_url']
        if url in nodes:
            logging.debug("Found two nodes with the same URL.")
            continue
        node = Node(node_attribute)
        nodes[url] = node

    # extract edges
    edges = set()
    for page_json in pages_json:
        url = page_json['base_url']
        source_node = nodes[url]
        for edge_json in page_json.get('urls', []):
            target_url = edge_json['url']
            if target_url not in nodes:
                logging.debug("Invalid link target URL. Could not find a "
                              "node that corresponds to it.")
                continue
            target_node = nodes[target_url]
            edge_attribute = LinkAttribute(target_url)
            edge = Edge(sender=source_node, receiver=target_node,
                        attr=edge_attribute)
            edges.add(edge)

    return Graph(nodes=list(nodes.values()), edges=list(edges),
                 attr=Attribute(None))
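# Illustrative sketch of the directory layout `load_graph` expects, inferred
# from the code above rather than from separate documentation. The file and
# directory names below are hypothetical examples:
#
#   <path>/
#       pages.json   # list of pages with 'id', 'base_url' and 'urls': [{'url': ...}]
#       image/       # screenshots; load_images returns one (desktop, mobile) pair per page
#
# A hypothetical call, assuming `dataset` is an instance of the loader class:
#   graph = dataset.load_graph('/data/websites/example_site')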
def test_learn_identity(self):
    # construct input graph with random values
    nodes = Node.from_vals(
        [torch.randn(1), torch.randn(1), torch.randn(1)])
    edges = [
        Edge(nodes[0], nodes[1], Attribute(torch.randn(4))),
        Edge(nodes[1], nodes[2], Attribute(torch.randn(4))),
        Edge(nodes[2], nodes[1], Attribute(torch.randn(4)))
    ]
    g_in = Graph(nodes, edges, Attribute(torch.randn(16)))
    g_target = deepcopy(g_in)

    block_1 = LinearIndependentGNBlock(e_config=(4, 8, True),
                                       v_config=(1, 12, True),
                                       u_config=(16, 16, True))
    block_2 = LinearIndependentGNBlock(e_config=(8, 4, False),
                                       v_config=(12, 1, False),
                                       u_config=(16, 16, False))
    model = torch.nn.Sequential(block_1, block_2)

    opt = optim.SGD(model.parameters(), lr=.1, momentum=0)
    loss_fn = GraphLoss(e_fn=MSELoss(), v_fn=MSELoss(), u_fn=MSELoss())

    loss = torch.Tensor([1.])
    for step in range(100):
        model.train()
        opt.zero_grad()
        g_out = model(g_in)
        loss = loss_fn(g_out, g_target)
        loss.backward()
        opt.step()

    final_loss = loss.detach().numpy()
    print(final_loss)
    self.assertTrue(final_loss < 1e-3)
def add_all_edges(self,
                  reflexive: bool = True,
                  attribute_generator: Optional[Callable[[Node, Node], Attribute]] = None) -> None:
    """
    Modifies the graph in-place such that it is fully connected, adding n^2
    directed edges (n * (n - 1) if `reflexive` is False), where n is the
    number of nodes.

    :param reflexive: Whether to connect nodes to themselves
    :param attribute_generator: New edges will be given an attribute
        generated by the attribute generator
    """
    if attribute_generator is None:
        attribute_generator = lambda sn, rn: Attribute()
    for n1 in self.nodes:
        for n2 in self.nodes:
            if not reflexive and n1 == n2:
                continue
            e = Edge(n1, n2, attribute_generator(n1, n2))
            self.add_edge(e)
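# Illustrative usage sketch for `add_all_edges` (not part of the original test
# suite). It assumes the Graph/Node/Edge/Attribute API used in the tests above
# and a graph that starts without edges.
def _sketch_add_all_edges():
    nodes = [Node(Attribute(i)) for i in range(3)]
    g = Graph(nodes=nodes, edges=[])
    g.add_all_edges(reflexive=False,
                    attribute_generator=lambda sn, rn: Attribute(0.))
    assert len(g.ordered_edges) == 3 * 2  # n * (n - 1) directed edges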
def add_edge(self, new_edge: Edge) -> None:
    new_edge.to(self.device)
    self.edges.add(new_edge)
    self.ordered_edges.append(new_edge)
    self._check_integrity()
def from_dict(d: Dict) -> 'Graph':
    nodes_dict = {
        k: Node.from_dict(node_dict)
        for k, node_dict in d['nodes'].items()
    }
    nodes = list(nodes_dict.values())
    edges = [Edge.from_dict(e, nodes_dict) for e in d['edges']]
    attr = Attribute.from_dict(d['attr'])
    return Graph(nodes, edges, attr)