def net(params):
    """Build the complete PNML document for a generated network.

    Parameters:
        params: 7-tuple (count, ntype, nodes, transitions, arcs, wp, acc)
            as returned by the generate_* functions; the trailing element
            (requested size) is unused here.

    Returns:
        str: the full PNML XML text — shared declarations, the routing net,
        the per-switch nets, the visited/waypoint/loop-freedom nets, and
        their combined query.
    """
    count, ntype, nodes, transitions, arcs, wp, _ = params
    xml_str = ""
    xml_str += "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n"
    xml_str += "<pnml xmlns=\"http://www.informatik.hu-berlin.de/top/pnml/ptNetb\">\n"
    # Shared declarations: controller place plus every transition.
    controller = Node(-1, "Controller", "1")
    xml_str += controller.shared_to_file()
    for transition in transitions:
        xml_str += transition.shared_to_file()
    # Routing net and the reachability query it contributes.
    xml, reach_query = routing(count, ntype, nodes, transitions, arcs)
    xml_str += xml
    # Switch nets; the switch count is not used here, so discard it explicitly.
    xml, _switch_count = BNC.switches_v2(nodes, transitions)
    xml_str += xml
    xml_str += ANC.visited(nodes, transitions)
    xml, wp_query = ANC.waypoint(nodes[0].id, nodes[-1].id, wp)
    xml_str += xml
    xml, loop_query = ANC.loopfreedom(nodes)
    xml_str += xml
    xml_str += ANC.combinedQuery(reach_query, wp_query, loop_query)
    xml_str += " <k-bound bound=\"3\"/>\n"
    xml_str += " <feature isGame=\"true\" isTimed=\"true\"/>\n"
    xml_str += "</pnml>"
    return xml_str
def generate_worst(count):
    """Generate a "Worst" topology of roughly ``count`` nodes.

    ``count`` is rounded down to the nearest value of the form 3k+1 so the
    path segments produced by ``pospath`` tile evenly.  Side effects: writes
    the preparation time to ``data/time/Worst/``, emits the JSON routing
    file via ``json_maker`` and the LTL file via ``Ltl.make_ltl``.

    Returns:
        tuple: (count, "Worst", nodes, transitions, arcs, wp, acc) where
        ``acc`` is the originally requested (pre-rounding) size.
    """
    start = time.time()
    # Generating initial and final nodes, and path configurations based on size.
    acc = count
    count = (int((count - 1) / 3)) * 3 + 1
    print(count)
    series = int((count - 1) / 3)
    nodes = []
    init_node = Node(0, "P0")
    init_node.init_route = 1
    final_node = Node(count - 1, f"P{count-1}")
    nodes.append(init_node)
    # Middle nodes: each initially routes to its successor.
    for i in range(count - 2):
        nodes.append(Node(i + 1, f"P{i+1}"))
        nodes[-1].init_route = i + 2
    nodes.append(final_node)
    init_route = []
    final_route = []
    for node in nodes[:-1]:
        init_route.append([node.id, node.init_route])
    # Final routing comes from pospath(); call it once per series instead of
    # twice (assumes pospath is deterministic — TODO confirm).
    for i in range(series):
        steps = pospath(i * 3)
        for t in steps:
            node = next((x for x in nodes if x.id == t[0]), None)
            node.final_route = t[1]
        final_route.extend(steps)
    # verified: waypoint = literally anything
    # non verified: waypoint = something after the reach..
    wp = final_node.id - 1
    # Record preparation time; `with` guarantees the handle is closed.
    with open(f"data/time/Worst/Worst_{acc}_PREP.txt", "w") as f:
        f.write(str(time.time() - start))
    # Making the json file.
    json_maker("Worst", acc, init_route, final_route, init_node.id, final_node.id, wp)
    # Making the ltl file.
    Ltl.make_ltl("Worst", acc)
    # Build one transition + arc per configured route on every node.
    transitions = []
    arcs = []
    for node in nodes:
        if node.init_route:
            t = Transition(f"T{node.id}_{node.init_route}", node.id, node.init_route, f"T{node.id}_{node.init_route}")
            transitions.append(t)
            a = Full_Arc(node, next((x for x in nodes if x.id == node.init_route), None), t)
            arcs.append(a)
        if node.final_route:
            t = Transition(f"T{node.id}_{node.final_route}", node.id, node.final_route, f"T{node.id}_{node.final_route}")
            transitions.append(t)
            a = Full_Arc(node, next((x for x in nodes if x.id == node.final_route), None), t)
            arcs.append(a)
    return count, "Worst", nodes, transitions, arcs, wp, acc
def routing_configuration(network, jsonParser, nodes, transitions):
    """Emit the active "Routings" P/T net for a parsed network plus its
    reachability query.

    A controller place is temporarily prepended to ``nodes`` while rendering
    and removed again before returning, so the caller's list ends up
    unchanged.

    Parameters:
        network: display name of the source network (used in a label).
        jsonParser: parsed routing data (init_route, final_route, reachability).
        nodes: list of Node objects (mutated only transiently, see above).
        transitions: list of Transition objects.

    Returns:
        tuple[str, str]: (xml fragment including the <query> element,
        reachability query string for later combination).
    """
    xml_str = ""
    controller = Node(-1, "Controller", "1")
    nodes.insert(0, controller)
    xml_str += controller.shared_to_file()
    for transition in transitions:
        xml_str += transition.shared_to_file()
    xml_str += " <net active=\"true\" id=\"{}\" type=\"P/T net\">\n".format("Routings")
    xml_str += make_label(0, 0, f"Extract from {network}.\n-Node Count: {len(nodes) - 1}\n-Transition Count: {len(transitions)}\n\n-Initial routing length: {len(jsonParser.init_route)}\n-Final routing length: {len(jsonParser.final_route)}\n\n\nPress Shift+D followed by Enter")
    xml_str += make_label(200, 0, f"Initial routing: {str(jsonParser.init_route)}\n\nFinal routing: {str(jsonParser.final_route)}")
    for node in nodes:
        xml_str += node.to_file()
    for transition in transitions:
        xml_str += transition.to_file()
    arcs = []
    for t in transitions:
        a = Full_Arc(get_node(t.source, nodes), get_node(t.target, nodes), t)
        arcs.append(a)
    # Inject packet: a transition from the controller into the first node of
    # the initial routing.
    inject = Transition(-2, nodes[0].id, jsonParser.init_route[0][0], "Inject_packet", "1")
    xml_str += inject.to_file()
    aa = Full_Arc(get_node(inject.source, nodes), get_node(inject.target, nodes), inject)
    arcs.append(aa)
    for arc in arcs:
        xml_str += arc.to_file()
    xml_str += "<place displayName=\"true\" id=\"Clock\" initialMarking=\"1\" invariant=\"<= 0\" name=\"Clock\" nameOffsetX=\"0\" nameOffsetY=\"0\" positionX=\"465\" positionY=\"45\"/>"
    injectpv = Node("P_u_visited", "P_u_visited", "1")
    xml_str += "<place displayName=\"true\" id=\"P_u_visited\" initialMarking=\"0\" invariant=\"< inf\" name=\"P_u_visited\" nameOffsetX=\"0\" nameOffsetY=\"0\" positionX=\"285\" positionY=\"180\"/>"
    xml_str += Outbound_Arc(inject, injectpv).to_file()
    # NOTE(review): inhibitor arcs from P_u_visited presumably block the
    # first hop once a packet has been injected — confirm against TAPN model.
    for t in transitions:
        if t.source == jsonParser.init_route[0][0]:
            xml_str += Inbound_Arc(injectpv, t, "tapnInhibitor", 2).to_file()
    xml_str += " </net>\n\n"
    # AG(!(deadlock) or Pv' >= 1)
    reach_query = "(!(deadlock) or Routings.P{}>=1)".format(jsonParser.reachability["finalNode"])
    q = "AG{}".format(reach_query)
    # NOTE(review): capacity is hardcoded to 10000 here, unlike routing()
    # which scales it with the node count — confirm this is intended.
    query = "<query active=\"true\" approximationDenominator=\"2\" capacity=\"10000\" discreteInclusion=\"false\" enableOverApproximation=\"false\" enableUnderApproximation=\"false\" extrapolationOption=\"null\" gcd=\"false\" hashTableSize=\"null\" inclusionPlaces=\"*NONE*\" name=\"{}\" overApproximation=\"true\" pTrie=\"true\" query=\"{}\" reduction=\"true\" reductionOption=\"VerifyTAPNdiscreteVerification\" searchOption=\"DFS\" symmetry=\"true\" timeDarts=\"false\" traceOption=\"NONE\" useStubbornReduction=\"true\"/>\n\n".format("Reach_P{}".format(jsonParser.reachability["finalNode"]), q)
    xml_str += query
    # BUGFIX: the original `nodes = nodes[1:]` only rebound the local name,
    # leaving the controller permanently inserted in the caller's list.
    # Deleting the element actually undoes the insert(0, controller) above.
    del nodes[0]
    return xml_str, reach_query
def full_network(g, network):
    """Render graph ``g`` as an inactive P/T net named ``network``.

    Parameters:
        g: a networkx-style graph (exposes .nodes(data=True) and .edges).
        network: id/name used for the <net> element and its label.

    Returns:
        str: XML fragment containing the net's places, transitions and arcs.
    """
    nodes_raw = list(g.nodes(data=True))
    edges_raw = list(g.edges)
    nodes = []
    transitions = []
    xml_str = ""
    for i in nodes_raw:
        n = Node(i[0], "P{}".format(i[0]))
        nodes.append(n)
    # BUGFIX/idiom: enumerate() gives each edge its position directly;
    # the original edges_raw.index(i) was O(n) per edge and returned the
    # first occurrence, producing duplicate ids for repeated edges.
    for idx, edge in enumerate(edges_raw):
        t = Transition(idx, edge[0], edge[1], "T{}_{}".format(edge[0], edge[1]))
        transitions.append(t)
    xml_str += " <net active=\"false\" id=\"{}\" type=\"P/T net\">\n".format(network)
    xml_str += make_label(0, 0, f"Network: {network}\nNode Count: {len(nodes)}\nTransition Count: {len(transitions)}\n\nPress Shift+D followed by Enter")
    for node in nodes:
        xml_str += node.to_file()
    for transition in transitions:
        xml_str += transition.to_file()
    arcs = []
    for t in transitions:
        a = Full_Arc(get_node(t.source, nodes), get_node(t.target, nodes), t)
        arcs.append(a)
    for arc in arcs:
        xml_str += arc.to_file()
    xml_str += " </net>\n\n"
    return xml_str
def routing(count, ntype, nodes, transitions, arcs):
    """Emit the active "Routings" P/T net for a generated topology.

    Appends the inject arc to the caller's ``arcs`` list (side effect).

    Parameters:
        count: total node count of the generated network.
        ntype: topology kind — "Shared", "Disjoint", or anything else
            (treated as a simple path of length ``count``).
        nodes: list of Node objects; nodes[0] is the source, nodes[-1]
            the destination.
        transitions: list of Transition objects.
        arcs: list of Full_Arc objects (mutated: inject arc appended).

    Returns:
        tuple[str, str]: (xml fragment including the <query> element,
        reachability query string for later combination).
    """
    # Query capacity scales with network size.
    cap = len(nodes) * 10
    controller = Node(-1, "Controller", "1")
    xml_str = ""
    xml_str += " <net active=\"true\" id=\"{}\" type=\"P/T net\">\n".format("Routings")
    # Routing-path length shown in the label depends on the topology kind.
    if ntype == "Shared":
        path_len = int((count-1)/3*2+1)
    elif ntype == "Disjoint":
        path_len = (int((count-3)/4) + 1) * 2 + 1
    else:
        path_len = count
    xml_str += make_label(0, 0, f"{ntype} network with {count} total nodes.\n\n-Initial routing length: {path_len}\n-Final routing length: {path_len}\n\n\nPress Shift+D followed by Enter")
    #xml_str += make_label(200, 0, f"Initial routing: {str(jsonParser.init_route)}\n\nFinal routing: {str(jsonParser.final_route)}")
    for node in nodes:
        xml_str += node.to_file()
    xml_str += controller.to_file()
    for transition in transitions:
        xml_str += transition.to_file()
    # inject packet: transition from the controller into the first node.
    inject = Transition(-2, controller.id, nodes[0].id, "Inject_packet", "1")
    xml_str += inject.to_file()
    aa = Full_Arc(controller, nodes[0], inject)
    arcs.append(aa)
    for arc in arcs:
        xml_str += arc.to_file()
    xml_str += "<place displayName=\"true\" id=\"Clock\" initialMarking=\"1\" invariant=\"<= 0\" name=\"Clock\" nameOffsetX=\"0\" nameOffsetY=\"0\" positionX=\"285\" positionY=\"45\"/>"
    injectpv = Node("P_u_visited", "P_u_visited", "1")
    xml_str += "<place displayName=\"true\" id=\"P_u_visited\" initialMarking=\"0\" invariant=\"< inf\" name=\"P_u_visited\" nameOffsetX=\"0\" nameOffsetY=\"0\" positionX=\"285\" positionY=\"180\"/>"
    xml_str += Outbound_Arc(inject, injectpv).to_file()
    # NOTE(review): inhibitor arcs from P_u_visited presumably prevent
    # re-taking the first hop once injected — confirm against TAPN model.
    for t in transitions:
        if t.source == nodes[0].id:
            xml_str += Inbound_Arc(injectpv, t, "tapnInhibitor", 2).to_file()
    xml_str += " </net>\n\n"
    # AG(!(deadlock) or final node visited) — reachability of the last node.
    reach_query = "(!(deadlock) or P{}_visited.P{}_visited>=1)".format(nodes[-1].id, nodes[-1].id)
    q = "AG{}".format(reach_query)
    query = "<query active=\"true\" approximationDenominator=\"2\" capacity=\"{}\" discreteInclusion=\"false\" enableOverApproximation=\"false\" enableUnderApproximation=\"false\" extrapolationOption=\"null\" gcd=\"false\" hashTableSize=\"null\" inclusionPlaces=\"*NONE*\" name=\"{}\" overApproximation=\"true\" pTrie=\"true\" query=\"{}\" reduction=\"true\" reductionOption=\"VerifyTAPNdiscreteVerification\" searchOption=\"DFS\" symmetry=\"true\" timeDarts=\"false\" traceOption=\"NONE\" useStubbornReduction=\"true\"/>\n\n".format(cap,"Reach_P{}".format(nodes[-1].id), q)
    xml_str += query
    return xml_str, reach_query
def switches_v2(nodes, transitions):
    """Emit one "<node>_Switch" net per node whose initial and final routes differ.

    Each switch net contains the controller, an update transition that moves
    a token from the node's initial place to its final place, and arcs tying
    the routing transitions to those places.

    Parameters:
        nodes: list of Node objects with init_route/final_route attributes.
        transitions: list of Transition objects for the routing net.

    Returns:
        tuple[str, int]: (xml fragment with all switch nets, switch count).
    """
    controller = Node(-1, "Controller", "1")
    controller.x = 100
    controller.y = 100
    xml_str = ""
    # A node needs a switch only when it has both routes and they differ.
    switch_nodes = []
    for node in nodes:
        if node.init_route and node.final_route:
            if node.init_route != node.final_route:
                switch_nodes.append(node)
    for node in switch_nodes:
        # NOTE(review): the controller *object* is passed as the transition
        # source here, whereas ids are used elsewhere — confirm intended.
        update_transition = Transition(f"Update_{node.notation}", controller, None, f"Update_{node.notation}")
        update_transition.x, update_transition.y = 300, 100
        initial_place = Node(f"P{node.id}_initial", f"P{node.id}_initial", "1")
        initial_place.x, initial_place.y = 500, 100
        final_place = Node(f"P{node.id}_final", f"P{node.id}_final")
        final_place.x, final_place.y = 500, 300
        initial_transition, final_transition = None, None
        # Idiom: `is not None` instead of `!= None`; ROBUSTNESS: guard the
        # next(..., None) result before setting coordinates — the original
        # raised AttributeError when no matching transition existed.
        if node.init_route is not None:
            initial_transition = next((x for x in transitions if x.source == node.id and x.target == node.init_route), None)
            if initial_transition is not None:
                initial_transition.x, initial_transition.y = 700, 100
        if node.final_route is not None:
            final_transition = next((x for x in transitions if x.source == node.id and x.target == node.final_route), None)
            if final_transition is not None:
                final_transition.x, final_transition.y = 700, 300
        xml_str += f" <net active=\"true\" id=\"{node.notation}_Switch\" type=\"P/T net\">\n"
        xml_str += controller.to_file()
        xml_str += update_transition.to_file()
        xml_str += Inbound_Arc(controller, update_transition, "timed", "1").to_file()
        xml_str += Outbound_Arc(update_transition, controller).to_file()
        xml_str += initial_place.to_file()
        xml_str += final_place.to_file()
        xml_str += Inbound_Arc(initial_place, update_transition, "timed", "1").to_file()
        xml_str += Outbound_Arc(update_transition, final_place).to_file()
        if initial_transition:
            xml_str += initial_transition.to_file()
            xml_str += Inbound_Arc(initial_place, initial_transition, "timed", "1").to_file()
            xml_str += Outbound_Arc(initial_transition, initial_place).to_file()
        if final_transition:
            xml_str += final_transition.to_file()
            xml_str += Inbound_Arc(final_place, final_transition, "timed", "1").to_file()
            xml_str += Outbound_Arc(final_transition, final_place).to_file()
        xml_str += " </net>\n\n"
    return xml_str, len(switch_nodes)
def generate_disjoint(count):
    """Generate a "Disjoint" topology of roughly ``count`` nodes.

    ``count`` is rounded to the form 4k+3 so two equal-length disjoint
    paths (path1 for the initial routing, path2 for the final routing) can
    share only the init, mid and final nodes.  Side effects: writes the
    preparation time to ``data/time/Disjoint/``, emits the JSON routing
    file and the LTL file.

    Returns:
        tuple: (count, "Disjoint", nodes, transitions, arcs, wp, acc) where
        ``acc`` is the originally requested (pre-rounding) size.
    """
    start = time.time()
    # Generating initial and final nodes, and path configurations based on size.
    acc = count
    count = (int((count - 3) / 4) + 1) * 4 + 3
    init_node = Node(0, "P0")
    final_node = Node(count - 1, f"P{count-1}")
    mid_node = Node(count - 2, f"P{count-2}")
    path_count = (int((count - 3) / 4) + 1) * 2 + 1
    node_path_count = (int((count - 3) / 4)) * 2
    # Hoisted: number of nodes on each half of a path (before/after mid_node).
    half = int(node_path_count / 2)
    path1 = []
    path2 = []
    # Creating nodes for the 2 paths.
    for i in range(node_path_count):
        path1.append(Node(i + 1, f"P{i+1}"))
        path2.append(Node(i + 1 + node_path_count, f"P{i+1+node_path_count}"))
    init_route = []
    final_route = []
    # Initial routing follows path1, final routing follows path2.
    init_node.init_route = path1[0].id
    init_node.final_route = path2[0].id
    init_route.append([init_node.id, init_node.init_route])
    final_route.append([init_node.id, init_node.final_route])
    # First half of each path, chained node-to-node.
    for i in range(half - 1):
        path1[i].init_route = path1[i + 1].id
        init_route.append([path1[i].id, path1[i + 1].id])
        path2[i].final_route = path2[i + 1].id
        final_route.append([path2[i].id, path2[i + 1].id])
    # Both halves funnel through the shared mid node.
    path1[half - 1].init_route = mid_node.id
    path2[half - 1].final_route = mid_node.id
    init_route.append([path1[half - 1].id, mid_node.id])
    final_route.append([path2[half - 1].id, mid_node.id])
    mid_node.init_route = path1[half].id
    mid_node.final_route = path2[half].id
    init_route.append([mid_node.id, path1[half].id])
    final_route.append([mid_node.id, path2[half].id])
    # Second half of each path.
    for i in range(half, node_path_count - 1):
        path1[i].init_route = path1[i + 1].id
        path2[i].final_route = path2[i + 1].id
        init_route.append([path1[i].id, path1[i + 1].id])
        final_route.append([path2[i].id, path2[i + 1].id])
    path1[-1].init_route = final_node.id
    path2[-1].final_route = final_node.id
    init_route.append([path1[-1].id, final_node.id])
    final_route.append([path2[-1].id, final_node.id])
    # verified: waypoint = mid_node.id
    # non verified: waypoint = literally anything else
    wp = mid_node.id
    # Record preparation time; `with` guarantees the handle is closed.
    with open(f"data/time/Disjoint/Disjoint_{acc}_PREP.txt", "w") as f:
        f.write(str(time.time() - start))
    print(f"Prep time Disjoint size {acc}: {time.time()-start} seconds")
    # Making the json file.
    json_maker("Disjoint", acc, init_route, final_route, init_node.id, final_node.id, wp)
    # Making the ltl file.
    Ltl.make_ltl("Disjoint", acc)
    # Generating arcs and transitions based on nodes.
    nodes = []
    nodes.append(init_node)
    nodes.extend(path1 + path2)
    nodes.append(mid_node)
    nodes.append(final_node)
    transitions = []
    arcs = []
    for node in nodes:
        if node.init_route:
            t = Transition(f"T{node.id}_{node.init_route}", node.id, node.init_route, f"T{node.id}_{node.init_route}")
            transitions.append(t)
            a = Full_Arc(node, next((x for x in nodes if x.id == node.init_route), None), t)
            arcs.append(a)
        if node.final_route:
            t = Transition(f"T{node.id}_{node.final_route}", node.id, node.final_route, f"T{node.id}_{node.final_route}")
            transitions.append(t)
            a = Full_Arc(node, next((x for x in nodes if x.id == node.final_route), None), t)
            arcs.append(a)
    return count, "Disjoint", nodes, transitions, arcs, wp, acc
def generate_shared(count):
    """Generate a "Shared" topology of roughly ``count`` nodes.

    ``count`` is rounded down to the form 3k+1; the initial and final
    routings use disjoint side paths (path1/path2) that repeatedly merge
    through shared common nodes.  Side effects: writes the preparation time
    to ``data/time/Shared/``, emits the JSON routing file and the LTL file.

    Returns:
        tuple: (count, "Shared", nodes, transitions, arcs, wp, acc) where
        ``acc`` is the originally requested (pre-rounding) size.
    """
    start = time.time()
    # Generating initial and final nodes, and path configurations based on size.
    acc = count
    count = (int((count - 1) / 3)) * 3 + 1
    common_count = int((count - 4) / 3)
    common = []
    path_count = int((count - 2 - common_count) / 2)
    path1 = []
    path2 = []
    init_node = Node(0, "P0")
    final_node = Node(count - 1, f"P{count-1}")
    # Making the common nodes (final node acts as the last common hop).
    for i in range(common_count):
        common.append(Node(i + 1, f"P{i+1}"))
    common.append(final_node)
    # Making the routings: path1 feeds the initial routing, path2 the final
    # one; both funnel into the shared common nodes.
    for i in range(path_count):
        path1.append(Node(i + path_count, f"P{i+path_count}"))
        path1[-1].init_route = common[i].id
        path2.append(Node(i + 2 * path_count, f"P{i+2*path_count}"))
        path2[-1].final_route = common[i].id
    for i in range(common_count):
        common[i].init_route = path1[i + 1].id
        common[i].final_route = path2[i + 1].id
    init_node.init_route = path1[0].id
    init_node.final_route = path2[0].id
    # Making a json file out of the routings.
    init_route = []
    final_route = []
    init_route.append([init_node.id, path1[0].id])
    final_route.append([init_node.id, path2[0].id])
    for i in range(path_count - 1):
        init_route.append([path1[i].id, path1[i].init_route])
        init_route.append([path1[i].init_route, path1[i + 1].id])
        final_route.append([path2[i].id, path2[i].final_route])
        final_route.append([path2[i].final_route, path2[i + 1].id])
    init_route.append([path1[-1].id, final_node.id])
    final_route.append([path2[-1].id, final_node.id])
    # verified: waypoint = 1
    # non verified: waypoint = path_count
    wp = 1
    # Record preparation time; `with` guarantees the handle is closed.
    with open(f"data/time/Shared/Shared_{acc}_PREP.txt", "w") as f:
        f.write(str(time.time() - start))
    print(f"Prep time Shared size {acc}: {time.time()-start} seconds")
    # Making the json file.
    json_maker("Shared", acc, init_route, final_route, init_node.id, final_node.id, wp)
    # Making the ltl file.
    Ltl.make_ltl("Shared", acc)
    # Generating arcs and transitions.
    nodes = []
    nodes.append(init_node)
    nodes.extend(common[:-1] + path1 + path2)
    nodes.append(final_node)
    transitions = []
    arcs = []
    for node in nodes:
        if node.init_route:
            t = Transition(f"T{node.id}_{node.init_route}", node.id, node.init_route, f"T{node.id}_{node.init_route}")
            transitions.append(t)
            a = Full_Arc(node, next((x for x in nodes if x.id == node.init_route), None), t)
            arcs.append(a)
        if node.final_route:
            t = Transition(f"T{node.id}_{node.final_route}", node.id, node.final_route, f"T{node.id}_{node.final_route}")
            transitions.append(t)
            a = Full_Arc(node, next((x for x in nodes if x.id == node.final_route), None), t)
            arcs.append(a)
    return count, "Shared", nodes, transitions, arcs, wp, acc
def attach_node(self, excludedRows=(), parentNode=None, direction="left"):
    """Recursively build the decision tree by attaching nodes.

    Parameters:
        excludedRows (tuple): row indices that must not be included in this
            split's computation
        parentNode (Node): parent node to attach the new node to
        direction (string): whether the new child hangs on the parent's
            "left" or "right" side

    Returns:
        None
    """
    attrThresholds = self.pruning(excludedRows)
    if len(attrThresholds) > 0:
        attr, threshold = attrThresholds[0][0], attrThresholds[0][1]
        # Create new node instance; the very first one becomes the root.
        newNode = Node(attr, threshold, "root" if self.tree is None else "branch")
        if self.tree is None:
            self.tree = newNode
        # Get left and right child row indices for the node.
        left, right = self.get_child_nodes(attr, threshold, excludedRows)
        # Get data exclusion for each child.
        # NOTE(review): once excludedRows is an ndarray (from np.append in a
        # recursive call), `excludedRows == ()` is an elementwise comparison,
        # not a tuple-emptiness test — confirm intended behavior.
        leftExclusion = left if excludedRows == () else np.append(
            left, excludedRows)
        rightExclusion = right if excludedRows == () else np.append(
            right, excludedRows)
        # Get left and right data.
        leftData = self.npdata[left]
        rightData = self.npdata[right]
        # Count label occurrence for each child (last column holds the label).
        leftLabel, leftCount = np.unique(leftData[:, -1], return_counts=True)
        rightLabel, rightCount = np.unique(rightData[:, -1], return_counts=True)
        leftDataCount = len(left)
        rightDataCount = len(right)
        labels = np.unique(np.append(leftLabel, rightLabel))
        labelCount = len(labels)
        # Attach child node to its parent.
        if parentNode is not None:
            if direction == "left":
                parentNode.set_left_child(newNode)
            elif direction == "right":
                parentNode.set_right_child(newNode)
        # Set node type to label if there is only one label.
        if labelCount == 1:
            newNode.set_type("leaf")
            newNode.set_label(labels[0])
            print(f"Leaf attached: {labels[0]}")
        elif labelCount == 2:
            if len(leftLabel) == 1:
                leftLeafNode = Node("Label", threshold, "leaf")
                leftLeafNode.set_label(leftLabel[0])
                newNode.set_left_child(leftLeafNode)
                print(f"Leaf attached: {leftLabel[0]}")
            if len(rightLabel) == 1:
                rightLeafNode = Node("Label", threshold, "leaf")
                rightLeafNode.set_label(rightLabel[0])
                newNode.set_right_child(rightLeafNode)
                print(f"Leaf attached: {rightLabel[0]}")
        else:
            if rightDataCount > 0:
                if rightDataCount == 1:
                    rightLeafNode = Node("Label", threshold, "leaf")
                    rightLeafNode.set_label(rightLabel[0])
                    newNode.set_right_child(rightLeafNode)
                    print(f"Leaf attached: {rightLabel[0]}")
                else:
                    # Recurse on the right child, excluding the left rows.
                    self.attach_node(leftExclusion, newNode, "right")
            if leftDataCount > 0:
                # BUGFIX: this branch previously tested `rightDataCount == 1`
                # (copy-paste from the right-hand branch above), so a
                # single-row LEFT child was decided by the RIGHT side's size.
                # Mirror of the right branch: test the left count.
                if leftDataCount == 1:
                    leftLeafNode = Node("Label", threshold, "leaf")
                    leftLeafNode.set_label(leftLabel[0])
                    newNode.set_left_child(leftLeafNode)
                    print(f"Leaf attached: {leftLabel[0]}")
                else:
                    # Recurse on the left child, excluding the right rows.
                    self.attach_node(rightExclusion, newNode, "left")
def parse_nodes(nodes_raw, marking):
    """Wrap each raw node id in a Node named 'P<id>' with the given marking.

    Parameters:
        nodes_raw: iterable of node identifiers.
        marking: initial marking passed through to every Node.

    Returns:
        list: one Node per entry of ``nodes_raw``, in order.
    """
    return [Node(node_id, f"P{node_id}", marking) for node_id in nodes_raw]