def __set_pipeline_dependencies(self, node):
    """Wire *node* into the pipeline graph in both directions.

    Forward: for each output port, follow links to downstream ports and
    connect every rule there whose match overlaps the headerspace that
    survives this node. Backward: symmetrically connect upstream rules
    whose surviving headerspace overlaps this node's match.
    """
    # Forward direction: node -> downstream rules.
    for out_port in node.output_ports:
        for dst_port in self.get_dst_end_of_link(out_port):
            for candidate in self.__get_rules_by_input_port(dst_port):
                surviving = wildcard_intersect(candidate.match,
                                               node.inverse_match)
                if surviving.is_empty():
                    continue
                node.set_next_in_pipeline(candidate, surviving,
                                          out_port, dst_port)
                candidate.set_previous_in_pipeline(node, surviving,
                                                   dst_port, out_port)
    # Backward direction: upstream rules -> node.
    for in_port in node.input_ports:
        for src_port in self.get_src_end_of_link(in_port):
            for candidate in self.__get_rules_by_output_port(src_port):
                surviving = wildcard_intersect(node.match,
                                               candidate.inverse_match)
                if surviving.is_empty():
                    continue
                candidate.set_next_in_pipeline(node, surviving,
                                               src_port, in_port)
                node.set_previous_in_pipeline(candidate, surviving,
                                              in_port, src_port)
def compose_standard_rules(rule1, rule2):
    """Compose two standard transfer-function rules into a single rule.

    Returns a rule equivalent to applying rule1 and then rule2, or None
    when the rules cannot be chained (no common mid-port, or the
    composed match is empty). A rule with match == None is a link rule;
    a rule with mask == None is a forwarding (non-rewrite) rule.
    """
    # The composition only exists if some port connects rule1 to rule2.
    mid_ports = [val for val in rule2["in_ports"] if val in rule1["out_ports"]]
    if len(mid_ports) == 0:
        return None

    # ## finding match
    # rule 2 is a link rule
    if rule2["match"] is None:
        match = wildcard_copy(rule1["match"])
    else:
        # if rule 1 is a fwd or link rule
        if rule1["mask"] is None:
            # if rule 1 is a link rule
            if rule1["match"] is None:
                match = wildcard_copy(rule2["match"])
            else:
                match = wildcard_intersect(rule2["match"], rule1["match"])
        # if rule 1 is a rewrite rule: pull rule2's match back through
        # rule1's rewrite before intersecting with rule1's match.
        else:
            match_inv = wildcard_or(
                wildcard_and(rule2["match"], rule1['mask']),
                rule1['inverse_rewrite'])
            match = wildcard_intersect(match_inv, rule1["match"])
    if len(match) == 0:
        return None

    # ## finding mask and rewrite
    mask = None
    rewrite = None
    if rule2["mask"] is None:
        mask = rule1["mask"]
        rewrite = rule1["rewrite"]
    elif rule1["mask"] is None:
        mask = rule2["mask"]
        rewrite = rule2["rewrite"]
    else:
        # mask = mask1 & mask2
        # rewrite = (rewrite1 & mask2) | (rewrite2 & !mask2)
        mask = wildcard_and(rule1["mask"], rule2["mask"])
        rewrite = wildcard_or(
            wildcard_and(rule1["rewrite"], rule2["mask"]),
            wildcard_and(rule2["rewrite"], wildcard_not(rule2["mask"])))

    in_ports = rule1["in_ports"]
    out_ports = rule2["out_ports"]
    if rule1["file"] == rule2["file"]:
        file_name = rule1["file"]
    else:
        file_name = "%s , %s" % (rule1["file"], rule2["file"])
    # BUG FIX: the original aliased rule1["line"] and extended it in
    # place, silently mutating the input rule. Copy first instead.
    lines = list(rule1["line"])
    lines.extend(rule2["line"])

    result_rule = TF.create_standard_rule(in_ports, match, out_ports,
                                          mask, rewrite, file_name, lines)
    return result_rule
def run(self):
    """Worker loop for pipeline-dependency discovery.

    Drains (rule, rule_port, r_port, back) items from dataQ until
    sigterm is set, intersecting headerspaces in the direction given by
    *back* and pushing surviving candidates to resultQ. Finishes by
    posting a None sentinel on resultQ.
    """
    while not self.sigterm.is_set():
        try:
            r, rule_port, r_port, back = self.dataQ.get(False)
            # Direction decides which rule's inverse match applies.
            if back:
                survived_hs = wildcard_intersect(self.rule.match,
                                                 r.inverse_match)
            else:
                survived_hs = wildcard_intersect(r.match,
                                                 self.rule.inverse_match)
            if not survived_hs.is_empty():
                self.resultQ.put(
                    (survived_hs, r.node_id, rule_port, r_port, back))
            self.dataQ.task_done()
        except:  # noqa: E722 -- presumably queue-empty polling; keep best-effort
            pass
    self.resultQ.put(None)
def run(self):
    """Pipeline-dependency worker: consume work items until terminated.

    Each dataQ item is (rule, rule_port, r_port, back); the surviving
    headerspace of the intersection, if non-empty, is reported on
    resultQ. A trailing None on resultQ marks this worker as done.
    """
    while not self.sigterm.is_set():
        try:
            r, rule_port, r_port, back = self.dataQ.get(False)
            # back=True tests our rule against r's inverse; otherwise
            # the roles are swapped.
            hs = (wildcard_intersect(self.rule.match, r.inverse_match)
                  if back
                  else wildcard_intersect(r.match, self.rule.inverse_match))
            if not hs.is_empty():
                self.resultQ.put((hs, r.node_id, rule_port, r_port, back))
            self.dataQ.task_done()
        except:  # noqa: E722 -- swallow empty-queue errors and keep polling
            pass
    self.resultQ.put(None)
def __set_pipeline_dependencies(self, node):
    """Connect *node* to its pipeline neighbors over existing links.

    Downstream: the headerspace surviving this node feeds any rule
    reachable from an output port whose match overlaps it. Upstream:
    rules feeding an input port are linked when their surviving
    headerspace overlaps this node's match.
    """
    # Downstream edges (node -> rule).
    for port in node.output_ports:
        for next_port in self.get_dst_end_of_link(port):
            for rule in self.__get_rules_by_input_port(next_port):
                hs = wildcard_intersect(rule.match, node.inverse_match)
                if not hs.is_empty():
                    node.set_next_in_pipeline(rule, hs, port, next_port)
                    rule.set_previous_in_pipeline(node, hs, next_port, port)
    # Upstream edges (rule -> node).
    for port in node.input_ports:
        for prev_port in self.get_src_end_of_link(port):
            for rule in self.__get_rules_by_output_port(prev_port):
                hs = wildcard_intersect(node.match, rule.inverse_match)
                if not hs.is_empty():
                    rule.set_next_in_pipeline(node, hs, prev_port, port)
                    node.set_previous_in_pipeline(rule, hs, port, prev_port)
def __update_plumber_for_new_link(self, sPort, dPort):
    """Splice a newly added link sPort -> dPort into the plumbing graph.

    Every (source-rule, destination-rule) pair across the link whose
    headerspaces overlap gets a pipeline edge, and the affected source
    flows are queued for re-routing.
    """
    routing_tasks = []
    src_rules = self.__get_rules_by_output_port(sPort)
    dst_rules = self.__get_rules_by_input_port(dPort)
    for src in src_rules:
        for dst in dst_rules:
            hs = wildcard_intersect(dst.match, src.inverse_match)
            if hs.is_empty():
                continue
            src.set_next_in_pipeline(dst, hs, sPort, dPort)
            dst.set_previous_in_pipeline(src, hs, dPort, sPort)
            # The pipeline entry just appended is the one to route on.
            fwd_pipeline = src.next_in_pipeline[-1]
            routing_tasks.extend(
                (fwd_pipeline, flow) for flow in src.source_flow)
    self.__perform_source_routing_tasks(routing_tasks)
def run(self):
    """Influence-detection worker: compare self.rule to queued rules.

    For each (rule, is_higher_priority) item, report the rule on
    resultQ when it shares input ports and overlapping match space with
    self.rule. A final None on resultQ signals completion.
    """
    while not self.sigterm.is_set():
        try:
            r, is_higher_priority = self.dataQ.get(False)
            shared_ports = [p for p in r.input_ports
                            if p in self.rule.input_ports]
            if shared_ports:
                overlap = wildcard_intersect(self.rule.match, r.match)
                if len(overlap) != 0:
                    self.resultQ.put((r.node_id, is_higher_priority,
                                      overlap, shared_ports))
            self.dataQ.task_done()
        except:  # noqa: E722 -- presumably queue-empty; keep polling
            pass
    self.resultQ.put(None)
def __set_influences(self, rule):
    """Record mutual priority influences between *rule* and its table.

    Rules are scanned in table (priority) order: rules encountered
    before *rule* itself shadow it; rules after it are shadowed by it.
    Influence is only recorded when input ports and match headerspace
    both overlap.
    """
    seen_self = False  # becomes True once the scan passes *rule* itself
    for other in self.tables[rule.table]:
        if rule.node_id == other.node_id:
            seen_self = True
            continue
        shared_ports = [p for p in other.input_ports
                        if p in rule.input_ports]
        if not shared_ports:
            continue
        overlap = wildcard_intersect(rule.match, other.match)
        if len(overlap) == 0:
            continue
        if seen_self:
            # *other* comes later in the table: rule shadows it.
            rule.influenced_on_rule(other)
            other.affected_by_rule(rule, overlap, shared_ports)
        else:
            # *other* precedes rule in the table: it shadows rule.
            other.influenced_on_rule(rule)
            rule.affected_by_rule(other, overlap, shared_ports)
def run(self):
    """Worker thread computing rule-influence overlaps until sigterm.

    Consumes (rule, is_higher_priority) pairs from dataQ; when the rule
    shares ports and headerspace with self.rule, the overlap is pushed
    to resultQ. Ends by posting a None sentinel on resultQ.
    """
    while not self.sigterm.is_set():
        try:
            r, is_higher_priority = self.dataQ.get(False)
            ports = [v for v in r.input_ports
                     if v in self.rule.input_ports]
            # Only intersect when the port sets actually meet.
            hs = (wildcard_intersect(self.rule.match, r.match)
                  if ports else None)
            if hs is not None and len(hs) != 0:
                self.resultQ.put(
                    (r.node_id, is_higher_priority, hs, ports))
            self.dataQ.task_done()
        except:  # noqa: E722 -- empty-queue polling; keep best-effort
            pass
    self.resultQ.put(None)