def __compare_pktbuffers(self, epktbuf, apktbuf, tc):
    """Compare an expected packet buffer against the actually-received one.

    Returns True on match (or when the comparison is skipped), False otherwise.
    """
    # Dry runs never produce real packets; treat as a pass.
    if GlobalOptions.dryrun:
        return True
    # Nothing expected means nothing to verify.
    if epktbuf is None:
        logger.info("Packet Compare: Expected Buffer is None. Skipping")
        return True
    # A packet was expected but none arrived: mismatch.
    if apktbuf is None:
        logger.error("Packet Compare: ExpType:%s, ActType:%s" %\
                     (type(epktbuf), type(apktbuf)))
        return False
    # Make sure we are not accidentally comparing the same object
    if apktbuf is epktbuf:
        logger.error("ExpBuf and ActBuf are same objects.")
        return False
    logger.verbose("Comparing Packets")
    comparator = comparators.PacketComparator(tc.GetIgnorePacketFields())
    comparator.AddExpected(epktbuf, None, 1)
    comparator.AddReceived(apktbuf, None)
    comparator.Compare()
    comparator.ShowResults()
    return comparator.IsMatch()
def __is_matching_rule(packet_tuples, rule, direction, testcase):
    """Return True when the packet satisfies both the L3 and L4 match of `rule`."""
    l3_ok = __is_l3_match(packet_tuples, rule.L3Match, direction, testcase)
    if not l3_ok:
        logger.verbose("l3match fail")
        return False
    l4_ok = __is_l4_match(packet_tuples, rule.L4Match)
    if not l4_ok:
        logger.verbose("l4match fail")
        return False
    return True
def __len__(self):
    """Total size in bytes of all packet headers, walked in header order."""
    total = 0
    for name in self.hdrsorder:
        hdr = self.headers.__dict__[name]
        # Only real packet headers contribute to the length.
        if not IsPacketHeader(hdr):
            continue
        logger.verbose("Size of HEADER: %s = " %\
                       name, hdr.meta.size)
        total += hdr.meta.size
    return total
def __get_builder(self, hdr):
    """Resolve the builder function for a header.

    Looks up '<meta.id>_builder' in this module; falls back to BASE_builder.
    Returns None when the header carries no meta information.
    """
    meta = getattr(hdr, 'meta', None)
    if meta is None:
        return None
    builder_name = '%s_builder' % hdr.meta.id
    this_module = sys.modules[__name__]
    if hasattr(this_module, builder_name):
        logger.verbose("Using builder %s" % builder_name)
        return getattr(this_module, builder_name)
    # No specialized builder defined for this header type.
    return BASE_builder
def ParseServiceResponse(self, objtype, op, resp):
    """Parse a raw UDS service response into the op-specific gRPC message.

    Returns the (possibly empty) gRPC message instance for (objtype, op).
    """
    msg_cls = self.MsgResps[objtype].MsgResp(op)
    grpcmsg = msg_cls()
    if resp and resp != -1:
        service_resp = types_pb2.ServiceResponseMessage()
        service_resp.ParseFromString(resp)
        val = service_resp.Response
        # Unpack the Any payload only when it matches the expected type.
        if val.Is(grpcmsg.DESCRIPTOR):
            val.Unpack(grpcmsg)
        logger.verbose(f"Received response over UDS: {service_resp}")
    else:
        logger.info("No service response received in UDS")
    return grpcmsg
def __process_list(self, elem_list):
    """Recursively convert each element of a parsed YAML list into framework objects."""
    def _convert(elem):
        if isinstance(elem, objects.FrameworkObject):
            return self.__process_yml_obj(elem)
        if isinstance(elem, list):
            logger.verbose("Processing LIST %s" % elem)
            return self.__process_list(elem)
        if utils.IsTemplateField(elem):
            logger.verbose("Processing TEMPLATE FIELD %s" % elem)
            return objects.TemplateFieldObject(elem)
        # Plain scalar: keep as-is.
        return elem
    return [_convert(elem) for elem in elem_list]
def build(self, hdr):
    """Build a scapy header instance from a framework header definition.

    Copies every non-internal field from hdr.fields onto the scapy object.
    Raises AssertionError when the scapy class is unknown or a field
    cannot be set (interface preserved from the original).
    """
    # Idiom fix: 'hasattr(...) == False' -> 'not hasattr(...)'.
    if not hasattr(penscapy, hdr.meta.scapy):
        logger.error("SCAPY has no header = %s" % hdr.meta.scapy)
        assert(0)
    logger.debug("Adding Header: %s" % hdr.meta.scapy)
    scapyhdl = getattr(penscapy, hdr.meta.scapy)
    shdr = scapyhdl()
    for key, value in hdr.fields.__dict__.items():
        # Skip framework bookkeeping attributes; they are not packet fields.
        if objects.IsFrameworkObjectInternalAttr(key):
            continue
        logger.verbose(" - %-10s =" % key, value)
        try:
            # setattr() is the idiomatic spelling of shdr.__setattr__(...).
            setattr(shdr, key, value)
        except Exception:
            # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
            # are no longer swallowed before the assert.
            logger.error("ScapyHeaderBuilder: Failed to set %s.%s to" %\
                         (hdr.meta.id, key), value)
            assert(0)
    return shdr
def IsFilterMatch(self, filters):
    """Return True when this object's attributes satisfy every (attr, value) filter pair."""
    logger.verbose("IsFilterMatch(): Object %s" % self.GID())
    if filters == None:
        return True
    for flt in filters:
        attr, value = flt[0], flt[1]
        # Wildcard entry matches everything.
        if attr == 'any' and value == None:
            continue
        if attr not in self.__dict__:
            logger.error("Attr:%s not present in %s." %\
                         (attr, self.__class__))
            assert (0)
            return False  # reached only when asserts are disabled (-O)
        fvalue = self.__dict__[attr]
        if isinstance(fvalue, objects.FrameworkFieldObject):
            fvalue = fvalue.get()
        # Coerce the filter's string value into a comparable python value.
        if value.isdigit():
            value = int(value)
        if value == 'None':
            value = None
        if value == 'True':
            value = True
        if value == 'False':
            value = False
        logger.verbose(" - %s: object" % attr, fvalue, "filter:", value)
        if fvalue != value:
            return False
    logger.verbose(" - Found Match !!")
    return True
def MergeObject(hi, lo):
    """Merge two framework objects, with HI taking precedence over LO.

    Returns a new object (deep copy); neither input is mutated. Keys only
    in LO are taken from LO; keys in both recurse when both sides are
    FrameworkObjects, otherwise HI wins.
    """
    if hi is None:
        return copy.deepcopy(lo)
    result = copy.deepcopy(hi)
    for key in lo.__dict__:
        logger.verbose("Merging MEMBER = %s" % key)
        lo_data = lo.__dict__[key]
        # If key not present in HI, then LO is winner.
        if key not in hi.__dict__:
            logger.verbose(" - not present in HI. using LO")
            result.__dict__[key] = lo_data
            continue
        hi_data = hi.__dict__[key]
        # If key present in HI,
        if isinstance(lo_data, FrameworkObject):
            logger.verbose(" - merging recursively.")
            if hi_data:
                if isinstance(hi_data, FrameworkObject):
                    # and it is an object, merge recursively.
                    # Bug fix: the original called the undefined/typo'd name
                    # 'MergeObjects' (plural); the intended self-recursion is
                    # this function.
                    merged_data = MergeObject(hi_data, lo_data)
                else:
                    merged_data = copy.deepcopy(hi_data)
            else:
                merged_data = copy.deepcopy(lo_data)
            result.__dict__[key] = merged_data
        else:
            # if not an object, then HI is winner.
            result.__dict__[key] = hi_data
    return result
def __init__(self, valobj):
    """Parse a config-field spec: TYPE, SCOPE-or-FIELD token, optional filter."""
    super().__init__()
    params = valobj.params
    self.typestr = params[0].upper()
    # NOTE(review): params[1] is deliberately stashed under three names —
    # it is later interpreted either as a scope or as an object field.
    self.scopestr = params[1].upper()
    self.fieldstr = params[1].upper()
    self.keyval = params[1].upper()
    self.filterstr = params[2] if len(params) > 2 else None
    self.type = defs.cfgtypes.id(self.typestr)
    if defs.config_field_scopes.valid(self.scopestr):
        self.scope = defs.config_field_scopes.id(self.scopestr)
        self.field = None
        return
    if defs.filters.valid(self.fieldstr):
        self.scope = None
        self.field = defs.filters.id(self.fieldstr)
        return
    # Neither a scope nor a field: scope/field attributes stay unset,
    # matching the original behavior.
    logger.verbose("Config Field Param:%s not a SCOPE or OBJECTFIELD" % self.fieldstr)
    return
def ValidateSubClass(sub, sup):
    """Assert that every attribute of SUB also exists in SUP, recursing into nested objects."""
    if sub == None:
        return
    assert (sup != None)
    for key, data in sub.__dict__.items():
        logger.verbose("Validating KEY = %s" % key)
        # Every subclass key must be present in the superclass.
        if key not in sup.__dict__:
            logger.error("Key %s not present in SuperClass" % key)
            assert (0)
        # Field objects need no deeper validation.
        if isinstance(data, FrameworkFieldObject):
            continue
        sup_data = sup.__dict__[key]
        # Nested objects are validated recursively against their counterpart.
        if isinstance(data, FrameworkObject):
            logger.verbose("Recursively validating object")
            ValidateSubClass(data, sup_data)
    return
def __is_l4_match(packet_tuples, l4matchobj):
    """Check the packet's L4 tuple against the rule's L4 match criteria."""
    if not l4matchobj.valid:
        # No valid L4 criteria: treat as wildcard match.
        return True
    proto = packet_tuples[2]
    if utils.IsICMPProtocol(proto):
        # ICMP matches on type/code rather than ports.
        if not __is_matching_icmptype(packet_tuples[5], l4matchobj):
            logger.verbose("l4match icmp type fail")
            return False
        if not __is_matching_icmpcode(packet_tuples[6], l4matchobj):
            logger.verbose("l4match icmp code fail")
            return False
        return True
    # Non-ICMP protocols match on source/destination ports.
    if not __is_matching_sport(packet_tuples[3], l4matchobj):
        logger.verbose("l4match sport fail")
        return False
    if not __is_matching_dport(packet_tuples[4], l4matchobj):
        logger.verbose("l4match dport fail")
        return False
    return True
def __process_yml_obj(self, yobj):
    """Convert a parsed YAML object into a FrameworkTemplateObject, field by field."""
    result = objects.FrameworkTemplateObject()
    for fname, fval in yobj.__dict__.items():
        if isinstance(fval, list):
            logger.verbose("Processing LIST %s" % fname)
            result.__dict__[fname] = self.__process_list(fval)
        elif isinstance(fval, objects.FrameworkObject):
            logger.verbose("Processing OBJECT %s" % fname)
            result.__dict__[fname] = self.__process_yml_obj(fval)
        elif utils.IsTemplateField(fval):
            logger.verbose("Processing TEMPLATE FIELD %s = %s" % (fname, fval))
            result.__dict__[fname] = objects.TemplateFieldObject(fval)
        else:
            # Plain scalar field: copy through unchanged.
            logger.verbose("Processing basic FIELD %s =" % fname, fval)
            result.__dict__[fname] = fval
    return result
def __is_l3_match(packet_tuples, l3matchobj):
    """Check the packet's L3 tuple (sip, dip, proto) against the rule's L3 match."""
    if not l3matchobj.valid:
        # No valid L3 criteria: treat as wildcard match.
        return True
    sip, dip, proto = packet_tuples[0], packet_tuples[1], packet_tuples[2]
    if not __is_matching_src_ip(sip, l3matchobj):
        logger.verbose("l3match sip fail")
        return False
    if not __is_matching_dst_ip(dip, l3matchobj):
        logger.verbose("l3match dip fail")
        return False
    # Proto is only consulted after both IPs match, as in the original.
    if not __is_matching_proto(proto, l3matchobj.Proto):
        logger.verbose("l3match proto fail")
        return False
    return True
def Parse(self, path, file_or_extn='*'):
    """Parse all files under `path` whose names match `file_or_extn` (glob)."""
    pattern = utils.GetFullIglobPath(file_or_extn, path)
    logger.verbose("Processing file based on Match %s" % pattern)
    return self.__parse_matching(pattern)
def build(self, hdr):
    """Log the payload size, then defer to the base builder."""
    payload_size = hdr.meta.size
    logger.verbose("Updating PAYLOAD of size: %d" % payload_size)
    return super().build(hdr)