Example #1
import struct

from scapy.config import conf  # provides conf.raw_layer and conf.padding_layer
# getPPIType is defined alongside this helper in Scapy's PPI module


def _PPIGuessPayloadClass(p, **kargs):
    """ This function tells the PacketListField how it should extract the
        TLVs from the payload.  We pass cls only the part of the string
        that the length field pfh_len says it needs.  If a payload is
        returned, that means part of the string was unused.  It is
        converted to a Raw layer, and the remainder of p is added as
        Raw's payload.  If there is no payload, the remainder of p is
        added as out's payload.
    """
    if len(p) >= 4:
        t, pfh_len = struct.unpack("<HH", p[:4])
        # Find out if the value t is in the dict _ppi_types.
        # If not, return the default TLV class
        cls = getPPIType(t, "default")
        pfh_len += 4
        out = cls(p[:pfh_len], **kargs)
        if (out.payload):
            out.payload = conf.raw_layer(out.payload.load)
            out.payload.underlayer = out
            if (len(p) > pfh_len):
                out.payload.payload = conf.padding_layer(p[pfh_len:])
                out.payload.payload.underlayer = out.payload
        elif (len(p) > pfh_len):
            out.payload = conf.padding_layer(p[pfh_len:])
            out.payload.underlayer = out
    else:
        out = conf.raw_layer(p, **kargs)
    return out
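For context, here is a sketch of how such a guess callback is typically wired into a Scapy header class. The field layout below is illustrative (modeled on Scapy's PPI header, not copied from it):

from scapy.packet import Packet
from scapy.fields import ByteField, LEShortField, LEIntField, \
    PacketListField

class PPIExample(Packet):  # hypothetical class for illustration
    name = "PPI Example Header"
    fields_desc = [
        ByteField("pph_version", 0),
        ByteField("pph_flags", 0),
        LEShortField("pph_len", None),
        LEIntField("dlt", 0),
        # The third argument is the guess callback: PacketListField calls
        # it on the remaining bytes to pick the class of each TLV.
        PacketListField("PPIFieldHeaders", [], _PPIGuessPayloadClass,
                        length_from=lambda p: p.pph_len - 8),
    ]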
Example #2
def _PPIGuessPayloadClass(p, **kargs):
    """ This function tells the PacketListField how it should extract the
        TLVs from the payload.  We pass cls only the part of the string
        that the length field pfh_len says it needs.  If a payload is
        returned, that means part of the string was unused.  It is
        converted to a Raw layer, and the remainder of p is added as
        Raw's payload.  If there is no payload, the remainder of p is
        added as out's payload.
    """
    if len(p) >= 4:
        t, pfh_len = struct.unpack("<HH", p[:4])
        # Find out if the value t is in the dict _ppi_types.
        # If not, return the default TLV class
        cls = getPPIType(t, "default")
        pfh_len += 4
        out = cls(p[:pfh_len], **kargs)
        if (out.payload):
            out.payload = conf.raw_layer(out.payload.load)
            if (len(p) > pfh_len):
                out.payload.payload = conf.padding_layer(p[pfh_len:])
        elif (len(p) > pfh_len):
            out.payload = conf.padding_layer(p[pfh_len:])
    else:
        out = conf.raw_layer(p, **kargs)
    return out
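Note that this older variant re-wraps the unused bytes just like Example #1, but does not fix up the underlayer links of the Raw and Padding layers it creates.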
Example #3
    def dissect(self, s):
        # Hook for pre-processing the raw bytes (e.g. decryption)
        s = self.pre_dissect(s)
        # Consume this layer's own fields from the string
        s = self.do_dissect(s)
        # Hook for post-processing (e.g. defragmentation, checksums)
        s = self.post_dissect(s)
        # Split the leftover bytes into payload and trailing padding
        payl, pad = self.extract_padding(s)
        self.do_dissect_payload(payl)
        if pad and conf.padding:
            self.add_payload(conf.padding_layer(pad))
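extract_padding is the hook a layer overrides to decide where its own payload ends. A minimal sketch, assuming a hypothetical fixed-layout layer FixedHdr whose plen field declares the payload length:

from scapy.packet import Packet
from scapy.fields import ByteField, ShortField

class FixedHdr(Packet):  # hypothetical layer for illustration
    name = "FixedHdr"
    fields_desc = [ByteField("ver", 0),
                   ByteField("flags", 0),
                   ShortField("plen", None)]

    def extract_padding(self, s):
        # Bytes beyond the declared payload length become Padding
        return s[:self.plen], s[self.plen:]

With this override, dissect() above hands only the first plen bytes to do_dissect_payload and wraps the rest in conf.padding_layer.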
Example #4
def netflowv9_defragment(plist, verb=1):
    """Process all NetflowV9 Packets to match IDs of the DataFlowsets
    with the Headers

    params:
     - plist: the list of mixed NetflowV9 packets.
     - verb: verbose print (0/1)
    """
    # We need the whole packet to be dissected to access field def in NetflowFlowsetV9 or NetflowOptionsFlowsetV9  # noqa: E501
    definitions = {}
    definitions_opts = {}
    ignored = set()
    # Iterate through initial list
    for pkt in plist:  # NetflowDataflowsetV9:
        # Dataflowset definitions
        if NetflowFlowsetV9 in pkt:
            current = pkt
            while NetflowFlowsetV9 in current:
                current = current[NetflowFlowsetV9]
                for ntv9 in current.templates:
                    llist = []
                    for tmpl in ntv9.template_fields:
                        llist.append((tmpl.fieldLength, tmpl.fieldType))
                    if llist:
                        tot_len = sum(x[0] for x in llist)
                        cls = _GenNetflowRecordV9(NetflowRecordV9, llist)
                        definitions[ntv9.templateID] = (tot_len, cls)
                current = current.payload
        # Options definitions
        if NetflowOptionsFlowsetV9 in pkt:
            current = pkt
            while NetflowOptionsFlowsetV9 in current:
                current = current[NetflowOptionsFlowsetV9]
                # Load scopes
                llist = []
                for scope in current.scopes:
                    llist.append(
                        (scope.scopeFieldlength, scope.scopeFieldType))
                scope_tot_len = sum(x[0] for x in llist)
                scope_cls = _GenNetflowRecordV9(NetflowOptionsRecordScopeV9,
                                                llist)
                # Load options
                llist = []
                for opt in current.options:
                    llist.append((opt.optionFieldlength, opt.optionFieldType))
                option_tot_len = sum(x[0] for x in llist)
                option_cls = _GenNetflowRecordV9(NetflowOptionsRecordOptionV9,
                                                 llist)
                # Storage
                definitions_opts[current.templateID] = (scope_tot_len,
                                                        scope_cls,
                                                        option_tot_len,
                                                        option_cls)
                current = current.payload
        # Dissect flowsets
        if NetflowDataflowsetV9 in pkt:
            datafl = pkt[NetflowDataflowsetV9]
            tid = datafl.templateID
            if tid not in definitions and tid not in definitions_opts:
                ignored.add(tid)
                continue
            # All data is stored in one record, awaiting to be split
            # If fieldValue is available, the record has not been
            # defragmented: pop it
            try:
                data = datafl.records[0].fieldValue
                datafl.records.pop(0)
            except (IndexError, AttributeError):
                continue
            res = []
            # Flowset record
            # Now, according to the flow/option data,
            # let's re-dissect NetflowDataflowsetV9
            if tid in definitions:
                tot_len, cls = definitions[tid]
                while len(data) >= tot_len:
                    res.append(cls(data[:tot_len]))
                    data = data[tot_len:]
                # Inject dissected data
                datafl.records = res
                datafl.do_dissect_payload(data)
            # Options
            elif tid in definitions_opts:
                (scope_len, scope_cls, option_len,
                 option_cls) = definitions_opts[tid]
                # Dissect scopes
                if scope_len:
                    res.append(scope_cls(data[:scope_len]))
                if option_len:
                    res.append(
                        option_cls(data[scope_len:scope_len + option_len]))
                if len(data) > scope_len + option_len:
                    res.append(
                        conf.padding_layer(data[scope_len + option_len:]))
                # Inject dissected data
                datafl.records = res
                datafl.name = "Netflow DataFlowSet V9 - OPTIONS"
    if verb and ignored:
        warning("Ignored templateIDs (missing): %s" % list(ignored))
    return plist
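A typical invocation, assuming a capture file (the file name is hypothetical) that contains both the template and data flowsets:

from scapy.utils import rdpcap
from scapy.layers.netflow import NetflowDataflowsetV9

plist = rdpcap("netflow_v9.pcap")  # hypothetical capture file
plist = netflowv9_defragment(plist)
# Data flowsets whose template appeared earlier in the list are now
# fully dissected; their records hold generated NetflowRecordV9 packets
print(plist[-1][NetflowDataflowsetV9].records)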
Example #5
def _netflowv9_defragment_packet(pkt, definitions, definitions_opts, ignored):
    """Used internally to process a single packet during defragmenting"""
    # Dataflowset definitions
    if NetflowFlowsetV9 in pkt:
        current = pkt
        while NetflowFlowsetV9 in current:
            current = current[NetflowFlowsetV9]
            for ntv9 in current.templates:
                llist = []
                for tmpl in ntv9.template_fields:
                    llist.append((tmpl.fieldLength, tmpl.fieldType))
                if llist:
                    tot_len = sum(x[0] for x in llist)
                    cls = _GenNetflowRecordV9(NetflowRecordV9, llist)
                    definitions[ntv9.templateID] = (tot_len, cls)
            current = current.payload
    # Options definitions
    if NetflowOptionsFlowsetV9 in pkt:
        current = pkt
        while NetflowOptionsFlowsetV9 in current:
            current = current[NetflowOptionsFlowsetV9]
            # Load scopes
            llist = []
            for scope in current.scopes:
                llist.append((
                    scope.scopeFieldlength,
                    scope.scopeFieldType
                ))
            scope_tot_len = sum(x[0] for x in llist)
            scope_cls = _GenNetflowRecordV9(
                NetflowOptionsRecordScopeV9,
                llist
            )
            # Load options
            llist = []
            for opt in current.options:
                llist.append((
                    opt.optionFieldlength,
                    opt.optionFieldType
                ))
            option_tot_len = sum(x[0] for x in llist)
            option_cls = _GenNetflowRecordV9(
                NetflowOptionsRecordOptionV9,
                llist
            )
            # Storage
            definitions_opts[current.templateID] = (
                scope_tot_len, scope_cls,
                option_tot_len, option_cls
            )
            current = current.payload
    # Dissect flowsets
    if NetflowDataflowsetV9 in pkt:
        datafl = pkt[NetflowDataflowsetV9]
        tid = datafl.templateID
        if tid not in definitions and tid not in definitions_opts:
            ignored.add(tid)
            return
        # All data is stored in one record, awaiting to be split
        # If fieldValue is available, the record has not been
        # defragmented: pop it
        try:
            data = datafl.records[0].fieldValue
            datafl.records.pop(0)
        except (IndexError, AttributeError):
            return
        res = []
        # Flowset record
        # Now, according to the flow/option data,
        # let's re-dissect NetflowDataflowsetV9
        if tid in definitions:
            tot_len, cls = definitions[tid]
            while len(data) >= tot_len:
                res.append(cls(data[:tot_len]))
                data = data[tot_len:]
            # Inject dissected data
            datafl.records = res
            datafl.do_dissect_payload(data)
        # Options
        elif tid in definitions_opts:
            (scope_len, scope_cls,
                option_len, option_cls) = definitions_opts[tid]
            # Dissect scopes
            if scope_len:
                res.append(scope_cls(data[:scope_len]))
            if option_len:
                res.append(
                    option_cls(data[scope_len:scope_len + option_len])
                )
            if len(data) > scope_len + option_len:
                res.append(
                    conf.padding_layer(data[scope_len + option_len:])
                )
            # Inject dissected data
            datafl.records = res
            datafl.name = "Netflow DataFlowSet V9 - OPTIONS"