Example #1
def ReadPcap(file):
    try:
      # scapy.utils.PcapReader
      print('Reading: %s' % file)
      pcap = PcapReader(file)
    except:
      # yes, logging.exception should be used here, but it doesn't add any value
      print('Not a valid pcap file: %s' % file)
      raise
  
    # Build a list of streams that match the search regex
    num = 0
    while pcap:
      try:
        packet = pcap.read_packet()
        if 'IP' not in packet:
            continue
        srcip = packet['IP'].src
        dstip = packet['IP'].dst
        sport = packet[2].sport
        dport = packet[2].dport
        length = packet[2].len
        print(srcip, dstip, sport, dport, length)
        # print(packet.summary())
        # print(binascii.hexlify(packet[2].payload['Raw'].load))
        num += 1
        if num % 1000 == 0:
            print(num)
      except TypeError:
        print('exception')
        break

    pcap.close()
    del pcap
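
A minimal alternative sketch, not taken from the example above: modern scapy lets a PcapReader be iterated directly and used as a context manager, which avoids the manual read_packet()/TypeError loop. The function name read_pcap and the getattr guards are illustrative assumptions.

from scapy.all import PcapReader, IP

def read_pcap(path):
    num = 0
    with PcapReader(path) as pcap:   # file is closed even if an error occurs
        for packet in pcap:          # iteration stops at end of file
            if IP not in packet:
                continue
            ip = packet[IP]
            # sport/dport only exist on TCP/UDP layers, hence the guards
            sport = getattr(ip.payload, 'sport', None)
            dport = getattr(ip.payload, 'dport', None)
            print(ip.src, ip.dst, sport, dport, ip.len)
            num += 1
            if num % 1000 == 0:
                print(num)
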
def extract_attributes(pcap_file,
                       attr_list,
                       filter_attributes=None,
                       filter_=None):
    packets_reader = PcapReader(pcap_file)

    try:
        attr_name_list = map(format_attr_name, attr_list)
        while 1:
            packet = packets_reader.read_packet()
            if not packet:
                break

            attributes = dict()
            for i, attr in enumerate(attr_list):
                attributes[attr_name_list[i]] = extract_attr(packet, attr)

            ok = True
            if filter_attributes:
                for filter_attr in filter_attributes:
                    if isinstance(filter_attr, types.StringTypes):
                        filter_attr = format_attr_name(filter_attr)
                        if filter_attr not in attributes or not attributes[
                                filter_attr]:
                            ok = False
                            break

            if ok and filter_:
                ok = filter_(attributes)

            if ok:
                yield attributes
    finally:
        packets_reader.close()
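
A hedged usage sketch for the generator above; traffic.pcap and the attribute names are placeholders, and format_attr_name/extract_attr are assumed to be defined elsewhere in the same project.

for attrs in extract_attributes('traffic.pcap', ['IP.src', 'IP.dst']):
    print(attrs)
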
Example #3
File: parser.py Project: cjelsa/iex_parser
    def __init__(self,
                 filename: str,
                 protocol: str,
                 queue_length=25000) -> None:
        self.reader = PcapReader(filename)
        self.protocol = protocol
        self.queue_length = queue_length
Example #4
def scapy_io(f_in,f_out):
    f = PcapReader(f_in)
    o = PcapWriter(f_out)
    pkt = f.read_packet()
    while pkt is not None:
        o.write(pkt)
        pkt = f.read_packet()
    f.close()
    o.close()
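
An equivalent hedged sketch that uses PcapReader as a context manager and plain iteration instead of the explicit read_packet() loop; scapy_io_ctx is an illustrative name.

from scapy.all import PcapReader, PcapWriter

def scapy_io_ctx(f_in, f_out):
    writer = PcapWriter(f_out)
    with PcapReader(f_in) as reader:
        for pkt in reader:
            writer.write(pkt)
    writer.close()
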
Example #5
    def get_pcap_timestamp(self, pcapfile):
        try:
            pcap_content = PcapReader(pcapfile)
            list_timestamp = list(pcap_content)
            start_time = list_timestamp[0].time
            end_time = list_timestamp[-1].time
        except IOError:
            pcap_content = PcapReader(pcapfile)
            list_timestamp = list(pcap_content)
            start_time = list_timestamp[0].time
            end_time = list_timestamp[-1].time

        print("{0}: {1} - {2}".format(pcapfile, start_time, end_time))
        return (start_time + end_time) / 2
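
The method above materialises every packet in memory just to read two timestamps. A hedged streaming sketch (illustrative name, same return value) keeps only the first and last timestamps:

from scapy.all import PcapReader

def get_pcap_timestamp_streaming(pcapfile):
    start_time = end_time = None
    with PcapReader(pcapfile) as pcap:
        for pkt in pcap:
            if start_time is None:
                start_time = pkt.time
            end_time = pkt.time
    print("{0}: {1} - {2}".format(pcapfile, start_time, end_time))
    return (start_time + end_time) / 2
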
Example #6
def parse_pcap_into_npy(FN, count=-1, debug=False):
    i = 0
    packets_numerical = []
    if count == -1: count = 1e100
    with PcapReader(FN) as pcap_reader:
        for pkt in pcap_reader:
            i += 1
            if i > count: break
            if i % 10000 == 0:
                if debug:
                    print('Progress: %d' % i)
            pdict = {}
            pdict['ig_intr_md.ingress_mac_tstamp'] = pkt.time
            if pkt.haslayer(IP):
                pdict['hdr.ipv4.ttl'] = pkt[IP].ttl
                pdict['hdr.ipv4.protocol'] = pkt[IP].proto
                pdict['hdr.ipv4.checksum'] = pkt[IP].chksum
                pdict['hdr.ipv4.src_addr'] = pkt[IP].src
                pdict['hdr.ipv4.dst_addr'] = pkt[IP].dst

            if pkt.haslayer(TCP):
                pdict['hdr.tcp.src_port'] = pkt[TCP].sport
                pdict['hdr.tcp.dst_port'] = pkt[TCP].dport
                pdict['hdr.tcp.checksum'] = pkt[TCP].chksum

            if pkt.haslayer(UDP):
                pdict['hdr.udp.src_port'] = pkt[UDP].sport
                pdict['hdr.udp.dst_port'] = pkt[UDP].dport
                pdict['hdr.udp.checksum'] = pkt[UDP].chksum

            def to_list(p):
                line = []
                for h in harr:
                    if (h not in p) or (p[h] is None):
                        line.append(-1)
                    else:
                        line.append(p[h])
                #timestamp
                line[0] = np.float128(line[0])
                #ip
                if line[1] != -1:
                    line[1] = dottedQuadToNum(line[1])
                if line[2] != -1:
                    line[2] = dottedQuadToNum(line[2])
                #everything else
                for i in range(3, 12):
                    line[i] = int(line[i])
                return line

            packets_numerical.append(tuple(to_list(pdict)))
    # convert to np array
    if debug:
        print('Parsed %d packets. Allocating numpy ndarray...' % i)
    arr = np.zeros((len(packets_numerical)),
                   dtype=np.dtype('f16,u4,u4,u2,u2,u2,u2,u2,u2,u2,u2,u2'))
    if debug:
        print('Allocated nparray shape=%s' % arr.shape)
    for i in range(len(packets_numerical)):
        arr[i] = packets_numerical[i]
    return arr
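
The example above assumes a global header-name list harr and a helper dottedQuadToNum defined elsewhere in the project; a minimal sketch of that address conversion (illustrative name) could look like this.

import socket
import struct

def dotted_quad_to_num(ip):
    # '10.0.0.1' -> 167772161
    return struct.unpack('!I', socket.inet_aton(ip))[0]
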
Example #7
def load_packets(pcap_filename, limit_packets=1000):
    """Load packets from provided pcap file."""
    start_time = time.time()
    packets = []
    try:
        if os.path.getsize(pcap_filename) > 100 * 2**20:
            print(
                "[!] Provided pcap file is bigger than 100MB, so loading can take a while!\n"
                "[!] You can interrupt loading at any time using CTRL+C and classification "
                "will continue using already loaded packets.")
        # pylint: disable=no-value-for-parameter
        # (this problem is caused by a trick used in scapy, but in fact there is only 1 param)
        reader = PcapReader(
            pcap_filename)  # lgtm[py/call/wrong-number-class-arguments]
        for packet in reader:
            if len(packets) >= limit_packets:
                break
            packets.append(packet)
            if len(packets) % 10000 == 9999:
                load_time = time.time() - start_time
                print(
                    f"    Loaded first {len(packets)+1} packets (in {load_time:.2f} sec)..."
                )
    except KeyboardInterrupt:
        pass
    except (IOError, Scapy_Exception, ValueError) as exc:
        raise CotopaxiException(
            "[!] Cannot load network packets from the provided file "
            "(please make sure it is in PCAP or PCAPNG format)!") from exc
    load_time = time.time() - start_time
    print(
        f"[.] Loaded {len(packets)} packets from the provided file (in {load_time:.2f} sec)"
    )
    return packets
Example #8
def parse_packet(filename):
    with PcapReader(filename) as file_capture:
        global start_parse, end_parse, count, totaltime
        for packet in file_capture:
            try:
                if (
                        len(packet) > 400 and packet.dport == 1813
                ):  # We only need packets whose length is greater than 400 bytes
                    # Capturing the RAW data from packet (the index value for raw data is 3)
                    start_parse = time()
                    radius_packet = str(packet[Radius])
                    # Pyrad has a dictionary with the RADIUS attributes defined, It'll help in decoding the RAW Packet
                    pkt = Packet(packet=radius_packet,
                                 dict=Dictionary("dictionary"))
                    attr1 = pkt._DecodeKey(8)
                    value1 = pkt.__getitem__(attr1)
                    attr2 = pkt._DecodeKey(31)
                    value2 = pkt.__getitem__(attr2)
                    end_parse = time()
                    print("Time Taken to parse RADIUS packet: %s seconds" %
                          (end_parse - start_parse))
                    count += 1
                    totaltime += (end_parse - start_parse)
                    print("%d Private IP: %s and MSISDN: %s" %
                          (count, value1, value2))
            except AttributeError:
                print(
                    "Port attribute not available in the packet, skipping the parsing on the packet... "
                )
def filter_pcap(pcap_path, iplist):
    """
    Filter capture by TCP packets addressed to any address in ``iplist``
    """
    ack_num = 0
    pkt_num = 0
    pcap_filtered = []
    orig_pcap = pcap_path + ".original"
    copyfile(pcap_path, orig_pcap)
    with PcapReader(orig_pcap) as preader:
        for p in preader:
            pkt_num += 1
            if 'TCP' in p:
                ip = p.payload
                if len(ip.payload.payload) == 0:
                    #ACK
                    ack_num += 1
                    continue
                if ip.dst in iplist or ip.src in iplist:
                    pcap_filtered.append(p)
    wrpcap(pcap_path, pcap_filtered)
    wl_log.debug("Filter out %d/%d ACK packets." % (ack_num, pkt_num))
    subprocess.call("rm " + orig_pcap, shell=True)
    subprocess.call("chmod 777 " + pcap_path, shell=True)
    wl_log.debug("Delete raw pcap and change priviledge of pcap file.")
Example #10
def pcap_extract(pcap_path, hosts):
    with PcapReader(tcpdump(pcap_path, args=["-w", "-", "-n", "tcp"], getfd=True)) as pcreader:
        for p in pcreader:
            # we are only interested in syn-ack packet
            if not "TCP" in p:
                print("NOT TCP :OOOOO")
                continue
            if p["TCP"].flags != "SA":
                continue

            ip = p["IP"].src
            if ip not in hosts:
                continue

            ttl = p.ttl
            # round up ttl to closest one in the list
            for t in [32, 64, 128, 255]:
                if ttl <= t:
                    ttl = t
                    break

            mss = 0
            for o in p["TCP"].options:
                if o[0] == "MSS":
                    mss = o[1]
                    break

            if not hosts[ip].tcp:
                hosts[ip].tcp = {}

            tcp = hosts[ip].tcp

            tcp["ttl"] = max(tcp.get("ttl", 0), ttl)
            tcp["mss"] = max(tcp.get("mss", 0), mss)
            tcp["win"] = max(tcp.get("win", 0), p["TCP"].window)
def main(workspace='', args=None, parser=None):

    parser.add_argument(
        '--dry-run',
        action='store_true',
        help='Do not touch the database. Only print the object ID')
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Verbose output from the pcapfile library.')
    parser.add_argument('pcap', help='Path to the PCAP file')
    parsed_args = parser.parse_args(args)

    try:
        from scapy.all import PcapReader
    except ImportError:
        print('scapy not found, please install it to use this plugin.'
              ' You can install it by executing pip2 install scapy in a shell.')
        return 1, None

    if not os.path.isfile(parsed_args.pcap):
        print("pcap file not found: " % parsed_args.pcap)
        return 2, None

    wifi_pcaps = PcapReader(parsed_args.pcap)
    parse_wifi_pcaps(workspace, wifi_pcaps)

    if not parsed_args.dry_run:
        save_objs(workspace)

    return 0, None
Example #12
def load_packets(pcap_filename):
    """Load packets from provided pcap file."""
    start_time = time.time()
    packets = []
    try:
        if os.path.getsize(pcap_filename) > 100 * 2**20:
            print(
                "[!] Provided pcap file is bigger than 100MB, so loading can take a while!\n"
                "[!] You can interrupt loading at any time using CTRL+C and classification "
                "will continue using already loaded packets.")
        reader = PcapReader(pcap_filename)  # pylint: disable=no-value-for-parameter
        for packet in reader:
            packets.append(packet)
            if len(packets) % 10000 == 9999:
                load_time = time.time() - start_time
                print(
                    f"    Loaded first {len(packets)+1} packets (in {load_time:.2f} sec)..."
                )
    except KeyboardInterrupt:
        pass
    except (IOError, Scapy_Exception, ValueError):
        raise CotopaxiException(
            "[!] Cannot load network packets from the provided file "
            "(please make sure it is in PCAP or PCAPNG format)!")
    load_time = time.time() - start_time
    print(
        f"[.] Loaded {len(packets)} packets from the provided file (in {load_time:.2f} sec)"
    )
    return packets
    def represent_by_length(self):
        """
        将数据集中是的每一条样本用收到或发送的数据包长度表示
        """
        if not os.path.isdir(self.dataset_path):
            logging.info("数据集目录 %s 不存在" % self.dataset_path)
            return

        for file_index, pcap_file in enumerate(os.listdir(self.dataset_path)):
            if not os.path.isfile(pcap_file):
                pass
            # 从文件名中匹配出标签,匹配从"_"到"."之间的字符串
            m = re.match(r"^.*\_(.*)\..*$" , pcap_file)
            label = m.group(1)
            lens = []
            with PcapReader(self.dataset_path + "/" + pcap_file) as pcap_reader:
                for packet in pcap_reader:
                    # 遍历当前pcap文件中的每一个packet
                    try:
                        src = packet['IP'].fields['src']
                        dst = packet['IP'].fields['dst']
                        len = packet['IP'].fields['len']
                    except:
                        continue

                    if (src not in self.save_ips) and (dst not in self.save_ips):
                        continue
                    # 每个流中的手机发送出的IP包长度为正,手机接收到IP包长度为负
                    tag = 1 if src == HOSTIP else -1
                    lens.append(tag * len)
            lens.append(label)
            print lens
        logging.info("已将数据集转换为包长度序列")
Example #14
    def run(self, pcap, apk):
        ruleList = list()
        commentList = list()

        if pcap is None or apk is None:
            commentList.append(
                "this plugin requires a pcap file and an apk to work")
            logger.error(
                "plugin requires a pcap and an apk...but didn't get em")
            return (pluginName, None, commentList)

        try:
            from scapy.all import PcapReader, hexdump, ls
            import sys

            my_reader = PcapReader(pcap)
            if (self.findNotCompatiblePhoneHome(my_reader)):
                pt = self.decryptNotCompatibleData(apk, ruleList, commentList)
                (primary, secondary, pport, sport) = pt.split('|')
                commentList.append("new notcompatible sockets: %s:%s , %s:%s" %
                                   (primary, pport, secondary, sport))
                ruleList.append(self.get_dns_rule(primary))
                ruleList.append(self.get_dns_rule(secondary))
                ruleList.append(self.get_notc_rule(pport))
                ruleList.append(self.get_notc_rule(sport))
        except IOError:
            logger.error("Failed reading pcap")
            return (pluginName, None, None)

        return (pluginName, ruleList, commentList)
Example #15
def get_file_data(filename, data_type='RAW', force_reload=False):
    if data_type not in FILE_DATA_TYPES:
        raise ValueError("Unknown data type %s" % data_type)
    data_type_info = FILE_DATA_TYPES[data_type]
    abs_file_path = os.path.abspath(filename)
    print(abs_file_path)
    if not os.path.isfile(abs_file_path):
        raise OSError("No such file: %s" % abs_file_path)
    data_id = (abs_file_path, data_type)
    data_id_as_raw = (abs_file_path, 'RAW')
    if (data_id not in file_data) or force_reload:
        # Drop reference in advance to avoid memory problems
        if data_id in file_data:
            file_data[data_id] = None
        print "Reading..."
        if data_type_info['needs_raw']:
            if (data_id_as_raw in file_data) and (file_data[data_id_as_raw]
                                                  is not None):
                rawdata = file_data[data_id_as_raw]
            else:
                print "Reading RAW..."
                f = open(abs_file_path, "rb")
                rawdata = f.read()
                f.close()
        else:
            rawdata = None
        if data_type == 'RAW':
            file_data[data_id] = rawdata
        elif data_type == 'JSON':
            file_data[data_id] = json.loads(rawdata)
        elif data_type == 'PCAP':
            pcapreader = PcapReader(abs_file_path)
            file_data[data_id] = [p for p in pcapreader]
    return file_data[data_id] if data_id in file_data else None
Example #16
def test_ecn_marking_at_ecress(api, duthost, ecn_marking_at_ecress,
                               start_delay, pause_line_rate, traffic_line_rate,
                               traffic_duration, port_bandwidth, frame_size,
                               ecn_thresholds):

    duthost.shell('sudo pfcwd stop')
    duthost.shell('sudo sudo ecnconfig -p AZURE_LOSSLESS -gmax %s' %
                  (ecn_thresholds))
    duthost.shell('sudo sudo ecnconfig -p AZURE_LOSSLESS -gmin %s' %
                  (ecn_thresholds))

    for base_config in ecn_marking_at_ecress:
        rx_port = base_config.ports[1]
        rx_port.capture = Capture(choice=[], enable=True)

        # create the configuration
        api.set_config(base_config)

        # start capture
        api.set_port_capture(PortCapture(port_names=[rx_port.name]))

        # start all flows
        api.set_flow_transmit(FlowTransmit(state='start'))

        exp_dur = start_delay + traffic_duration
        logger.info("Traffic is running for %s seconds" % (traffic_duration))
        time.sleep(exp_dur)

        # stop all flows
        api.set_flow_transmit(FlowTransmit(state='stop'))

        pcap_bytes = api.get_capture_results(
            CaptureRequest(port_name=rx_port.name))

        # Get statistics
        test_stat = api.get_flow_results(FlowRequest())

        for rows in test_stat['rows']:
            tx_frame_index = test_stat['columns'].index('frames_tx')
            rx_frame_index = test_stat['columns'].index('frames_rx')
            caption_index = test_stat['columns'].index('name')
            if ((rows[caption_index] == 'Test Data')
                    or (rows[caption_index] == 'Background Data')):
                tx_frames = float(rows[tx_frame_index])
                rx_frames = float(rows[rx_frame_index])
                if ((tx_frames != rx_frames) or (rx_frames == 0)):
                    pytest_assert(
                        False,
                        "Not all %s reached Rx End" % (rows[caption_index]))

        # write the pcap bytes to a local file
        with open('%s.pcap' % rx_port.name, 'wb') as fid:
            fid.write(pcap_bytes)

        from scapy.all import PcapReader
        reader = PcapReader('%s.pcap' % rx_port.name)
        for item in reader:
            logger.info(item.time)
            logger.info(item.show())
Example #17
File: parser.py Project: cjelsa/iex_parser
class Parser:

    # noinspection PyArgumentList
    def __init__(self,
                 filename: str,
                 protocol: str,
                 queue_length=25000) -> None:
        self.reader = PcapReader(filename)
        self.protocol = protocol
        self.queue_length = queue_length

    def __enter__(self) -> DeepPcapReader:
        self.reader.__enter__()
        return DeepPcapReader(self.reader, self.protocol, self.queue_length)

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.reader.__exit__(exc_type, exc_val, exc_tb)
Example #18
    def set_loss_file_path(self, loss_file_path):
        """
        Sets the file which consists of the loss.

        :param loss_file_path: the path of the file consisting loss
        :type loss_file_path: basestring

        :return: self
        :rtype: LossTraceParser
        """

        assert isinstance(loss_file_path, basestring)
        assert isfile(loss_file_path) and exists(loss_file_path)

        self.__loss_file_path = loss_file_path
        self.__loss_pcap_reader = PcapReader(loss_file_path)
        return self
Example #19
    def generate_dataset(self):
        """
        解析每个pcap包,将每个样本用向量形式表示,向量每个元素是每个包长度。
        """
        label_index = 0
        if not os.path.isdir(self.dataset_path):
            logging.info("数据集目录 %s 不存在" % self.dataset_path)
            return

        for pcap_file in os.listdir(self.dataset_path):
            seq_len = 0
            label = np.zeros(self.label_num)
            sample = np.zeros((self.max_len, self.feature_num))

            if not os.path.isfile(pcap_file):
                pass
            # Extract the label from the file name: the string between "_" and "."
            m = re.match(r"^.*\_(.*)\..*$", pcap_file)
            raw_label = m.group(1)
            # Assign a new index whenever an unseen label appears
            if raw_label not in self.label_mean:
                if len(self.label_mean) >= self.label_num:
                    raise Exception("Label number error")
                self.label_mean[raw_label] = label_index
                label_index += 1
            label[self.label_mean[raw_label]] = 1
            self.labels.append(label)

            with PcapReader(self.dataset_path + "/" +
                            pcap_file) as pcap_reader:
                for packet in pcap_reader:
                    # Discard packets beyond max_len
                    if seq_len > self.max_len - 1:
                        break
                    # Skip packets whose IP layer cannot be read
                    try:
                        src = packet['IP'].fields['src']
                        dst = packet['IP'].fields['dst']
                        ip_len = packet['IP'].fields['len']
                    except:
                        continue
                    # Skip IPs that are not in the target IP list
                    if (src not in self.save_ips) and (dst
                                                       not in self.save_ips):
                        continue
                    # In each flow, IP packets sent by the phone get a positive
                    # length and received packets a negative length
                    tag = 1 if src == config.host_ip else -1
                    # Normalise ip_len
                    if ip_len > self.max_value:
                        sample[seq_len] = (1.0 * tag)
                    else:
                        sample[seq_len] = (float(ip_len) /
                                           self.max_value) * tag
                    seq_len += 1
                self.samples_len.append(seq_len)
            self.data.append(sample)
        logging.info("已将数据集转换为包长度序列")
Example #20
def main(workspace='', args=None, parser=None):

    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Verbose output from the pcapfile library.')
    parser.add_argument('pcap', help='Path to the PCAP file')

    parsed_args = parser.parse_args(args)

    try:
        from scapy.all import PcapReader
    except ImportError:
        print('scapy not found, please install it to use this plugin.'
              ' You can install it by executing pip2 install scapy in a shell.')
        return 1, None

    if not os.path.isfile(parsed_args.pcap):
        print "pcap file not found: " % parsed_args.pcap
        return 2, None

    pcap = PcapReader(parsed_args.pcap)
    for (domain, ip) in get_domain_resolutions(pcap):
        obj = factory.createModelObject(models.Host.class_signature,
                                        ip,
                                        workspace,
                                        parent_id=None)

        old = models.get_host(workspace, obj.getID())
        if old is None:
            models.create_host(workspace, obj)

        interface = factory.createModelObject(
            models.Interface.class_signature,
            '',
            workspace,
            # mac=bssid,
            ipv4_address=ip,
            ipv4_gateway='',
            ipv4_mask='',
            ipv4_dns='',
            ipv6_address='',
            ipv6_gateway='',
            ipv6_prefix='',
            ipv6_dns='',
            network_segment='',
            hostnames=[domain],
            parent_id=obj.getID())
        old = models.get_interface(workspace, obj.getID())
        if old is None:
            try:
                models.create_interface(workspace, interface)
            except:
                pass

    return 0, None
Example #21
    def testLog(self):
        data = PcapReader("./dns1.pcap")
        feeder = dnscap.Feeder(data, self.packetlog)
        feeder.loop()
        events = [[e.metric, e.service] for m in self.riemann.messages
                  for e in m.events]
        rec = [[0.01802176899380154, 'dns_average'],
               [0.03196001052856445, 'dns_max'], [9, 'dns_count'],
               [0, 'dns_orphans']]
        self.assertEqual(events, rec)
Example #22
def main():
    if len(sys.argv) != 2:
        print("I need an input file. Usage ./pcap2curl.py inputfilename")
        return

    infile = sys.argv[1]

    with PcapReader(infile) as packets:
        for p in packets:
            if p.haslayer(TCP) and p.haslayer(Raw) and p[TCP].dport == 80:
                payload = p[Raw].load
                print(payload2curl(payload))
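
payload2curl is specific to this project; a hedged stand-in that only extracts the HTTP request line from the raw TCP payload might look like this.

def request_line(payload):
    # b'GET /index.html HTTP/1.1\r\nHost: ...' -> 'GET /index.html HTTP/1.1'
    return payload.split(b'\r\n', 1)[0].decode('ascii', errors='replace')
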
Example #23
def readline_iter(filename):
    '''
    Read packets this way when the data source is a pcap file.
    :param filename: name of the pcap file
    :return:
    '''
    with PcapReader(filename) as fdesc:
        while True:
            packet = fdesc.read_packet()
            if packet is None:
                break
            yield packet
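
A hedged usage sketch for the generator above; capture.pcap is a placeholder file name.

for packet in readline_iter('capture.pcap'):
    print(packet.summary())
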
    def represent_by_length(self):
        """
        将数据集中是的每一条样本用收到或发送的数据包长度表示
        """
        if not os.path.isdir(self.dataset_path):
            logging.info("数据集目录 %s 不存在" % self.dataset_path)
            return

        for file_index, pcap_file in enumerate(os.listdir(self.dataset_path)):
            if not os.path.isfile(pcap_file):
                pass
            # 从文件名中匹配出标签,匹配从"_"到"."之间的字符串
            m = re.match(r"^.*\_(.*)\..*$", pcap_file)
            label = m.group(1)
            port2tuple = {}
            with PcapReader(self.dataset_path + "/" +
                            pcap_file) as pcap_reader:
                for packet in pcap_reader:
                    # 遍历当前pcap文件中的每一个packet
                    try:
                        src = packet['IP'].fields['src']
                        dst = packet['IP'].fields['dst']
                        len = packet['IP'].fields['len']
                    except:
                        continue
                    try:
                        sport = packet['TCP'].fields['sport']
                        dport = packet['TCP'].fields['dport']
                    except:
                        continue
                    if (src in self.save_ips) or (dst in self.save_ips):
                        # 以手机的端口号来区分一个动作所产生的不同流,
                        # 每个流中的手机发送出的IP包长度为正,手机接收到IP包长度为负
                        port, tag = (sport, 1) if src == HOSTIP else (dport,
                                                                      -1)
                        # port2tuple的键是端口号,值是一个含有3个list的tuple,
                        # 第一个是发送和接收到数据包长度,第二个是发送的数据包长度,第三个是接收的数据包长度
                        if port in port2tuple:
                            port2tuple[port][0].append(tag * len)
                        else:
                            port2tuple[port] = ([tag * len], [], [])
                        if tag == 1:
                            port2tuple[port][1].append(len)
                        elif tag == -1:
                            port2tuple[port][2].append(len)
                        else:
                            raise Exception("既不是发送出去的包也不是接收到的包")
            temp = port2tuple.values()
            # 为了让每个样本的标签不同,所以在标签后面加一个文件编号(由于接下来会把所有样本的所有流进行聚类)
            temp.append(label + str(file_index))
            self.processed_dataset.append(temp)
        logging.info("已将数据集转换为包长度序列")
Example #25
def deal_with_flow(prot, nb):
	
	with open(prot +"_flows/flow" + str(nb) + ".pcap", 'rb') as f:
		
		pkts = PcapReader(f)
			
		checksums = []
		time_deltas = []
		payload_lengths = []
		PSHs = []
		
		time_prev = 0
		time = 0
		first_iter  = True

		for pkt in pkts:
			
			time_delta = 0
			time_prev = time
			time = pkt.time

			if first_iter:
				
				portA = pkt.payload.payload.sport
				portB = pkt.payload.payload.dport
				proto = pkt.payload.proto

				[portA, portB] = sorted([portA, portB]) 
				first_iter = False

			else:
				time_delta = time - time_prev

			PSH = 0
			if(type(pkt.payload.payload) is scapy.layers.inet.TCP):
				if(pkt.payload.payload.flags & PSH_mask):
					PSH = 1

			checksums.append(pkt.payload.payload.chksum)
			time_deltas.append(time_delta)
			payload_lengths.append(len(pkt.payload.payload.payload))
			PSHs.append(PSH)

		time_deltas = list(map(float, time_deltas))

		print(str(proto) + "," + str(portA) + "," +\
		       str(portB) + "," +\
		       str(np.average(checksums)) + "," +\
		       str(np.average(time_deltas)) + "," +\
		       str(np.average(payload_lengths)) + "," +\
		       str(len(np.nonzero(PSHs)[0])/len(PSHs)))
    def decode_pcap(self):
        """Extract extended header-sized UDP data from file."""
        HEADER_SIZE = 64

        # Initialise frame list and counters
        frames = []
        num_packets = 0
        num_frames = 0

        # Create a PCAP reader instance
        packets = PcapReader(self.filename)
        frame_data = bytes()

        # Iterate through packets in reader
        for packet in packets:
            num_packets += 1

            # Extract UDP packet payload
            payload = bytes(packet[UDP].payload)

            # Read frame header
            header = np.frombuffer(payload[:HEADER_SIZE], dtype=np.uint64)
            # print([hex(val) for val in header])
            # assert header[0] == num_frames    # Assumes PCAPNG file begins from frame 0

            # If this is a start of frame packet, reset frame data
            if int(header[1]) & self.SOF:
                frame_data = bytes()

            # # Packet flags extracted from data:
            # SoF = int(header[1]) & self.SOF
            # EoF = int(header[1]) & self.EOF
            # print(len(payload), len(payload[HEADER_SIZE:]), SoF, EoF)

            # Append frame payload to frame data, including header
            frame_data += payload

            # If this is an end of frame packet, convert frame data to numpy array and append to frame list
            if int(header[1]) & self.EOF:
                frame = np.frombuffer(frame_data, dtype=np.uint16)
                # assert frame.size == self.NPIXELS
                frames.append(frame)
                num_frames += 1

        # Convert frame list to 3D numpy array
        frames = np.array(frames)

        print("Decoded {} frames from {} packets in PCAP file {}".format(
            num_frames, num_packets, self.filename))

        return frames
Example #27
def filter_pcap(pcap_path, iplist):
    """
    Filter capture by TCP packets addressed to any address in ``iplist``
    """
    pcap_filtered = []
    orig_pcap = pcap_path + ".original"
    copyfile(pcap_path, orig_pcap)
    with PcapReader(orig_pcap) as preader:
        for p in preader:
            if 'TCP' in p:
                ip = p.payload
                if ip.dst in iplist or ip.src in iplist:
                    pcap_filtered.append(p)
    wrpcap(pcap_path, pcap_filtered)
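
A hedged variant sketch that streams matching packets to disk with PcapWriter instead of collecting them in a list first; filter_pcap_streaming is an illustrative name and the layer classes are the standard scapy ones.

from scapy.all import PcapReader, PcapWriter, IP, TCP

def filter_pcap_streaming(in_path, out_path, iplist):
    writer = PcapWriter(out_path)
    with PcapReader(in_path) as preader:
        for p in preader:
            if TCP in p and IP in p:
                if p[IP].dst in iplist or p[IP].src in iplist:
                    writer.write(p)
    writer.close()
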
Example #28
def start_parsing(path):
    if path is None:
        evts = PcapEvents(None)
        http = QueenHandler()
        logger.debug("no file given, start sniffing")
        thread = Thread(target=evts.setup_sniffer)
    else:
        pcap_file = PcapReader(path)
        evts = PcapEvents(pcap_file)
        http = QueenHandler()
        evts[http.accept] = http.handle
        thread = Thread(target=evts.all_packages)
    thread.daemon = True
    return (http, thread)
Example #29
def main():
    # if len(sys.argv) != 2:
    #     print ("I need an input file. Usage ./pcap2curl.py inputfilename")
    #     return

    infile = "52.pcap"

    with PcapReader(infile) as packets:
        for p in packets:
            if p.haslayer(TCP) and p.haslayer(Raw) and p[TCP].dport == 3458:
                payload = p[Raw].load
                cmd = payload2curl(payload)
                if cmd:
                    print(cmd)
Example #30
def scapy_io(f_in, f_out):
    f = PcapReader(f_in)
    o = PcapWriter(f_out)
    pkt = f.read_packet()
    while pkt is not None:
        o.write(pkt)
        pkt = f.read_packet()
    f.close()
    o.close()
    def decode_pcap(self):
        """Extract extended header-sized UDP data from file."""
        HEADER_SIZE = 8

        # Initialise frame list and counters
        frames = []
        num_packets = 0
        num_frames = 0

        # Create a PCAP reader instance
        packets = PcapReader(self.filename)
        frame_data = bytes()
        # Iterate through packets in reader
        for packet in packets:
            num_packets += 1

            # Extract UDP packet payload
            payload = bytes(packet[UDP].payload)

            # Read frame header
            header = np.frombuffer(payload[:HEADER_SIZE], dtype=np.uint64)
            # print([hex(val) for val in header])
            # assert header[0] == num_frames    # Assumes PCAPNG file begins from frame 0
            # print(" header = {}".format(payload[:HEADER_SIZE]))
            # If this is a start of frame packet, reset frame data
            if int(header[0]) & self.SOF:
                frame_data = bytes()

            # Append frame payload to frame data, including header
            frame_data += payload
            # masked = 0x3FFFFFFF
            # pktNum = (int(header[0]) >> 32) & masked
            # frmNum = int(header[0]) & masked
            # if frmNum < 3:  # Debug info
            #     print(len(payload), len(payload[HEADER_SIZE:]), frmNum, pktNum)

            # If this is an end of frame packet, convert frame data to numpy array and append to frame list
            if int(header[0]) & self.EOF:
                frame = np.frombuffer(frame_data, dtype=np.uint16)
                frames.append(frame)
                num_frames += 1

        # Convert frame list to 3D numpy array
        frames = np.array(frames)

        print("Decoded {} frames from {} packets in PCAP file {}".format(
            num_frames, num_packets, self.filename))

        return frames
Example #32
    def parse_pcap(self):
        logging.info("Plugin %s%s%s parsing '%s%s%s' capture file. Will output info below as usual..." % (GR,self.name,G,GR,self.pcap,G))
        #data = rdpcap(self.pcap) #Tests indicate this is much slower
        if not os.path.isfile(self.pcap):
            logging.error("No such file %s%s%s! Terminating plugin." % (GR,self.pcap,G))
            exit(-1)
        try:
            r = PcapReader(self.pcap)
        except Exception as e:
            if 'Scapy_Exception' in str(e):
                logging.error("Plugin %s%s%s unable to parse '%s%s%s' capture file. Likely bad format (does not support pcap-ng). Plugin terminating..." % (GR,self.name,G,GR,self.pcap,G))
            else:
                logging.error("Plugin %s%s%s unable to parse '%s%s%s' capture file. Plugin terminating. '%s'" % (GR,self.name,G,GR,self.pcap,G,e))
            self.ready_status = False
            exit(-1)
Example #33
  def WritePcap(self, matchMap, file, outputFilename=None):
    '''WritePcap(matchMap, file, outputFilename=None)

Writes the matched pcap sessions in matchMap found in file to separate pcap files.

matchMap:  The output from MatchPcap.
file:  The pcap file you matched on previously.
outputFilename:  Allows you to specify the prefix on the output pcaps.

'''
    try:
      if not matchMap.keys():
        self.debug('matchMap is empty! No matches from greppcap?')
        raise
    except:
      self.debug('Not a valid matchMap.')
      raise
  
    try:
      pcap = PcapReader(file)
      if not outputFilename:
        # There's probably some python fu way to do this.  I have the regex fu.
        try:
          filename = re.findall(r'^(?is)[./]?(?:[^/]+/)*([^/]+)(?:\.[^\./]+)$', file)[0]
        except:
          # base filename was too crazy to figure out, go with a default one
          filename = 'greppcap'
      else:
        filename = outputFilename
    except:
      self.error('Not a valid pcap file: %s' % file)
      raise
  
    self.debug('matchMap: %s' % matchMap)
    self.debug('Writing pcaps...')
  
    # Open file handle on a pcap and append the packet to the right pcap.
    while pcap:
      try:
        packet = pcap.read_packet()
        writePacket = False
        for id in matchMap.keys(): 
          try:
            if (packet['IP'].src,packet[2].sport) in (matchMap[id]['host1'],matchMap[id]['host2']):
              if (packet['IP'].dst,packet[2].dport) in (matchMap[id]['host1'],matchMap[id]['host2']):
                writePacket = True
          except AttributeError:
            if matchMap[id]['proto'] == packet['IP'].proto:
              if packet['IP'].src in (matchMap[id]['host1'], matchMap[id]['host2']):
                if packet['IP'].dst in (matchMap[id]['host1'], matchMap[id]['host2']):
                  writePacket = True
          except IndexError:
            continue # not IP
          if writePacket:
            # Create/append the packet to a pcap file and close the handler.
            # Doing it this way avoids hitting any open file handler limit (resource.getrlimit(resource.RLIMIT_NOFILE))
            try:
              wrpcap('%s_match%d.pcap' % (filename,id),packet,append=True,sync=True)
            except IOError as e:
              self.error('OS limitation prevented completion of %s_match%d.pcap.  Error: %s' % (filename,id,e))
            break
      except TypeError:
        break # end of pcap
  
    # Now nicely announce the completion of pcaps.
    for id in matchMap.keys():
      matchMap[id]['pcap'] = '%s_match%d.pcap' % (filename,id)
      try:
        self.info('Wrote IP proto %d %s:%d <> %s:%d into %s' % (matchMap[id]['proto'],matchMap[id]['host1'][0],matchMap[id]['host1'][1],matchMap[id]['host2'][0],matchMap[id]['host2'][1],matchMap[id]['pcap']))
      except:
        self.info('Wrote IP proto %d %s <> %s into %s' % (matchMap[id]['proto'],matchMap[id]['host1'],matchMap[id]['host2'],matchMap[id]['pcap']))
  
    return matchMap
Example #34
def _set_relative_time(pcapFile, tstart, tdelta):
    r = PcapReader(pcapFile)
    time_init = next(r).time
    r.close()
    tstart = tstart + time_init
    tstop = tstart + tdelta
    return tstart, tstop
Example #35
File: network.py Project: ldorigo/LaBot
def sniff(store=False, prn=None, lfilter=None,
          stop_event=None, refresh=.1, offline=None, *args, **kwargs):
    """Sniff packets
sniff([count=0,] [prn=None,] [store=1,] [offline=None,] [lfilter=None,] + L2ListenSocket args)
Modified version of scapy.all.sniff

store : bool
    whether to store sniffed packets or discard them

prn : None or callable
    function to apply to each packet. If something is returned,
    it is displayed.
    ex: prn = lambda x: x.summary()

lfilter : None or callable
    function applied to each packet to determine
    if further action may be done
    ex: lfilter = lambda x: x.haslayer(Padding)

stop_event : None or Event
    Event that stops the function when set

refresh : float
    check stop_event.set() every `refresh` seconds
    """
    logger.debug("Setting up sniffer...")
    if offline is None:
        L2socket = conf.L2listen
        s = L2socket(type=ETH_P_ALL, *args, **kwargs)
    else:
        s = PcapReader(offline)

    # on Windows, it is not possible to select a L2socket
    if WINDOWS:
        from scapy.arch.pcapdnet import PcapTimeoutElapsed
        read_allowed_exceptions = (PcapTimeoutElapsed,)

        def _select(sockets):
            return sockets
    else:
        read_allowed_exceptions = ()

        def _select(sockets):
            try:
                return select(sockets, [], [], refresh)[0]
            except select_error as exc:
                # Catch 'Interrupted system call' errors
                if exc.args[0] == errno.EINTR:
                    return []
                raise
    lst = []
    try:
        logger.debug("Started Sniffing")
        while True:
            if stop_event and stop_event.is_set():
                break
            sel = _select([s])
            if s in sel:
                try:
                    p = s.recv(MTU)
                except read_allowed_exceptions:
                    # could add a sleep(refresh) if the CPU usage
                    # is too much on windows
                    continue
                if p is None:
                    break
                if lfilter and not lfilter(p):
                    continue
                if store:
                    lst.append(p)
                if prn:
                    r = prn(p)
                    if r is not None:
                        print(r)
    except KeyboardInterrupt:
        pass
    finally:
        logger.debug("Stopped sniffing.")
        s.close()

    return plist.PacketList(lst, "Sniffed")
Example #36
  def MatchPcap(self, regex, file, negated=False, ports=[]):
    '''MatchPcap(regex, file, negated=False, ports=[])

Matches the given regex with the given pcap file. Returns a matchMap dictionary of sessions that matched.

regex:  Any ol' valid regex will do.
file:  A libpcap file.
negated:  If true, finds sessions that do NOT match the regex.
ports:  A list of ports to restrict the matching to.
'''
  
    try:
      regex = re.compile(r'(?s)%s' % regex)
    except:
      self.error('Invalid regular expression: %s' % regex)
      raise Exception('Invalid regular expression.')
  
    try:
      # scapy.utils.PcapReader
      self.debug('Reading: %s' % file)
      pcap = PcapReader(file)
    except:
      # yes, logging.exception should be used here, but it doesn't add any value
      self.error('Not a valid pcap file: %s' % file)
      raise
  
    # matchMap format:
    # {3: {'proto': 6, 'host1': ('1.2.3.4',1024), 'host2': ('9.8.7.6',80)}}
    matchMap = {}
    newid = 1
  
    # Build a list of streams that match the search regex
    while pcap:
      try:
        packet = pcap.read_packet()
        match = {}
        matchedStream = False
  
        # Skip if the session's ports aren't in the allowed port list (-p).
        try:
          if ports and not (packet[2].sport in ports or packet[2].dport in ports):
            continue
        except AttributeError:
          # Continue; weren't any ports at all (ip.proto not in (6,17))
          pass
        except IndexError:
          # Wasn't even IP, skip it
          pass
  
        # Perform match
        try:
          rawpacket = packet[3].build()
          # for some reason, re.match doesn't work, yet re.findall does.
          if regex.findall(rawpacket):
            matchedStream = True
            #most verbose: self.debug('matched\n%s' % str(rawpacket))
  
          if matchedStream or negated:
            # Run the list backwards in hope of matching early rather than matching at the end of the entire list.
            ids = list(matchMap.keys())
            ids.reverse()
            unknownStream = True
            for id in ids:
              try:
                # Assuming we'll never see a packet with same src and dst
                # TCP,UDP layers referred to by index offset for code simplicity
                # would do this as one if statement, but separating it helps exit early and save cpu cycles
                if (packet['IP'].src,packet[2].sport) in (matchMap[id]['host1'],matchMap[id]['host2']):
                  if (packet['IP'].dst,packet[2].dport) in (matchMap[id]['host1'],matchMap[id]['host2']):
                    unknownStream = False
                    # This avoids source port reuse problems, causing session collisions
                    # unknownStream is True if it's a known session yet tcp syn flag is set.
                    if packet['IP'].proto == 6 and packet['TCP'].sprintf('%flags%') == 'S':
                      unknownStream = True
                    break
              except AttributeError:
                # most likely the session isn't tcp/udp so scapy throws AttributeError if no sport/dport exists.  Try without it instead.
                if matchMap[id]['proto'] == packet['IP'].proto:
                  if packet['IP'].src in (matchMap[id]['host1'], matchMap[id]['host2']): 
                    if packet['IP'].dst in (matchMap[id]['host1'], matchMap[id]['host2']):
                      unknownStream = False
                      break
  
          # if its not negated and its a newly matched stream, OR negated and an unknown, add it to matchMap.  if its negated and matched later, it gets deleted before the end
          if (matchedStream and unknownStream and not negated) or (negated and unknownStream and not matchedStream):
            matchMap[newid] = {}
            # Personal preference of mine:  printing matches here rather than when the function finishes gives the user a feeling things are happening, rather than get the messages all at once at the end of the call.
            # This is doubly so when dealing with massive 1g+ pcap files
            try:
              matchMap[newid] = {'proto': packet['IP'].proto, 'host1': (packet['IP'].src,packet[2].sport), 'host2': (packet['IP'].dst,packet[2].dport)}
              self.info('Match #%d: Proto %d, IPs %s:%d, %s:%d' % (newid,matchMap[newid]['proto'],matchMap[newid]['host1'][0],matchMap[newid]['host1'][1],matchMap[newid]['host2'][0],matchMap[newid]['host2'][1]))
            except AttributeError:
              matchMap[newid] = {'proto': packet['IP'].proto, 'host1': packet['IP'].src, 'host2': packet['IP'].dst} 
              self.info('Match #%d: Proto %d, IPs %s, %s' % (newid,matchMap[newid]['proto'],matchMap[newid]['host1'],matchMap[newid]['host2']))
            newid += 1
          elif matchedStream and negated and not unknownStream:
            # Flag the session as matching regex to NOT keep.
            # If deleted now, it would just come back from the next related packet
            matchMap[id]['delete'] = True

        except IndexError:
          pass # no raw layer, nothing to search
      except TypeError:
        break
  
    if negated:
      for id in list(matchMap.keys()):
        try:
          if matchMap[id]['delete']:
            del matchMap[id]
            self.info('Match #%d matched, removed from result.' % id)
        except KeyError:
          pass
      # rebuilding the sequential id's here might get confusing with the prior-printed messages.  probably best to avoid it.

    pcap.close()
    del pcap
    return matchMap
Example #37
def sniff(
    count=0,
    store=1,
    offline=None,
    prn=None,
    lfilter=None,
    L2socket=None,
    timeout=None,
    opened_socket=None,
    stop_filter=None,
    var_stop=False,
    *arg,
    **karg
):
    """Sniff packets
sniff([count=0,] [prn=None,] [store=1,] [offline=None,] [lfilter=None,] + L2ListenSocket args) -> list of packets

	count: number of packets to capture. 0 means infinity
	store: whether to store sniffed packets or discard them
	prn: function to apply to each packet. If something is returned,
	     it is displayed. Ex:
	     ex: prn = lambda x: x.summary()
lfilter: python function applied to each packet to determine
	     if further action may be done
	     ex: lfilter = lambda x: x.haslayer(Padding)
offline: pcap file to read packets from, instead of sniffing them
timeout: stop sniffing after a given time (default: None)
L2socket: use the provided L2socket
opened_socket: provide an object ready to use .recv() on
stop_filter: python function applied to each packet to determine
	         if we have to stop the capture after this packet
	         ex: stop_filter = lambda x: x.haslayer(TCP)
	"""
    c = 0

    if opened_socket is not None:
        s = opened_socket
    else:
        if offline is None:
            if L2socket is None:
                L2socket = conf.L2listen
            s = L2socket(type=ETH_P_ALL, *arg, **karg)
        else:
            s = PcapReader(offline)

    lst = []
    if timeout is not None:
        stoptime = time.time() + timeout
    remain = None

    while 1 and not var_stop:
        try:
            if timeout is not None:
                remain = stoptime - time.time()
                if remain <= 0:
                    break
            sel = select.select([s], [], [], remain)
            if s in sel[0]:
                p = s.recv(MTU)
                if p is None:
                    break
                if lfilter and not lfilter(p):
                    continue
                if store:
                    lst.append(p)
                c += 1
                if prn:
                    r = prn(p)
                    if r is not None:
                        print(r)
                if stop_filter and stop_filter(p):
                    break
                if count > 0 and c >= count:
                    break
        except KeyboardInterrupt:
            var_stop = True
            break

    if opened_socket is None:
        s.close()