コード例 #1
0
ファイル: Parse.py プロジェクト: joubin/VeloParse
def read_pcap_raw(my_pcap_file: str = '../test.pcap') -> [FireData]:
    """
    Read a pcap capture and extract the raw firing-data blocks.

    Each packet is expected to carry a 42-byte header followed by a
    1206-byte payload whose first 1200 bytes are 12 blocks of 100 bytes
    of fire data.

    :param my_pcap_file: path of the pcap file to parse
    :return: list of FireData objects, one per 100-byte block
    """
    # 'with' guarantees the handle is closed even if parsing raises
    # (the original leaked it on a load_savefile failure).
    with open(my_pcap_file, 'rb') as testcap:
        capfile = savefile.load_savefile(testcap, verbose=False)
    fire_data_collection = []
    for cap in capfile.packets:
        try:
            packet, header = next_bytes(cap.raw(), 42)
            packet, payload = next_bytes(packet, 1206)
            packet, block_fire_data = next_bytes(payload, 1200)
            # 12 fixed-size blocks of 100 bytes each.
            for i in range(12):
                block_fire_data, data = next_bytes(block_fire_data, 100)
                fire_data_collection.append(FireData.FireData.create_with_date(data))
        except ValueError:
            # TODO: some packets do not contain exactly what the
            # documentation claims; they are skipped for now.
            pass

    return fire_data_collection
コード例 #2
0
def extract_credentials_from_pcap(pcap_file):
    """
    Scan a pcap file for the first control-command (513) packet and
    extract the device id, phone id and device password from it.

    :param pcap_file: path of the capture to scan
    :return: (device_id, phone_id, device_pass) as hex strings; exits
        the process if no matching packet is found
    """
    from pcapfile import savefile

    print("Loading and parsing pcap file:")
    # 'with' ensures the capture file is closed (the original leaked it).
    with open(pcap_file, 'rb') as testcap:
        capfile = savefile.load_savefile(testcap, layers=1, verbose=True)
    print("\n")
    for packet in capfile.packets:
        packet = bytearray(binascii.unhexlify(packet.packet.payload))
        if len(packet) <= 40:  # too short to hold the tcp header + data
            continue

        packet = packet[40:]  # strip the 40-byte tcp header

        command = get_command_from_header(packet)
        if command != 513:
            if g_debug:
                print("Not control command, continuing to next packet, command: %d" % command)
            continue

        # Fixed offsets within the control payload.
        device_id = binascii.hexlify(packet[40:43]).decode("utf-8")
        phone_id = binascii.hexlify(packet[44:46]).decode("utf-8")
        device_pass = binascii.hexlify(packet[48:52]).decode("utf-8")

        return device_id, phone_id, device_pass

    print("ERROR: Didn't find ids in pcap file")
    exit(-1)
コード例 #3
0
def main():
	"""
	Parse a USB HID capture: decode keyboard keypresses via hidMap and
	report mouse click/unclick events.
	"""
	if(len(sys.argv) != 2):
		print("Usage: python parsePcap.py <pcap.file>")
		return


	# NOTE(review): the capture is opened in text mode and never
	# closed; Python 3 would require 'rb' -- confirm target version.
	fileIn = open(sys.argv[1])
	capFile = savefile.load_savefile(fileIn, verbose=True)

	previousMouseState = 0

	for packet in capFile.packets:
		# 70 hex chars == 35 raw bytes: the interrupt frames of interest.
		if(len(hexlify(packet.raw())) == 70):
			leftoverPacketData = hexlify(packet.raw())[54:]

			if(hexlify(packet.raw())[43] == '1'):
				# Keyboard report: the second byte is the HID keycode.
				keyPressed = int(leftoverPacketData[4:6], 16);
				if keyPressed in hidMap:
					if(keyPressed == 0):
						print(leftoverPacketData)
					else:
						print(hidMap[keyPressed])
			else:
				# Mouse report: first byte carries the button state.
				clicked = int(leftoverPacketData[0:2], 16)
				if(clicked == 1):
					print("Mouse clicked!"+str(packet.timestamp))
				else:
					print("Mouse unclicked!")
コード例 #4
0
def output_packets(fname, num_packets):
    """
    Print up to num_packets parsed frames from the pcap file fname.

    Frames are unpacked down to layer 3; IPv4 (0x0800) and VLAN-tagged
    (0x8100) frames additionally get their network and transport layers
    printed.

    :param fname: path of the pcap file
    :param num_packets: maximum number of packets to print
    """
    print("PCAP file is ", fname)

    # 'with' guarantees the capture file is closed (original leaked it).
    with open(fname, 'rb') as fp:
        pcap = savefile.load_savefile(fp, layers=3, verbose=True)
    print(pcap)

    # Clamp to the number of packets actually captured.
    max_packet = min(len(pcap.packets), num_packets)

    for i in range(max_packet):
        eth_frame = pcap.packets[i]
        print(eth_frame)
        # 'eth_type' avoids shadowing the builtin 'type'.
        eth_type = eth_frame.packet.type
        if eth_type == 0x0800:  # IPv4
            ip_packet = eth_frame.packet.payload
            print(ip_packet)
            layer3_packet = ip_packet.payload
            print(layer3_packet)
        elif eth_type == 0x8100:  # 802.1Q VLAN tag
            vlan_packet = eth_frame.packet.payload
            print(vlan_packet)
            ip_packet = vlan_packet.payload
            print(ip_packet)
            layer3_packet = ip_packet.payload
            print(layer3_packet)
        print()
コード例 #5
0
def pcapinfo():
    """
    Dump the parsed contents of a fixed pcap file into a fixed report file.

    NOTE(review): sys.stdout is redirected and never restored, and
    neither file handle is closed -- confirm this is a one-shot script.
    """
    import sys
    from pcapfile import savefile
    testcap = open('E:/Cyber/test.pcap', 'rb')
    capfile = savefile.load_savefile(testcap, verbose=True)
    # Redirect stdout so the print below lands in the report file.
    sys.stdout = open('E:/Cyber/pcapinfo.txt', 'w')
    print capfile
コード例 #6
0
ファイル: savefile_test.py プロジェクト: quinao/pypcapfile
    def test_lazy_import(self):
        """
        Test the lazy packet parsing against the regular implementation.

        NOTE(review): capfile_gen.packets is consumed by list() below;
        if it is a one-shot generator, the zip() at the end iterates an
        exhausted stream and the field comparison is vacuous -- verify.
        """
        # Load the savefile again, but create an iterator for the
        # packets instead of reading them all into memory at once.
        tfile = create_pcap()
        capfile_gen = savefile.load_savefile(tfile, lazy=True)

        # Create a list of packets using the iterator. This way the
        # length can be checked before comparing any content.
        packets = list(capfile_gen.packets)

        tfile.close()
        if os.path.exists(tfile.name):
            os.unlink(tfile.name)

        self.assertEqual(len(packets), len(self.capfile.packets),
                         'lazy parsing gives different number of packets!')

        # Compare the relevant parts of the packets.
        fields = ['timestamp', 'timestamp_us', 'capture_len',
                  'packet_len', 'packet']
        for act, ref in zip(packets, capfile_gen.packets):
            for field in fields:
                self.assertEqual(getattr(act, field), getattr(ref, field),
                                 'lazy parsing gives different data!')
コード例 #7
0
ファイル: parse3.py プロジェクト: 5l1v3r1/ctf-14
def main():
    """
    Decode a Morse-like message from packet timing in a pcap file.

    Packets of exactly 0x41 bytes are inspected; the gap between
    consecutive packets selects dot/dash/letter-space.
    NOTE(review): the 300 and 100000 millisecond thresholds look
    empirical -- confirm against the capture being solved.
    """
    s=""
    lastt=0
    if(len(sys.argv) != 2):
        print("Usage: python parsePcap.py <pcap.file>")
        return


    # NOTE(review): text-mode open, never closed; Python 3 needs 'rb'.
    fileIn = open(sys.argv[1])
    capFile = savefile.load_savefile(fileIn, verbose=True)

    for packet in capFile.packets:
        if(len(hexlify(packet.raw())) == 0x41*2):
            # Last byte of the raw packet, as two hex characters.
            leftoverPacketData = hexlify(packet.raw())[-2:]

            # Timestamp in milliseconds; diff is the inter-packet gap.
            t=packet.timestamp*1000+packet.timestamp_ms
            diff=t-lastt
            lastt=t
            if diff>100000:
                continue
            if leftoverPacketData!="03":
                if diff>300:
                    s+="_"
                else:
                    s+="."
            else:
                if diff<300:
                    s+=""
                else:
                    s+=" "
    print s
コード例 #8
0
def parsepcapfilejoin(s, filename):
    """
    Append to s the offsets (seconds since basetimestamp) of every
    one-second bucket containing more than THRESH_PKT packets, carrying
    run state across consecutive files via module globals.

    :param s: numpy array of offsets accumulated so far
    :param filename: pcap file to process
    :return: np.unique of the updated offset array
    """
    global basetimestamp
    global lastfiletimestamp
    global lastcount
    testcap = open(filename, 'rb')
    capfile = savefile.load_savefile(testcap, verbose=True)
    count = 1
    prevtimestamp = capfile.packets[0].timestamp
    # The first file establishes the base; later files continue a run
    # that straddles the file boundary by restoring the saved count.
    if (basetimestamp == 0):
        basetimestamp = prevtimestamp
    else:
        if (lastfiletimestamp == prevtimestamp):
            count = lastcount
    print "process:", filename, "len(capfile.packets):", len(capfile.packets)
    print basetimestamp
    ts = np.array([])  # NOTE(review): unused -- candidate for removal
    for pkt in capfile.packets[1:]:
        timestamp = pkt.timestamp
        d = timestamp - prevtimestamp
        if (d == 0):
            count += 1
        else:
            #print prevtimestamp, "count:", count
            if (count > THRESH_PKT):
                s = np.append(s, prevtimestamp - basetimestamp)
            prevtimestamp = timestamp
            count = 1
    #print prevtimestamp, "count:", count
    # Flush the final run, then persist state for the next file.
    if (count > THRESH_PKT):
        s = np.append(s, prevtimestamp - basetimestamp)
    lastfiletimestamp = timestamp
    lastcount = count
    s = np.unique(s)
    return s
コード例 #9
0
ファイル: savefile_test.py プロジェクト: quinao/pypcapfile
 def init_capfile(self, layers=0):
     """Parse a freshly generated pcap into self.capfile, then clean up."""
     temp = create_pcap()
     temp_name = temp.name
     self.capfile = savefile.load_savefile(temp, layers=layers)
     temp.close()
     if os.path.exists(temp_name):
         os.unlink(temp_name)
コード例 #10
0
    def test_lazy_import(self):
        """
        Test the lazy packet parsing against the regular implementation.

        NOTE(review): list() below consumes capfile_gen.packets; if the
        attribute is a one-shot generator, the final zip() loop compares
        nothing -- verify against the pypcapfile implementation.
        """
        # Load the savefile again, but create an iterator for the
        # packets instead of reading them all into memory at once.
        tfile = create_pcap()
        capfile_gen = savefile.load_savefile(tfile, lazy=True)

        # Create a list of packets using the iterator. This way the
        # length can be checked before comparing any content.
        packets = list(capfile_gen.packets)

        tfile.close()
        if os.path.exists(tfile.name):
            os.unlink(tfile.name)

        self.assertEqual(len(packets), len(self.capfile.packets),
                         'lazy parsing gives different number of packets!')

        # Compare the relevant parts of the packets.
        fields = [
            'timestamp', 'timestamp_us', 'capture_len', 'packet_len', 'packet'
        ]
        for act, ref in zip(packets, capfile_gen.packets):
            for field in fields:
                self.assertEqual(getattr(act, field), getattr(ref, field),
                                 'lazy parsing gives different data!')
コード例 #11
0
 def init_capfile(self, layers=0):
     """Build a throwaway pcap, parse it into self.capfile, remove it."""
     scratch = create_pcap()
     self.capfile = savefile.load_savefile(scratch, layers=layers)
     scratch.close()
     if os.path.exists(scratch.name):
         os.unlink(scratch.name)
コード例 #12
0
def parsepcapfilesingle(s, cs, filename):
    """
    Collect, from one pcap file, the second-offsets and packet counts of
    every one-second bucket holding more than THRESH_PKT packets.

    :param s: numpy array of offsets accumulated so far
    :param cs: numpy array of matching packet counts
    :param filename: pcap file to process
    :return: (np.unique(s), cs)
    """
    testcap = open(filename, 'rb')
    capfile = savefile.load_savefile(testcap, verbose=True)
    count = 1
    prevtimestamp = getTimeStamp(capfile.packets[0])
    basetimestamp = prevtimestamp
    print "filename:", filename, "len(capfile.packets):", len(capfile.packets)
    print basetimestamp
    ts = np.array([])  # NOTE(review): unused -- candidate for removal
    for pkt in capfile.packets[1:]:
        timestamp = getTimeStamp(pkt)
        d = timestamp - prevtimestamp
        if (d == 0):
            count += 1
        else:
            #print "count:", count
            if (count > THRESH_PKT):
                #print prevtimestamp-basetimestamp, "count:", count
                s = np.append(s, prevtimestamp - basetimestamp)
                cs = np.append(cs, count)
            prevtimestamp = timestamp
            count = 1
    #print prevtimestamp, "count:", count
    # NOTE(review): the final run is appended to s but not to cs,
    # unlike the in-loop branch -- confirm this asymmetry is intended.
    if (count > THRESH_PKT):
        s = np.append(s, prevtimestamp - basetimestamp)
    lastfiletimestamp = timestamp
    lastcount = count
    s = np.unique(s)
    return s, cs
コード例 #13
0
def read_file():
    """
    Parse 'cia.pcap' (ethernet + IP layers) and return one dict per
    packet with the ethernet and IP source/destination addresses.

    :return: list of dicts with keys eth_src, eth_dst, ip_src, ip_dst
    """
    # 'with' closes the capture file even if parsing fails
    # (the original never closed it).
    with open('cia.pcap', 'rb') as testcap:
        capfile = savefile.load_savefile(testcap, layers=2, verbose=True)

    pkt_list = []
    for pkt in capfile.packets:
        # All fields are byte arrays; decode them to UTF-8 strings so
        # callers can compare against plain strings.
        pkt_list.append({
            "eth_src": pkt.packet.src.decode('UTF8'),
            "eth_dst": pkt.packet.dst.decode('UTF8'),
            "ip_src": pkt.packet.payload.src.decode('UTF8'),
            "ip_dst": pkt.packet.payload.dst.decode('UTF8'),
        })

    return pkt_list
コード例 #14
0
ファイル: convert_pcap.py プロジェクト: larytet/video
def convertmf_image(arguments):
    '''
    Read PCAP frame by frame. The packet contains 4 bytes of timestamp, 2 bytes of fragment index

    Dumps the packet payloads into intermediate files and converts each
    into a PNG frame via ffmpeg.  The 'while True: ... break' shape is a
    single-pass guard chain: each failed precondition breaks out early
    instead of nesting if/else.
    '''
    while True:
        filename_in = arguments["--filein"]
        filename_out = arguments["--fileout"]
        offset_str = arguments["--offset"]  # NOTE(review): unused here -- verify
        (result, filecap) = open_file(filename_in, 'rb')
        if not result:
            logger.error("Failed to open file '{0}' for reading".format(filename_in))
            break


        (result, width, height) = parse_arguments_resolution(arguments["--resolution"])
        if not result:
            break

        (result, ffmpeg_path) = parse_arguments_ffmpeg(arguments)
        if not result:
            logger.error("This mode requires correct ffmpeg path. Exiting.")
            break

        packets = savefile.load_savefile(filecap, verbose=True).packets
        logger.info("Processing '{0}' packets, resolution {1}x{2}".format(
            len(packets), width, height))

        files = convertmf_dump_pcap(packets, filename_out)

        # One PNG per dumped file, numbered sequentially.
        frame_index = 0
        for filename_out in files:
            convertmf_rgb565_png(filename_out, frame_index, width, height, ffmpeg_path)
            frame_index = frame_index + 1

        break
コード例 #15
0
def read_packet_dump():
    """
    Load 'example.pcap' and return the parsed packet dump.

    :return: the savefile object whose .packets holds the parsed packets
    """
    print('[DEBUG] Starting read_packet() method')
    # 'with' closes the file (the original leaked the handle); packets
    # are parsed eagerly, so the handle is not needed afterwards.
    with open('example.pcap', 'rb') as testcap:
        packet_dump = savefile.load_savefile(testcap, verbose=True)
    print('[INFO] Packet quantity: ' + str(len(packet_dump.packets)))
    print('[DEBUG] Returning packet dump')
    return packet_dump
コード例 #16
0
def start_pcap_parser(r, w, interface):
    """ Start parsing incoming packets """

    from pcapfile import savefile
    from os import write, close, fdopen

    try:
        # Read the capture from pipe fd 'r'; lazy=True streams packets
        # as they arrive instead of waiting for EOF.
        cap = savefile.load_savefile(fdopen(r, 'rb'), verbose=False, lazy=True, layers=3)

        # packets are loaded lazily via a generator, so we just iterate over them
        for p in cap.packets:
            payload = p.packet.payload

            # discard L1/L2 packets
            if isinstance(payload, bytes):
                continue

            # Here payload is a parsed IP object; its payload is TCP/UDP.
            src_port, dst_port = payload.payload.src_port, payload.payload.dst_port
            src, dst = payload.src.decode('utf-8'), payload.dst.decode('utf-8')
            # {src}:{src_port} {dst}:{dst_port} {interface} {payload}
            data_string = '{}:{} {}:{} {} {}\n'.format(src, src_port, dst, dst_port, interface,
                                                       payload.payload.payload.decode('utf-8'))

            # convert to byte buffer and write to the output pipe
            buf = bytes(data_string, 'utf-8')
            write(w, buf)
    except KeyboardInterrupt:
        pass

    # Close the write end so the downstream reader sees EOF.
    # NOTE(review): the fd wrapped by fdopen(r) is never closed -- verify.
    close(w)
コード例 #17
0
ファイル: sanicap.py プロジェクト: mlong168/chox
def sanitize(filepath_in,
             filepath_out=None,
             sequential=True,
             ipv4_mask=0,
             ipv6_mask=0,
             mac_mask=0,
             start_ipv4='10.0.0.1',
             start_ipv6='2001:aa::1',
             start_mac='00:aa:00:00:00:00'):
    """
    Anonymise the MAC and IP addresses of every packet in a capture.

    :param filepath_in: pcap file to sanitize
    :param filepath_out: output path; defaults to
        '<input>_sanitized_<timestamp><ext>'
    :param sequential: hand out replacement addresses sequentially
    :param ipv4_mask: mask passed to the IPv4 address generator
    :param ipv6_mask: mask passed to the IPv6 address generator
    :param mac_mask: mask passed to the MAC address generator
    :param start_ipv4: first replacement IPv4 address
    :param start_ipv6: first replacement IPv6 address
    :param start_mac: first replacement MAC address
    :return: basename of the file written
    """
    if not filepath_out:
        # %H%M%S: the original used %m (month) where minutes (%M) were
        # intended, producing a wrong timestamp in the filename.
        timestamp = datetime.datetime.now().strftime('%y%m%d-%H%M%S')
        filepath_out = os.path.splitext(filepath_in)[
            0] + '_sanitized_' + timestamp + os.path.splitext(filepath_in)[1]

    mac_gen = MACGenerator(sequential=sequential,
                           mask=mac_mask,
                           start_mac=start_mac)
    ip4_gen = IPv4Generator(sequential=sequential,
                            mask=ipv4_mask,
                            start_ip=start_ipv4)
    ip6_gen = IPv6Generator(sequential=sequential,
                            mask=ipv6_mask,
                            start_ip=start_ipv6)

    # 'rb': pcap data is binary; text mode breaks parsing on Python 3.
    with open(filepath_in, 'rb') as capfile:

        #open cap file with pcapfile
        cap = savefile.load_savefile(capfile, verbose=False)

        #use scapy's pcapwriter
        pktwriter = PcapWriter(filepath_out, append=True)

        try:
            for pkt in cap.packets:

                #create scapy packet from pcapfile packet raw output
                pkt = Ether(pkt.raw())

                #MAC addresses
                pkt.src = mac_gen.get_mac(pkt.src)
                pkt.dst = mac_gen.get_mac(pkt.dst)

                #IP Address: try IPv4 first, fall back to IPv6.
                try:
                    pkt['IP'].src = ip4_gen.get_ip(pkt['IP'].src)
                    pkt['IP'].dst = ip4_gen.get_ip(pkt['IP'].dst)
                except IndexError:
                    pkt['IPv6'].src = ip6_gen.get_ip(pkt['IPv6'].src)
                    pkt['IPv6'].dst = ip6_gen.get_ip(pkt['IPv6'].dst)

                pktwriter.write(pkt)

        finally:
            pktwriter.close()

    return filepath_out.split('/')[-1]
コード例 #18
0
def getFile(filePath):
    """
    Load a pcap file parsed down to layer 2 and return the capture.

    Prints an error and exits the process if the file cannot be loaded.

    :param filePath: path of the pcap file
    :return: the parsed savefile object
    """
    try:
        # 'with' closes the handle even if parsing raises (the original
        # close was skipped on failure).
        with open(filePath, "rb") as testcap:
            return savefile.load_savefile(testcap, layers=2, verbose=True)
    except Exception:
        # 'except Exception' instead of a bare 'except:' so SystemExit
        # and KeyboardInterrupt are not swallowed.
        print("Invalid file path")
        exit()
コード例 #19
0
def main():
    """
    Load the sample capture and report per-packet goodput for traffic
    from the 192.168.2.x sender to the 192.168.1.x receiver.
    """
    # 'with' closes the capture file after parsing (original leaked it).
    with open('./sample_pcap.pcap', 'rb') as testcap:
        capfile = savefile.load_savefile(testcap, verbose=True)

    # Plain strings: the f-prefix on the originals interpolated nothing.
    # NOTE(review): the '.' characters are regex wildcards, not literal
    # dots -- confirm the loose match is intended before escaping them.
    sender_src = re.compile("192.168.2..")
    sender_dst = re.compile("192.168.1..")

    goodput_per_packet(capfile.packets, sender_src, sender_dst)
コード例 #20
0
def learning_phase(target_ip: str, mix_ip: str, m: int, filename: str) -> list:
    """
    For every packet sent by target_ip, collect the set of destination
    IPs observed in the following batch from the mix.

    :param target_ip: source IP whose send events anchor each batch
    :param mix_ip: IP of the mix node, forwarded to next_batch
    :param m: NOTE(review): unused in this body -- confirm callers
    :param filename: pcap file (ethernet + IP layers) to analyse
    :return: list of sets of destination-IP strings, one per send event
    """
    # 'with' guarantees the capture file is closed (the original opened
    # it inline and leaked the handle).
    with open(filename, 'rb') as handle:
        cap = savefile.load_savefile(handle, layers=2, verbose=True)
    # Materialise once instead of rebuilding list(cap.packets) per batch.
    packets = list(cap.packets)
    send_indices = [
        i
        for i, p in enumerate(packets)
        if p.packet.payload.src.decode('UTF8') == target_ip
    ]
    batches = [next_batch(i, mix_ip, packets) for i in send_indices]
    return [set(map(lambda x: x.packet.payload.dst.decode('UTF8'), batch))
            for batch in batches]
コード例 #21
0
def parsepcap(filename):
    """
    Return a numpy array of per-packet timestamps in milliseconds
    (timestamp * 1000 + timestamp_ms) for the given pcap file.

    NOTE(review): the first packet is skipped and the handle is never
    closed -- confirm both are intentional.
    """
    testcap = open(filename, 'rb')
    capfile = savefile.load_savefile(testcap, verbose=True)
    print "filename:", filename, "len(capfile.packets):", len(capfile.packets)
    ts = np.array([])
    for pkt in capfile.packets[1:]:
        timestamp = pkt.timestamp * 1000 + pkt.timestamp_ms
        #print timestamp
        ts = np.append(ts, timestamp)
    return ts
コード例 #22
0
    def __init__(self, path, transform, layers=0, verbose=False, lazy=False):
        """
        Open and parse a pcap file, then initialise the parent dataset
        with the parsed packets and the per-packet transform callable.

        The file object is deliberately kept open on self.file_raw:
        with lazy=True the packets are generated on demand and still
        need the handle.
        """
        from pcapfile.savefile import load_savefile

        self.file_raw = open(path, 'rb')
        self.file_parsed = load_savefile(self.file_raw,
                                         layers=layers,
                                         verbose=verbose,
                                         lazy=lazy)

        super().__init__(self.file_parsed.packets, transform)
コード例 #23
0
def read(filename, verbose=False):
    """
    Read metadata from a *.pcap file and return a pandas dataframe

    One row per frame: timestamp, IP protocol number, source and
    destination team (via ip2team) and port, and the transport payload
    length.  Prints a TCP/UDP/other summary before returning.
    """

    metadata = []
    num_tcp, num_udp, num_other = 0, 0, 0

    with open(filename, 'rb') as handle:
        cap = savefile.load_savefile(handle, layers=3, verbose=verbose)

        for frame in cap.packets:
            # link layer (ethernet)
            timestamp = frame.timestamp

            # network layer (ip)
            packet = frame.packet.payload
            src_team = ip2team(packet.src)
            dst_team = ip2team(packet.dst)
            protocol = packet.p  # IP protocol number (6=TCP, 17=UDP)

            # transport layer (udp, tcp)
            if isinstance(packet.payload, UDP):
                num_udp += 1

                segment = packet.payload
                len_payload = len(segment.payload)
                src_port = segment.src_port
                dst_port = segment.dst_port

            elif isinstance(packet.payload, TCP):
                num_tcp += 1

                datagram = packet.payload
                len_payload = len(datagram.payload)
                src_port = datagram.src_port
                dst_port = datagram.dst_port

            else:
                num_other += 1

                # use complete ip packet payload for other protocols
                len_payload = len(packet.payload)
                src_port, dst_port = None, None

            metadata.append((timestamp, protocol,
                             src_team, src_port, dst_team, dst_port, len_payload))

    # convert to pandas data frame
    metadata = pd.DataFrame(metadata, columns=COLUMNS).set_index(INDEX)
    print("Read metadata of {:,} packets ({:,} TCP, {:,} UDP, {:,} Other)"
          .format(metadata.shape[0], num_tcp, num_udp, num_other))

    return metadata
コード例 #24
0
def zipped_read_pcap():
    """
    Read a gzip-compressed pcap and return one dict per packet with the
    IP source and destination addresses.

    :return: list of {'ip_src': str, 'ip_dst': str} dicts
    """
    # 'with' closes the compressed stream even if parsing fails
    # (the original never closed it).
    with gzip.open(r'cia.log.5.pcap.gz', 'rb') as testcap:
        capfile = savefile.load_savefile(testcap, layers=2, verbose=True)

    # print the packets
    print('timestamp\teth src\t\t\teth dst\t\t\tIP src\t\tIP dst')
    # Addresses are byte strings; decode for plain-text output.
    return [{'ip_src': pkt.packet.payload.src.decode('UTF8'),
             'ip_dst': pkt.packet.payload.dst.decode('UTF8')}
            for pkt in capfile.packets]
コード例 #25
0
ファイル: sanicap.py プロジェクト: rkornmeyer/cloud-pcap
def sanitize(
    filepath_in,
    filepath_out=None,
    sequential=True,
    ipv4_mask=0,
    ipv6_mask=0,
    mac_mask=0,
    start_ipv4="10.0.0.1",
    start_ipv6="2001:aa::1",
    start_mac="00:aa:00:00:00:00",
):
    """
    Anonymise the MAC and IP addresses of every packet in a capture.

    :param filepath_in: pcap file to sanitize
    :param filepath_out: output path; defaults to
        '<input>_sanitized_<timestamp><ext>'
    :param sequential: hand out replacement addresses sequentially
    :param ipv4_mask: mask passed to the IPv4 address generator
    :param ipv6_mask: mask passed to the IPv6 address generator
    :param mac_mask: mask passed to the MAC address generator
    :param start_ipv4: first replacement IPv4 address
    :param start_ipv6: first replacement IPv6 address
    :param start_mac: first replacement MAC address
    :return: basename of the file written
    """
    if not filepath_out:
        # %H%M%S: the original used %m (month) where minutes (%M) were
        # intended, producing a wrong timestamp in the filename.
        timestamp = datetime.datetime.now().strftime("%y%m%d-%H%M%S")
        filepath_out = os.path.splitext(filepath_in)[0] + "_sanitized_" + timestamp + os.path.splitext(filepath_in)[1]

    mac_gen = MACGenerator(sequential=sequential, mask=mac_mask, start_mac=start_mac)
    ip4_gen = IPv4Generator(sequential=sequential, mask=ipv4_mask, start_ip=start_ipv4)
    ip6_gen = IPv6Generator(sequential=sequential, mask=ipv6_mask, start_ip=start_ipv6)

    # 'rb': pcap data is binary; text mode breaks parsing on Python 3.
    with open(filepath_in, "rb") as capfile:

        # open cap file with pcapfile
        cap = savefile.load_savefile(capfile, verbose=False)

        # use scapy's pcapwriter
        pktwriter = PcapWriter(filepath_out, append=True)

        try:
            for pkt in cap.packets:

                # create scapy packet from pcapfile packet raw output
                pkt = Ether(pkt.raw())

                # MAC addresses
                pkt.src = mac_gen.get_mac(pkt.src)
                pkt.dst = mac_gen.get_mac(pkt.dst)

                # IP Address: try IPv4 first, fall back to IPv6.
                try:
                    pkt["IP"].src = ip4_gen.get_ip(pkt["IP"].src)
                    pkt["IP"].dst = ip4_gen.get_ip(pkt["IP"].dst)
                except IndexError:
                    pkt["IPv6"].src = ip6_gen.get_ip(pkt["IPv6"].src)
                    pkt["IPv6"].dst = ip6_gen.get_ip(pkt["IPv6"].dst)

                pktwriter.write(pkt)

        finally:
            pktwriter.close()

    return filepath_out.split("/")[-1]
コード例 #26
0
def parsePcapFile(pcapFileName, pcapIndex):
	"""
	Record the destination IP of every IPv4 frame in the pcap into the
	global foundIPs table under pcapIndex.

	NOTE(review): the body below mixes tabs and spaces exactly as in
	the original -- do not reformat blindly; the file is also opened in
	text mode, which breaks pcap parsing on Python 3.
	"""
	# open the pcap file for reading
	pcap_file_f = open(path + "/" + pcapFileName)
	capfile = savefile.load_savefile(pcap_file_f, verbose=True)

	for pkt in capfile.packets:
		eth_frame = ethernet.Ethernet(pkt.raw())
	  	eth_type = eth_frame.type
	  	if(eth_type == 2048):
			ip_packet = ip.IP(binascii.unhexlify(eth_frame.payload))
			# foundIPs[pcapFileName][ip_packet.dst] = ip_packet.dst	# push ip into table
			foundIPs[pcapIndex][ip_packet.dst] = 1	# push ip into table
	pcap_file_f.close()		# close file descriptor
コード例 #27
0
ファイル: pcapFilter.py プロジェクト: hamouda94/PCAPFilter
def parsePcapFile(pcapFileName, pcapIndex):
    """
    Record the destination IP of every IPv4 frame in the given pcap
    into the global foundIPs table under pcapIndex.

    :param pcapFileName: file name, joined onto the global 'path'
    :param pcapIndex: key into the global foundIPs table
    """
    # Open in binary mode: pcap data is binary and text mode corrupts
    # it on Python 3 / Windows.  'with' closes the file even if parsing
    # raises (the original manual close was not exception-safe).
    with open(path + "/" + pcapFileName, 'rb') as pcap_file_f:
        capfile = savefile.load_savefile(pcap_file_f, verbose=True)

        for pkt in capfile.packets:
            eth_frame = ethernet.Ethernet(pkt.raw())
            if eth_frame.type == 2048:  # 0x0800: IPv4
                ip_packet = ip.IP(binascii.unhexlify(eth_frame.payload))
                foundIPs[pcapIndex][ip_packet.dst] = 1  # push ip into table
コード例 #28
0
def main(argv):
    """
    Load a pcap with both pypcapfile and scapy and print summaries of
    the first captured packets.

    NOTE(review): 'packets' is not defined in this function; presumably
    a module global filled by history_check during sniff() -- verify.
    """
    if len(argv) < 1:
      print "Error: please pass in pcap file."
      sys.exit(1)
    testcap = open(argv[0], 'rb')
    capfile = savefile.load_savefile(testcap, verbose=True)
    print capfile
    #cap = pyshark.FileCapture(argv[0])
    #print cap
    a=scapy.rdpcap(argv[0])
    scapy.sniff(offline=argv[0], prn=history_check)
    print packets[0][1].summary()
    print packets[0].show()
    print packets[0]["TCP"].seq
コード例 #29
0
def main(nazir_ip, mix_ip, nbr_partners, data):
    """
    Identify Nazir's communication partners from a pcap capture and
    print each partner IP plus the sum of their integer values.

    :param nazir_ip: IP of the observed target
    :param mix_ip: IP of the mix node
    :param nbr_partners: number of partners to learn
    :param data: path of the pcap file (ethernet + IP layers)
    """
    # 'with' closes the capture file once parsing is done
    # (the original leaked the handle).
    with open(data, 'rb') as testcap:
        capfile = savefile.load_savefile(testcap, layers=2, verbose=True)

    all_sets, distinct_sets = learn(nazir_ip, mix_ip, capfile, nbr_partners)
    ips = exclude(distinct_sets, all_sets)

    print("Found the following partners of Nazir:")
    ip_sum = 0  # renamed from 'sum' to stop shadowing the builtin
    for ip_set in ips:
        ip = ip_set.pop()
        print("IP: {}".format(ip))
        ip_sum += fc.hex_to_int(fc.ip_to_hex(ip))
    print("Sum of IPs: {}".format(ip_sum))
コード例 #30
0
ファイル: dataset.py プロジェクト: SolitaryKnife/pckt_class
    def __init__(self,
                 path: str,
                 transform: _T.Callable,
                 layers: int = 0,
                 verbose: bool = False,
                 lazy: bool = False):
        """
        Parse the pcap at *path* and initialise the parent dataset with
        the parsed packets and the per-packet *transform* callable.

        The raw handle stays open on self.file_raw: with lazy=True the
        packets are generated on demand and still read from it.
        """
        from pcapfile.savefile import load_savefile

        self.file_raw = open(path, 'rb')
        self.file_parsed = load_savefile(self.file_raw,
                                         layers=layers,
                                         verbose=verbose,
                                         lazy=lazy)

        super().__init__(self.file_parsed.packets, transform)
コード例 #31
0
def extract_pcap(pcapfile):
    """
    Count ipv4 src/dst pairs in a pcap file, print the totals, and
    optionally plot them and/or export them as CSV.

    Fixes over the original: a missing file no longer falls through to
    a NameError on the unset handle, and the duplicated export prompt
    is asked exactly once.

    :param pcapfile: path of the capture to analyse
    """
    try:
        open_pcap = open(pcapfile, 'rb')
    except IOError:
        print("Le fichier n'existe pas !")
        return  # the original continued and crashed on the unset handle

    pcap_file = savefile.load_savefile(open_pcap, layers=2, verbose=True)
    open_pcap.close()

    clean_pairs = re.compile('ipv4 packet from ([^<]* to [^<]*) carrying')

    # One textual payload description per packet.
    pair_matches = [str(pkt.packet.payload) for pkt in pcap_file.packets]

    matches = []
    clean_matches = []
    for description in pair_matches:
        clean_pair = re.findall(clean_pairs, description)
        if len(clean_pair) > 0:
            # Strip the b'...' byte-literal artefacts from the repr.
            matches.append(clean_pair[0].replace('b', '').replace("'", ""))
            tmp = clean_pair[0].replace('to', '').replace('b',
                                                          '').replace("'", "")
            clean_matches.append(tmp)

    results = [[x, matches.count(x)] for x in set(matches)]
    clean_results = [[x, clean_matches.count(x)] for x in set(clean_matches)]

    for pair, occurrences in results:
        print(str(pair) + ' -> ' + str(occurrences) + ' fois')

    toGraphics = str(input("Voulez-vous générer des graphiques ? (o/N) "))
    if toGraphics.lower() == 'o':
        generate_graph(clean_results)
    toExport = str(input("Voulez-vous exporter en CSV ? (o/N) "))
    if toExport.lower() == 'o':
        export_as_csv(clean_results)
    else:
        print("Merci d'avoir utilisé notre PcapParser !")
        sys.exit(0)
コード例 #32
0
ファイル: bpf-test.py プロジェクト: radare/r2scripts
def test_pcap(pcap_filename, bpf_filename):
	"""
	Run every packet of a capture through a BPF program and print
	whether each one is accepted or rejected.
	"""
	with open(pcap_filename, 'r') as testcap:
		capfile = savefile.load_savefile(testcap, layers=0, verbose=False)

		raw_packets = [pkt.raw() for pkt in capfile.packets]

		r = Context(bpf_filename)

		# Packet numbering is 1-based for the report.
		i = 1
		for rp in raw_packets:
			res = emulate_packet( r, binascii.hexlify(rp) )
			if res == Context.Accepted:
				print "packet %d : accepted" % i
			elif res == Context.Rejected:
				print "packet %d : rejected" % i
			i+=1
コード例 #33
0
def openPcapFile(fname):
    """
    Parse fname down to the transport layer and return the capture.

    Side effect: sets the module-level sizePcap to the packet count.

    :param fname: path of the pcap file
    :return: the parsed savefile object
    """
    global sizePcap

    print("PCAP file is ", fname)

    # Packets are parsed eagerly (lazy not requested), so the handle is
    # no longer needed once load_savefile returns; 'with' guarantees it
    # is closed (the original leaked it).
    with open(fname, 'rb') as fp:
        # layers=3: data link (ethernet) -> network (IP) ->
        # transport (TCP, UDP) are all decoded into pcap.packets.
        pcap = savefile.load_savefile(fp, layers=3)
    sizePcap = len(pcap.packets)

    print(pcap)
    return pcap
コード例 #34
0
def my_pcap_reader(filename):
    """
    Convert ../Samples/<filename>.pcap into a text trace written to
    Results/Traces/<filename>.txt.

    Each row: index, seconds since the first packet, src IP, dst IP,
    TCP/UDP/Unknown_type, and the captured packet length.
    """
    testcap = open('../Samples/' + filename + ".pcap", 'rb')
    capfile = savefile.load_savefile(testcap, verbose=True)
    f = open('Results/Traces/' + filename + ".txt", 'w')

    # eth_frame = ethernet.Ethernet(capfile.packets[0].raw())
    # print eth_frame

    # ip_packet = ip.IP(binascii.unhexlify(eth_frame.payload))
    # print ip_packet

    for i in range(len(capfile.packets)):
        eth_frame = ethernet.Ethernet(capfile.packets[i].raw())
        try:
            ip_packet = ip.IP(binascii.unhexlify(eth_frame.payload))
        except Exception as e:
            # Frames that cannot be decoded as IP are skipped.
            print e.message
            continue

        packet = capfile.packets[i]

        # IP protocol number: 6 = TCP, 17 = UDP.
        packet_type = ""
        if ip_packet.p == 6:
            packet_type = "TCP"
        elif ip_packet.p == 17:
            packet_type = "UDP"
        else:
            packet_type = "Unknown_type"

        # Zero-pad the sub-second part so the fraction keeps 6 digits.
        milisecond = str(packet.timestamp_ms)
        while len(milisecond) < 6:
            milisecond = "0" + milisecond
        t1 = float(str(packet.timestamp % 100000) + "." + milisecond)
        t2 = float(
            str(capfile.packets[0].timestamp % 100000) + "." +
            str(capfile.packets[0].timestamp_ms))
        t3 = t1 - t2
        t3 = "{0:.6f}".format(t3)

        row = str(i + 1) + " " + str(
            t3
        ) + " " + ip_packet.src + " " + ip_packet.dst + " " + packet_type + " " + str(
            packet.packet_len)
        print(row)
        f.write(row + "\n")
        print("------------------------------------------------")
    f.close()
コード例 #35
0
    def generate_graph(self, filename):
        """
        Build the weighted host-communication graph from a pcap file.

        Nodes are (ip, protocol, infected?) tuples; edge weights
        accumulate the IP packet lengths exchanged between endpoints.
        Packets that fail to decode are counted as non-IP.
        """
        # Open the .pcap file
        capture = open(filename, "rb")
        self.packets = savefile.load_savefile(capture, verbose=True).packets

        # Process the packets and generate the adjacency list based on the graph
        for p in self.packets:
            try:
                eth_frame = ethernet.Ethernet(p.raw())
                ip_packet = ip.IP(binascii.unhexlify(eth_frame.payload))
                source = ip_packet.src
                destination = ip_packet.dst
                packet_length = ip_packet.len
                protocol = constants.PROTOCOLS[ip_packet.p]

                #if source not in _infected_ips and source not in _normal_ips:
                #    u = ("WAN", protocol)
                #else:
                infected = True if source in _infected_ips else False
                u = (source, protocol, infected)

                #if destination not in _infected_ips and destination not in _normal_ips:
                #    v = ("WAN", protocol)
                #else:
                infected = True if destination in _infected_ips else False
                v = (destination, protocol, infected)

                # Accumulate weight on existing edges.
                if self.graph.has_edge(u, v):
                    self.graph[u][v]['weight'] += packet_length
                else:
                    self.graph.add_edge(u, v, weight=packet_length)

                self.statistics["total_packets"] += 1
                self.statistics["largest_packet"] = max(
                    packet_length, self.statistics["largest_packet"])

                if settings.VERBOSE:
                    if self.statistics["total_packets"] % 50000 == 0:
                        print("Processed " +
                              str(self.statistics["total_packets"]) +
                              " packets")

            # Python 2 syntax; any decode failure counts as non-IP.
            except Exception, e:
                self.statistics["non_ip_packets"] += 1
コード例 #36
0
ファイル: bpftest.py プロジェクト: d4em0n/r2scripts
def test_pcap(pcap_filename, bpf_filename):
    """
    Run every packet of a capture through a BPF program and print
    accepted / rejected / unknown for each one.
    """
    with open(pcap_filename, 'r') as testcap:
        capfile = savefile.load_savefile(testcap, layers=0, verbose=False)

        raw_packets = [pkt.raw() for pkt in capfile.packets]

        r = Context(bpf_filename)

        # Packet numbering is 1-based for the report.
        i = 1
        for rp in raw_packets:
            res = emulate_packet(r, binascii.hexlify(rp))
            if res[1] == Context.Accepted:
                print "packet %d : accepted" % i
            elif res[1] == Context.Rejected:
                print "packet %d : rejected" % i
            else:
                print "packet %d : unknown" % i

            i += 1
コード例 #37
0
def get_all_flows_2(cap, attackers, write_to_file):
    """
    Append one CSV line per packet whose source IP is in *attackers*.

    Each line is: src,dst,src_port,dst_port,protocol,timestamp.

    :param cap: open (binary) file object of a pcap capture.
    :param attackers: container of attacker source-IP strings.
    :param write_to_file: path of the CSV file to append flow rows to.
    """
    # 'with' guarantees the output file is closed even if parsing raises
    # (the original leaked the handle on an unexpected exception).
    with open(write_to_file, 'a') as flow_file:
        for pkt in savefile.load_savefile(cap, lazy=True).packets:
            eth_frame = ethernet.Ethernet(pkt.raw())
            try:
                ip_packet = ip.IP(binascii.unhexlify(eth_frame.payload))
                # NOTE(review): the full IP packet (not ip_packet.payload)
                # is fed to the TCP parser, so ports may be mis-parsed --
                # preserved as-is, confirm against callers.
                tcp_packet = tcp.TCP(binascii.unhexlify(eth_frame.payload))
            except Exception:
                # Non-IP/non-TCP frames (ARP, truncated packets, ...) are
                # skipped; 'Exception' instead of a bare except so that
                # KeyboardInterrupt/SystemExit still propagate.
                continue
            if ip_packet.src.decode("utf-8") not in attackers:
                continue
            flow_file.write('{},{},{},{},{},{}\n'.format(
                ip_packet.src.decode("utf-8"), ip_packet.dst.decode("utf-8"),
                str(tcp_packet.src_port), str(tcp_packet.dst_port),
                str(ip_packet.p), normalized_timestamp(pkt.timestamp)))
コード例 #38
0
ファイル: Main.py プロジェクト: LoganRickert/packet_handler
 def run(self, primary_function):
     """
         Go through each file in the given directory
         that starts with pcap and for each packet
         pass it to the function primary function.

         :param primary_function: callable invoked once per packet with a
             Packet wrapper object; its return value is ignored.
     """
     # pcapfile is imported lazily so a missing dependency yields install
     # instructions instead of an ImportError traceback at import time.
     try:
         from pcapfile import savefile
     except ImportError:
         print "[-] Error: Make sure you have pcapfile installed!"
         print "[-] pip install pypcapfile"
         exit(0)
     
     # self.files is assumed to hold the pcap paths gathered elsewhere in
     # this class -- TODO confirm against the constructor.
     for filename in self.files:
         print "Opening with", filename
         # 'rb': pcap captures are binary.
         capfile = open(filename, 'rb')
         cap = savefile.load_savefile(capfile, verbose=True)
         
         for packet in cap.packets:
             primary_function(Packet(packet))
         
         capfile.close()
コード例 #39
0
ファイル: linklayer_test.py プロジェクト: abates/pypcapfile
 def init_capfile(self, layers=0):
     """
     Initialise capture file.

     :param layers: how many protocol layers pypcapfile should decode
         (0 = raw packets only).
     """
     # 'rb' instead of 'r': pcap data is binary and text-mode reads break
     # parsing on Python 3.  The handle itself is intentionally kept open
     # for the lifetime of self.capfile, as in the original.
     self.capfile = savefile.load_savefile(open('test_data/test.pcap', 'rb'),
                                           layers=layers)
コード例 #40
0
ファイル: server.py プロジェクト: colemancda/LinPhoneSwift
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from twisted.web import server, resource
from twisted.internet import reactor, protocol
from twisted.protocols import sip
import json
import time
from datetime import datetime
from pcapfile import savefile

# Address the fake server binds/advertises on.
IP = '127.0.0.1'

# Load both captures once at module import time; 'rb' because pcap data
# is binary.  Presumably these are replayed to clients by SIP/RTP
# handlers defined later in the file -- confirm.
audio_testcap = open('audio_in.pcap', "rb")
audio_capfile = savefile.load_savefile(audio_testcap, verbose=True)

video_testcap = open('video_in.pcap', "rb")
video_capfile = savefile.load_savefile(video_testcap, verbose=True)


class Simple(resource.Resource):
    # Serve every URI from this single resource instead of the usual
    # Twisted child-resource tree.
    isLeaf = True
    def render_GET(self, request):
        # Log each incoming request with a timestamp (Python 2 print).
        print str(datetime.now()), request

        # Stub JSON responses for the polled endpoints -- presumably a
        # Ring doorbell API emulation; confirm against the client.
        if request.uri.startswith("/doorbots/history"):
            request.responseHeaders.addRawHeader(b"content-type", b"application/json")
            return json.dumps([])

        elif request.uri.startswith("/ring_devices"):
            request.responseHeaders.addRawHeader(b"content-type", b"application/json")
            return json.dumps({"doorbots":[],"authorized_doorbots":[],"stickup_cams":[],"chimes":[]})
コード例 #41
0
ファイル: convert_pcap.py プロジェクト: larytet/video
def convert_image(arguments):
    """
    Dump the RGB565 payload of every packet in a pcap capture to a raw
    file, then render that raw data as a PNG image.

    :param arguments: docopt-style dict with '--filein' (pcap path),
        '--fileout' (raw output path), '--offset' (hex payload offset
        within each packet) and '--resolution' ("WxH") entries.
    """
    # 'while True: ... break' acts as a single-exit goto: every failure
    # path breaks out to the same point, as in the original.
    while True:
        filename_in = arguments["--filein"]
        filename_out = arguments["--fileout"]
        offset_str = arguments["--offset"]
        (result, filecap) = open_file(filename_in, 'rb')
        if not result:
            logger.error("Failed to open file '{0}' for reading".format(filename_in))
            break

        (result, fileout) = open_file(filename_out, 'wb')
        if not result:
            logger.error("Failed to open file '{0}' for writing".format(filename_out))
            filecap.close()  # avoid leaking the already-open input
            break

        (result, width, height) = parse_arguments_resolution(arguments["--resolution"])
        if not result:
            filecap.close()
            fileout.close()
            break

        filename_image = filename_out + ".png"

        # Read the PCAP file, save the payload (RGB565) in a separate file.
        offset = int(offset_str, 16)
        packets = savefile.load_savefile(filecap, verbose=True).packets
        logger.info("Processing '{0}' packets, data offset {1}, resolution {2}x{3}".format(
            len(packets), hex(offset), width, height))
        for packet in packets:
            fileout.write(packet.raw()[offset:])
        fileout.close()
        filecap.close()  # the original leaked this handle
        logger.info("Generated file {0}".format(filename_out))

        # Generate an image file from the raw RGB565 data.
        img = Image.new('RGB', (width, height), "black")
        with open(filename_out, 'rb') as raw_file:  # handle was leaked before
            data = raw_file.read()
        pixels = []
        count = len(data)
        expected_count = width * height
        index = 0
        # Assumed layout: R5 G6 B5, two bytes per pixel.
        while index <= (count - 2):
            pixels.append(get_pixel_rgb565_1(data, index))
            index = index + 2
            if len(pixels) >= expected_count:
                if index < (count - 2):
                    # count // 2: integer division so the message shows a
                    # whole pixel count on Python 3 as well.
                    logger.warning("Too much data for the image {0}x{1}. Expected {2} pixels, got {3} pixels".format(
                        width, height, expected_count, count // 2))
                break

        if len(pixels) < expected_count:
            logger.warning("Not enough data for the image {0}x{1}. Expected {2} pixels, got {3} pixels".format(
                width, height, expected_count, len(pixels)))
        img.putdata(pixels)
        img.save(filename_image)
        logger.info("Generated file {0}".format(filename_image))

        break
コード例 #42
0
ファイル: shattered.py プロジェクト: isislab/CTF-Solutions
from pcapfile import savefile
# NOTE: Python 2 only -- relies on print statements, str-as-bytes
# literals and int/None ordering below.
f=open('/Users/nickgregory/Downloads/shattered.pcap','rb')
cap=savefile.load_savefile(f)

# JPEG/JFIF magic bytes: the signature of the image being reassembled.
match = "\xff\xd8\xff\xe0\x00\x10JFIF"
assembled = match
best = None
used = []

# Greedy reassembly: repeatedly pick an unused packet whose payload
# contains the current match, append the data after the match, then use
# that packet's last 10 bytes as the next match to search for.
while True:
    for i, pkt in enumerate(cap.packets):
        # [54:] skips the headers -- presumably Ethernet(14) + IP(20)
        # + TCP(20) = 54 bytes; confirm against the capture.
        data = pkt.raw()[54:]
        if match in data:
            offset = data.index(match)
            # NOTE(review): 'best' holds a packet *index* but is compared
            # against a data length here; works only on Python 2 (where
            # int > None is True) and the greedy choice looks suspect --
            # preserved byte-for-byte, confirm intent.
            if len(data) - offset > best and i not in used:
                best = i
    
    if best is None:
        break
    data = cap.packets[best].raw()[54:]
    offset = data.index(match)
    print "Best match of ", match.encode('hex'), "is at pkt idx", best, "offset", offset
    
    assembled += data[offset+len(match):]
    match = data[-10:]
    used.append(best)
    best = None

# Write the reassembled JPEG.  Rebinding f leaves the pcap handle open.
f = open('out.jpg', 'wb')
f.write(assembled)
f.close()
コード例 #43
0
 def setUp(self):
     """Load the HTTP-conversation fixture parsed to layer 3 before each test."""
     with open('pcapfile/test/test_data/http_conversation.pcap', 'rb') as capture:
         parsed = savefile.load_savefile(capture, layers=3)
         self.packets = parsed.packets
コード例 #44
0
__email__ = "*****@*****.**"


parser = argparse.ArgumentParser(description='Multicast Pcap Parser')
#parser.add_argument('-f', '--file',  help='Pcap file to open', required=True)
parser.add_argument('-f', '--file',  help='Pcap file to open')
args = parser.parse_args()


sequence_dict = {}  # format = key = seq, value = timestamp
if not args.file:
    testcap = open('/Users/shea/Desktop/CMT/Project2-XPM3/pcaps/mux/20161130.153835.001665773.pcap', 'rb')
else:
    testcap = open(args.file, 'rb')

capfile = savefile.load_savefile(testcap)
file_length = capfile.__length__()
for packet in range(0, file_length):
    pkt = capfile.packets[packet]
    data = binascii.b2a_qp(pkt.raw())
    strings = data.split()
    sequence = strings[-2].decode()
    if sequence not in sequence_dict:
        sequence_dict[sequence] = [pkt.timestamp, pkt.timestamp_us]
    else:
        # The sequence already exists, so it can just be subtracted. In 2016/11, datetime does not support nanoseconds.
        if pkt.timestamp_us > sequence_dict[sequence][1]:
            # the nano of T1 is greater than T0, simply subtract
            print('{0}, {1}'.format(pkt.timestamp_us - sequence_dict[sequence][1], sequence))
        else:  # the significant portions don't match, clock rolled over 1 sec or > 1 sec diff!
            print('WARN: T0:', sequence_dict[sequence][0], sequence_dict[sequence][1], 'T1', pkt.timestamp,
コード例 #45
0
ファイル: xeng_tvg_check.py プロジェクト: binchensolar/eovsa
# we only capture 1000 bytes per packet --
# header is 42+88 bytes => 870 bytes data
# => 108 complex values
# => 27 dual-pol baselines (+ some leftover)
n_bls = 27
#n_bls = 16*17/2 * 4

n_windows = 100
packet_cnts = np.zeros(n_windows)
time_slots = np.ones(n_windows) * -1
spectra = np.ones([n_windows, 4096, n_bls*4], dtype=complex)*2**31

for fn in args:
    with open(fn, 'r') as fh:
        print 'loading %s'%fn
        capfile = savefile.load_savefile(fh)
        for pn, p in enumerate(capfile.packets[::1]):
            if pn % 10000 == 0:
                print 'Read %d packets'%pn
            # check this is an X packet
            if p.packet_len == 4482:
                mcnt, chan, xeng, acc_num = decode_x_header(p.raw()[42:])
                t = mcnt >> 12 #the lower bits of mcnt are a channel ID (and should be fixed per roach)
                t = acc_num
                window = t%n_windows
                data = struct.unpack('>%dl'%(n_bls*2*4), p.raw()[hdr_len:hdr_len + (4*n_bls*8)])
                data_c = np.array(data[::2]) + 1j*np.array(data[1::2])
                data_c /= 1792.
                #print pn, acc_num, mcnt, xeng, mcnt>>12, '%4d'%(mcnt&(2**12-1)), 'chan: %4d'%chan, data_c[0]
                time_slots[window] = t
                packet_cnts[window] += 1
コード例 #46
0
ファイル: parse_pcap_flows.py プロジェクト: Sean-Chang/vsid
parser = OptionParser()
parser.add_option("-f", "--file", dest="filename",
                  help="pcap file to parse", metavar="FILE")
parser.add_option("-p", "--protocol", dest="protocol",
                  help="protocol to add", metavar="PROTOCOL", default="INSERT_PROTOCOL")

(options, args) = parser.parse_args()

if options.filename is None or options.filename == "":
    print "ERROR: Filename cannot be empty\n\n"
    parser.print_help();
    sys.exit(1)

testcap = open(options.filename)
capfile = savefile.load_savefile(testcap, layers=2)

flows = {}
flowsList = []

for packet in capfile.packets :
    ip_packet = packet.packet.payload

    # UDP or TCP
    if ip_packet.p != 17 and ip_packet.p != 6:
        print "Not TCP or UDP"
        continue

    ip_tuple = IPv4Tuple(ip_packet.src, ip_packet.dst, ip_packet.p, ip_packet.payload, options.protocol)
    
    if ip_tuple.dict_repr() in flows:
コード例 #47
0
ファイル: metrics.py プロジェクト: VisBlank/rfc-dnscomp
#!/usr/bin/env python

import sys
import zlib
import lz4
import brotli
from pcapfile import savefile

# Report per-packet compressed sizes (zlib vs lz4 vs brotli) for each
# capture named on the command line.  The fixed 12-byte header left
# uncompressed is presumably a DNS message header -- confirm.
for arg in sys.argv[1:]:
	# 'rb': pcap data is binary (the original text-mode handle also was
	# never closed; 'with' fixes both).
	with open(arg, 'rb') as capture:
		pcap = savefile.load_savefile(capture)
		for i, packet in enumerate(pcap.packets):
			# strip IP/UDP (42 bytes of Ethernet + IP + UDP headers)
			pkt = packet.raw()[42:]
			# do not compress the 12-byte message header
			zlib_len = len(zlib.compress(pkt[12:])) + 12
			lz4_len = len(lz4.compress(pkt[12:])) + 12
			bro_len = len(brotli.compress(pkt[12:])) + 12
			print('[%d] base: %4dB, zlib: %4dB, lz4: %4dB, brotli: %4dB' %
			      (i, len(pkt), zlib_len, lz4_len, bro_len))

コード例 #48
0
ファイル: import_pcap.py プロジェクト: MrMugiwara/faraday
def main(workspace='', args=None, parser=None):
    """
    Import the source/destination hosts found in a pcap file into a
    Faraday workspace.

    :param workspace: name of the Faraday workspace to populate.
    :param args: argument list for the parser (None = sys.argv).
    :param parser: pre-built argparse parser this plugin extends.
    :return: tuple (status, None); status 0 = ok, 1 = pcapfile missing,
        2 = pcap path not found, 3 = invalid pcap file.
    """
    parser.add_argument('-s', '--source', nargs='*', help='Filter packets by source')
    parser.add_argument('-d', '--dest', nargs='*', help='Filter packets by destination')

    parser.add_argument('--dry-run', action='store_true', help='Do not touch the database. Only print the object ID')

    parser.add_argument('-v', '--verbose', action='store_true', help='Verbose output from the pcapfile library.')
    parser.add_argument('pcap', help='Path to the PCAP file')

    parsed_args = parser.parse_args(args)

    # Imported lazily so a missing dependency yields a friendly message
    # instead of an ImportError traceback at plugin load time.
    try:
        from pcapfile import savefile
        import pcapfile
    except ImportError:
        print('capfile not found, please install it to use this plugin.'
              ' You can do it executing pip2 install pcapfile in a shell.')
        return 1, None

    if not os.path.isfile(parsed_args.pcap):
        # BUG FIX: the original '"pcap file not found: " % path' had no
        # %s placeholder and raised TypeError instead of printing.
        print("pcap file not found: %s" % parsed_args.pcap)
        return 2, None

    testcap = open(parsed_args.pcap, 'rb')

    try:
        try:
            capfile = savefile.load_savefile(testcap, layers=2, verbose=parsed_args.verbose)
        except pcapfile.Error:
            print("Invalid pcap file")
            return 3, None

        print('pcap file loaded. Parsing packets...')

        # Set() of already-added hosts: saves querying the database for
        # hosts we already know are in Faraday.
        added = set()

        for packet in capfile.packets:

            # 2048 == 0x0800, the IPv4 ethertype; skip everything else.
            if packet.packet.type != 2048:
                continue

            src = packet.packet.payload.src
            dst = packet.packet.payload.dst

            if parsed_args.source and src not in parsed_args.source:
                continue

            if parsed_args.dest and dst not in parsed_args.dest:
                continue

            if src not in added:
                added.add(src)

                # Parsing of source field
                obj = factory.createModelObject(models.Host.class_signature, src,
                                                workspace, os=None, parent_id=None)

                old = models.get_host(workspace, obj.getID())

                if old is None:
                    if not parsed_args.dry_run:
                        models.create_host(workspace, obj)
                    print('%s\t%s' % (src, obj.getID()))

            if dst not in added:
                added.add(dst)

                # Parsing of destination field
                obj = factory.createModelObject(models.Host.class_signature, dst,
                                                workspace, os=None, parent_id=None)

                old = models.get_host(workspace, obj.getID())

                if old is None:
                    if not parsed_args.dry_run:
                        models.create_host(workspace, obj)
                    print('%s\t%s' % (dst, obj.getID()))

        return 0, None
    finally:
        # The original leaked this handle; close it on every exit path.
        testcap.close()
コード例 #49
0
ファイル: rtp_gap.py プロジェクト: needlefall/haystack
from pcapfile import savefile
from pcapfile.protocols.linklayer import ethernet
from pcapfile.protocols.network import ip
from pcapfile.protocols.transport import udp
import binascii
import rtp
import sys


# Tested with Python 2.7 and pypcapfile 0.11.1

if len(sys.argv) < 2:
  print "Usage: python rtp_gap.py <pcap>\n"

capfile = savefile.load_savefile(open(sys.argv[1], 'rb'), verbose=False)

last_seq = 0
in_gap   = False

for packet in capfile.packets:
  eth_frame = ethernet.Ethernet(packet.raw())

  # Is it IP?
  if eth_frame.type == 0x0800:
    ip_packet = ip.IP(binascii.unhexlify(eth_frame.payload))

    # Is it UDP?
    if ip_packet.p == 17:
      udp_packet = udp.UDP(binascii.unhexlify(ip_packet.payload))

      # Simple RTP identification heuristic
コード例 #50
0
ファイル: test.py プロジェクト: fasia/mxmlmate
def main(inputs):
    """
    Load each pcap file in *inputs* and print a summary of its contents.

    :param inputs: iterable of pcap file paths.
    """
    for input_file in inputs:
        # 'rb' is required: pcap captures are binary and text-mode reads
        # corrupt them on Python 3 (the original opened in text mode).
        with open(input_file, 'rb') as f:
            pcap = savefile.load_savefile(f, verbose=True)
            print(pcap)
コード例 #51
0
from pcapfile import savefile

# Load the capture once; the handle is closed after parsing (the
# original leaked it).
testcap = open('my_file.pcap', 'rb')
capfile = savefile.load_savefile(testcap, verbose=True)
testcap.close()

# One entry per packet: the bytes after the first 64 (presumably the
# link/IP/transport headers -- TODO confirm the 64-byte offset) rendered
# as colon-separated decimal values.
data = []
for elem in capfile.packets:
    payload = bytearray.fromhex(elem.packet)[64:]
    data.append(":".join(map(str, payload)))

# 'w' already truncates, so the original explicit truncate() was
# redundant; 'with' closes the file on every path.
with open('/home/itsy/Downloads/packet_data.txt', 'w') as target:
    for packet in data:
        target.write(packet)
        target.write("\r\n")
コード例 #52
0
ファイル: barbwire.py プロジェクト: jkohvakk/barbwire
def read_file(filename):
    """
    Load a pcap capture and return the parsed savefile object.

    :param filename: path to the pcap file.
    :return: pypcapfile savefile object with the parsed packets.
    """
    # 'rb' instead of 'r': pcap is binary and text mode breaks parsing
    # on Python 3; 'with' also closes the handle the original leaked.
    with open(filename, 'rb') as capture:
        return savefile.load_savefile(capture)