def process(self, data):
    """Reassemble TCP streams from a PCAP capture and emit their payloads.

    When ``merge`` is set, packet payloads of the same conversation are
    buffered per direction and emitted as one labelled chunk per direction;
    otherwise every packet payload is emitted individually, labelled with
    its stream index.
    """
    pcapkit = self._pcapkit
    # pcapkit is chatty; silence its logger for the duration of the extraction.
    logging.getLogger('pcapkit').disabled = True
    merge = self.args.merge

    with VirtualFileSystem() as fs:
        vf = VirtualFile(fs, data, 'pcap')
        extraction = pcapkit.extract(
            fin=vf.path, engine='scapy', store=False, nofile=True,
            extension=False, tcp=True, strict=True)
        tcp: list = list(extraction.reassembly.tcp)

    count, convo = 0, None
    src_buffer = MemoryFile()
    dst_buffer = MemoryFile()

    def flush(conversation):
        # Emit and reset whatever is buffered for a finished conversation,
        # one labelled chunk per direction.
        if src_buffer.tell():
            yield self.labelled(src_buffer.getvalue(), **conversation.src_to_dst())
            # NOTE(review): seek before truncate — if MemoryFile is
            # BytesIO-like, truncating alone leaves the cursor past EOF and
            # later writes would zero-pad; confirm against MemoryFile.
            src_buffer.seek(0)
            src_buffer.truncate(0)
        if dst_buffer.tell():
            yield self.labelled(dst_buffer.getvalue(), **conversation.dst_to_src())
            dst_buffer.seek(0)
            dst_buffer.truncate(0)

    for stream in tcp:
        this_convo = Conversation.FromID(stream.id)
        if this_convo != convo:
            if count and merge:
                yield from flush(convo)
            count = count + 1
            convo = this_convo
        for packet in stream.packets:
            if not merge:
                yield self.labelled(packet.data, **this_convo.src_to_dst(), stream=count)
            elif this_convo.src == convo.src:
                src_buffer.write(packet.data)
            elif this_convo.dst == convo.src:
                dst_buffer.write(packet.data)
            else:
                raise RuntimeError(F'direction of packet {convo!s} in conversation {count} is unknown')

    if merge and convo is not None:
        # BUG FIX: the original only flushed on a conversation *change*, so
        # the final conversation's buffered data was silently dropped.
        yield from flush(convo)
def get_extraction(self):
    """Run pcapkit over this analysis' capture file and return the extraction."""
    capture = f"/tmp/{self.analysis_uuid}.pcap"
    return pcapkit.extract(
        fin=capture,
        store=True,
        nofile=True,
        ip=True,
        tcp=True,
        strict=True,
    )
def PCAP_analysis():
    """Convert every ``*.pcap`` file in ``directory`` to a JSON report in ``out/``."""
    for filename in os.listdir(directory):
        if filename.endswith("pcap"):
            filename_list = filename.split(".")
            filename_no_ext = filename_list[0]
            # BUG FIX: the original passed fin=filename_no_ext, which strips
            # both the directory and the .pcap extension, so the input file
            # could never be opened. Open the actual file instead.
            ljson = pcapkit.extract(
                fin=os.path.join(directory, filename),
                fout='out/{}.json'.format(filename_no_ext),
                format='json',
                store=False,
                engine="dpkt")
def pcaptojson(file) -> dict:
    """Extract *file* with pcapkit's JSON dumper, keeping everything in memory.

    Only the Transport layer is processed; IP and TCP reassembly are enabled.
    """
    # BUG FIX: engine was misspelled 'deafult', which is not a valid engine
    # name; use the default extraction engine.
    return pcapkit.extract(
        fin=file,
        nofile=True,
        format='json',
        auto=False,
        engine='default',
        extension=False,
        layer='Transport',
        tcp=True,
        ip=True,
        strict=True,
        store=False)
def PCAP_analysis():
    """Download each PCAP from GCS, convert it to ND-JSON, and re-upload it.

    Returns the string "Success" once every capture has been processed.
    """
    for filename in os.listdir(directory):
        if filename.endswith("pcap"):
            print("Filename: ", filename)
            filename_list = filename.split(".")
            filename_no_ext = filename_list[0]
            print("Extracting PCAP file")
            input_bucket = storage_client.get_bucket(input_bucket_name)
            # BUG FIX: the original called `bucket.blob(...)` but no `bucket`
            # variable exists; use the input bucket fetched above.
            blob = input_bucket.blob(filename)
            blob.download_to_filename("tmp/pcap/{}".format(filename))
            #Extracting PCAP data from PCAP files
            start = time.time()
            ljson = pcapkit.extract(
                fin="tmp/pcap/{}".format(filename),
                fout='tmp/json/{}.json'.format(filename_no_ext),
                format='json',
                store=False,
                engine="dpkt")
            end = time.time()
            total = end - start
            print("It took " + str(total) + " seconds")
            # BUG FIX: the JSON was written to tmp/json/ but read back from
            # out/; read the file that was actually produced, and close the
            # handle deterministically.
            with open('tmp/json/{}.json'.format(filename_no_ext)) as f:
                json_data = json.load(f)
            print("Deleting Header")
            del json_data['Global Header']
            with open('revised/{}.json'.format(filename_no_ext), 'w') as f:
                #dumping json data without a header into a new file.
                json.dump(json_data, f)
            #transforming json into newline delimited json for bigquery
            # NOTE(review): os.popen runs asynchronously through a shell;
            # consider subprocess.run and waiting before the upload below.
            command = os.popen(
                "cat tmp/json/{}.json | jq -c '.[]' > tmp/nd_json/{}.json".
                format(filename_no_ext, filename_no_ext))
            # print("Uploading to GCS")
            output_bucket = storage_client.get_bucket(output_bucket_name)
            # BUG FIX: upload through the output bucket, not the undefined
            # `bucket`.
            blob = output_bucket.blob(filename_no_ext + "_ND" + '.json')
            blob.upload_from_filename(
                "tmp/nd_json/{}.json".format(filename_no_ext))
    return "Success"
import os
import pprint
import textwrap
import time

import chardet

import pcapkit

OUT_PATH = '../sample/out'

# IMPROVEMENT: the original spawned a shell (`os.system('> ../sample/out')`)
# just to truncate the output file; opening it in write mode does the same
# without a subprocess.
open(OUT_PATH, 'w').close()

# Reassemble TCP streams of the sample capture in memory (no report file).
extraction = pcapkit.extract(
    fin='../sample/test.pcap',
    engine=pcapkit.DPKT,
    store=False,
    tcp=True,
    verbose=True,
    strict=True,
    nofile=True,
)

# pprint.pprint(extraction.frame)

# Append each reassembled datagram's fields to the output file.
with open(OUT_PATH, 'a') as file:
    # pprint.pprint(extraction.reassembly.tcp)
    for datagram in extraction.reassembly.tcp:
        print(f'NotImplemented = {datagram.NotImplemented}')
        file.write(f'NotImplemented = {datagram.NotImplemented}')
        file.write('\n')
        print(f'index = {datagram.index}')
        file.write(f'index = {datagram.index}')
        file.write('\n')
# # if __name__ == "__main__":
# #     arp('/Users/junaidtariq/Downloads/teste.pcap')

import dpkt

import pcapkit

PCAP = '/Users/junaidtariq/Downloads/teste.pcap'

# BUG FIX: the handle was opened and never closed (it is only needed by the
# commented-out dpkt experiments below); close it immediately so the file
# descriptor does not leak.
file = open(PCAP, 'rb')
file.close()

# pcap = dpkt.pcap.Reader(file)
# for ts, buf in pcap:
#     eth = dpkt.ethernet.Ethernet(buf)
#     print(eth)
#
# (ts, buf) = next(pcap)
# eth = dpkt.ethernet.Ethernet(buf)

# dump to a PLIST file with no frame storage (property frame disabled)
plist = pcapkit.extract(fin=PCAP, fout='out.plist', format='plist', store=False)

# dump to a JSON file with no extension auto-complete
json = pcapkit.extract(fin=PCAP, fout='out.json', format='json', extension=False)

# dump to a folder with each tree-view text file per frame
tree = pcapkit.extract(fin=PCAP, fout='out', format='tree', files=True)
# -*- coding: utf-8 -*-
"""Render the IPv6 sample capture as per-frame tree-view text files."""

import pcapkit

extraction = pcapkit.extract(
    fin='../sample/ipv6.pcap',
    fout='../sample/ipv6',
    format='tree',
    files=True,
    verbose=True,
    store=False,
)
# -*- coding: utf-8 -*-
"""Trace the TCP flows of the HTTP sample capture and pretty-print the index."""

import pprint

import pcapkit

trace = pcapkit.extract(
    fin='../sample/http.pcap',
    nofile=True,
    verbose=True,
    trace=True,
    trace_format='json',
    trace_fout='../sample/trace',
)
pprint.pprint(trace.trace)
# -*- coding: utf-8 -*-
"""Dump ../sample/in.pcap into several report formats."""

import pcapkit

_SOURCE = '../sample/in.pcap'


def _dump(out, fmt):
    # One extraction pass per requested output format.
    return pcapkit.extract(fin=_SOURCE, fout=out, format=fmt)


plist = _dump('../sample/out.plist', 'plist')
json = _dump('../sample/out.json', 'json')
# html = _dump('../sample/out.js', 'html')
tree = _dump('../sample/out.txt', 'tree')
# json = _dump('../sample/out.xml', 'xml')
import numpy as np

import pcapkit

# configuration
CHIP = '43455c0'
BW = 80
FILE = './out.pcap'
FOUT = 'out.json'
NPKTS_MAX = 20

# read file
HOFFSET = 16       # header offset
NFFT = BW * 3.2    # fft size

extraction = pcapkit.extract(fin=FILE, fout=FOUT, layer='Link', extension=False)
n = min(NPKTS_MAX, len(extraction.frame))
# print('n:', n)


def fread(fid, nelements, dtype):
    """Read *nelements* of *dtype* from the open file *fid* into a column array.

    NOTE(review): no return statement is visible here — the definition looks
    truncated; confirm the full body returns ``data_array``.
    """
    # BUG FIX: `np.str` was deprecated in NumPy 1.20 and removed in 1.24, so
    # `dtype is np.str` raises AttributeError; compare with the builtin str.
    if dtype is str:
        dt = np.uint8  # WARNING: assuming 8-bit ASCII for str!
    else:
        dt = dtype
    data_array = np.fromfile(fid, dt, nelements)
    # print(data_array)
    data_array.shape = (nelements, 1)
# -*- coding: utf-8 -*-
"""Extract the HTTP sample capture into JSON reports with flow tracing enabled."""

import pcapkit

json = pcapkit.extract(
    fin='../sample/http.pcap',
    fout='../sample/http',
    format='json',
    files=True,
    store=True,
    verbose=True,
    ip=True,
    tcp=True,
    strict=False,
    trace=True,
    trace_format='json',
    trace_fout='../sample/trace',
)
"""Convert a Facebook audio capture into a JSON report with pcapkit."""

import pcapkit

input_file = "data/facebook_audio1a.pcap"
output_file = "out.json"

json = pcapkit.extract(
    fin=input_file,
    fout=output_file,
    format='json',
    extension=False,
)
def PCAP_analysis():
    """Download each PCAP from GCS, convert it to ND-JSON, and re-upload it.

    Returns the string "Success" once every capture has been processed.
    """
    for filename in os.listdir(directory):
        if filename.endswith("pcap"):
            print("Filename: ", filename)
            filename_list = filename.split(".")
            filename_no_ext = filename_list[0]
            print("Extracting PCAP file")
            input_bucket = storage_client.get_bucket(input_bucket_name)
            # BUG FIX: the original called `bucket.blob(...)` but no `bucket`
            # variable exists; use the input bucket fetched above.
            blob = input_bucket.blob(filename)
            blob.download_to_filename("tmp/pcap/{}".format(filename))
            #Extracting PCAP data from PCAP files
            start = time.time()
            ljson = pcapkit.extract(
                fin="tmp/pcap/{}".format(filename),
                fout='tmp/json/{}.json'.format(filename_no_ext),
                format='json',
                store=False,
                engine="dpkt")
            end = time.time()
            total = end - start
            print("It took " + str(total) + " seconds")
            # BUG FIX: the JSON was written to tmp/json/ but read back from
            # out/; read the file that was actually produced, and close the
            # handle deterministically.
            with open('tmp/json/{}.json'.format(filename_no_ext)) as f:
                json_data = json.load(f)
            print("Deleting Header")
            # del json_data['Global Header']
            with open('revised/{}.json'.format(filename_no_ext), 'w') as f:
                #dumping json data without a header into a new file.
                json.dump(json_data, f)
            #transforming json into newline delimited json for bigquery
            # NOTE(review): os.popen runs asynchronously through a shell;
            # consider subprocess.run and waiting before the upload below.
            command = os.popen(
                "cat tmp/json/{}.json | jq -c '.[]' > tmp/nd_json/{}.json".
                format(filename_no_ext, filename_no_ext))
            # print("Uploading to GCS")
            output_bucket = storage_client.get_bucket(output_bucket_name)
            # BUG FIX: upload through the output bucket, not the undefined
            # `bucket`.
            blob = output_bucket.blob(filename_no_ext + "_ND" + '.json')
            blob.upload_from_filename(
                "tmp/nd_json/{}.json".format(filename_no_ext))
            # bq_pcap_table = bq_client.get_table('kubeflow-test-260816.PCAP.test2')
            # #grabbing schema from schema.json file
            # json_schema = json.loads(open('schema.json').read())
            # job_config = bigquery.LoadJobConfig()
            # job_config.schema = json_schema
            # job_config.source_format = 'NEWLINE_DELIMITED_JSON'
            # job_config.max_bad_records = 200
            # print("Loading to BQ")
            # bq_client.load_table_from_uri(
            #     "gs://pcap-files/" + filename_no_ext + ".json",
            #     bq_pcap_table, job_config=job_config)
    return "Success"
import pcapkit
import csv
#from cyclic import *

filename = input('Enter the filename :')
extracter = pcapkit.extract(fin=filename, nofile=True, engine='dpkt')

k = 1
intermediate_str = ''
cipher_text = ''
total = ''

# Collect the textual payload of every TCP frame (IP protocol number 6)
# into one long string.
for i in extracter.frame:
    try:
        if i['data']['p'] == 6:
            # print(i)
            payload = i['data']['data']['data']
            # print("payload=", payload)
            intermediate_str = payload.__str__()
            if intermediate_str != "b''":
                # print(st)
                # Strip the b'...' repr wrapper to keep only the text
                # between the quotes.
                cipher_text = intermediate_str[2:-1]
                # print("ciphertext obtained from packet", cipher_text)
                total += cipher_text
    except (KeyError, IndexError, TypeError):
        # IMPROVEMENT: the original used two nested bare `except: pass`
        # clauses, which also swallow KeyboardInterrupt and real bugs.
        # Only ignore frames that lack the expected nested layers.
        pass
# -*- coding: utf-8 -*-
"""Reassemble TCP streams of the HTTP-over-IPv6 sample and show HTTP packets."""

import pprint

import pcapkit

extraction = pcapkit.extract(
    fin='../sample/http6.cap',
    # fout='../sample/http.txt',
    format='tree',
    store=False,
    tcp=True,
    verbose=True,
    nofile=True,
    strict=True,
    extension=False,
)

# pprint.pprint(extraction.reassembly.tcp)
print()
for reassembly in extraction.reassembly.tcp:
    for packet in reassembly.packets:
        # Non-HTTP packets are only echoed; HTTP packets get a full dump.
        if pcapkit.HTTP not in packet.protochain:
            print(packet)
            continue
        # with open('../sample/37fc254c-68c1-4677-9ed1-806c5eab8acb.dat', 'ab') as file:
        #     file.write(packet.info.raw.header or b'')
        pprint.pprint(packet.info)
print()
# -*- coding: utf-8 -*-
"""Exercise every pcapkit extraction engine against the same sample capture."""

import pcapkit

_PCAP = '../sample/in.pcap'


def _tree(engine):
    # Engines that render a tree-view report under ../sample/engines/.
    return pcapkit.extract(fin=_PCAP, fout='../sample/engines/{}.txt'.format(engine),
                           format='tree', engine=engine)


default = _tree('default')
pyshark = _tree('pyshark')
scapy = _tree('scapy')
dpkt = _tree('dpkt')

# The multiprocessing engines run without writing a report file.
pipeline = pcapkit.extract(fin=_PCAP, nofile=True, engine='pipeline')
server = pcapkit.extract(fin=_PCAP, nofile=True, engine='server')
# -*- coding: utf-8 -*-
"""Benchmark every pcapkit engine: 100 extraction runs each, report s/packet."""

import statistics
import time

import pcapkit

# BUG FIX: 'pipline' was a typo for 'pipeline' (cf. the engines demo script).
# IMPROVEMENT: iterate a tuple instead of a set so the benchmark order is
# deterministic across runs.
for engine in ('default', 'pyshark', 'scapy', 'dpkt', 'pipeline', 'server'):
    lid = list()
    for index in range(1, 101):
        now = time.time()
        extraction = pcapkit.extract(fin='../sample/in.pcap',
                                     store=False, nofile=True, engine=engine)
        delta = time.time() - now
        # print(f'[{engine}] No. {index:>3d}: {extraction.length} packets extracted in {delta} seconds.')
        lid.append(float(delta))
    avetime = statistics.mean(lid)
    # Normalise by the packet count of the last extraction run.
    average = avetime / extraction.length
    print(f'Report: [{engine}] {average} seconds per packet.')
# -*- coding: utf-8 -*-
"""List the frames of the HTTP sample capture that carry HTTP traffic."""

import pcapkit

# With auto=False the extractor yields frames lazily as we iterate it.
for frame in pcapkit.extract(fin='../sample/http.pcap', nofile=True, auto=False):
    # Skip frames without HTTP.
    if pcapkit.HTTP not in frame:
        continue
    # Report the frame name and its protocol chain.
    print(f' - {frame.name}: {frame.protochain}')
    # The HTTP info mapping itself would be available as:
    # http = frame[pcapkit.HTTP]
def test():
    """Extract the sample capture in memory with the globally selected engine."""
    return pcapkit.extract(
        fin='../sample/in.pcap',
        store=False,
        nofile=True,
        engine=engine,
    )