def httpPacketParse(buf, dbTableName, ts):
    """Parse one captured Ethernet frame and store the HTTP request as a DB row.

    Only IPv4 (ethertype 0x0800) TCP segments sent to port 80 that parse as
    an HTTP request are processed; anything else is silently skipped.
    Response/page content is deliberately not examined.

    buf         -- raw frame bytes from the pcap reader
    dbTableName -- database table the row is inserted into
    ts          -- capture timestamp stored with the row
    """
    tabel_line = {}  # one database row (key = column name)
    eth = dpkt.ethernet.Ethernet(buf)
    if eth.type != 0x0800:  # 2048: not IPv4
        return
    ip = eth.data
    if ip.p != 6:  # not TCP
        return
    tcp = ip.data
    # Focus on client request packets only; skip empty segments and
    # anything not addressed to the HTTP port.
    if len(tcp.data) == 0 or tcp.dport != 80:
        return
    try:
        http = dpkt.http.Request(tcp.data)
    except Exception:
        # Payload is not a (complete) HTTP request - ignore this segment.
        # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
        return
    # Source URL: prefer the `referer` header, fall back to `origin`.
    # The original header scan left its loop variable unbound (or holding an
    # arbitrary last header value) when neither header was present; plain
    # dict lookups avoid that.
    refval = http.headers.get("referer")
    if refval is None:
        refval = http.headers.get("origin", "")
    tabel_line["timestamp"] = ts
    tabel_line["sip"] = socket.inet_ntoa(ip.src)
    tabel_line["dip"] = socket.inet_ntoa(ip.dst)
    tabel_line["sport"] = tcp.sport
    tabel_line["dport"] = tcp.dport
    tabel_line["method"] = http.method
    url = urlformat(refval)
    tabel_line["url"] = url[0]
    tabel_line["get"] = httpGetItemformat(tcp.data)
    tabel_line["accept-language"] = formatheader(http, "accept-language")
    tabel_line["accept-encoding"] = formatheader(http, "accept-encoding")
    tabel_line["connection"] = formatheader(http, "connection")
    tabel_line["accept"] = formatheader(http, "accept")
    tabel_line["host"] = formatheader(http, "host")
    tabel_line["referer"] = formatheader(http, "referer")
    tabel_line["origin"] = formatheader(http, "origin")
    tabel_line["Cache-Control"] = formatheader(http, "cache-control")
    tabel_line["Cookie"] = formatheader(http, "cookie")
    tabel_line["tcp_packet"] = None  # raw segment deliberately not stored
    # url[1] == 0 presumably means urlformat() accepted the URL - TODO confirm.
    if url[1] == 0:
        if commonlib.IsSupportMutiThread() != 0:
            httpmutithread.PutDatatoQueue(tabel_line)
        else:
            httpdb.insert(dbTableName, tabel_line)
    # NOTE(review): the original called tabel_line.clear() here, which would
    # erase the row if PutDatatoQueue() kept a reference to it.  The dict is
    # local and garbage-collected on return, so the clear is dropped - this
    # is safe in both branches.
def httpThreadDataProcess(dbTableName,ts): global dataQueue i = 0 while True: if False == dataQueue.empty(): i = i+1 tabelDB = GetDatatoQueue() httpdb.insert(dbTableName,tabelDB) if i%200 == 0: print '正在读取pcap文件到数据库中,请稍等' else : if httpThreadReadEndFlag() == 1: threadnotify() thread.exit_thread() time.sleep(0.1)
def httpThreadDataProcess(dbTableName, ts): global dataQueue i = 0 while True: if False == dataQueue.empty(): i = i + 1 tabelDB = GetDatatoQueue() httpdb.insert(dbTableName, tabelDB) if i % 200 == 0: print '正在读取pcap文件到数据库中,请稍等' else: if httpThreadReadEndFlag() == 1: threadnotify() thread.exit_thread() time.sleep(0.1)
def httpPacketParse(buf, dbTableName, ts):
    """Extract HTTP request fields from one pcap frame into a database row.

    Filters to IPv4 (ethertype 0x0800) / TCP / destination port 80 with a
    non-empty payload that parses as an HTTP request; everything else
    returns early. Page (response) content is intentionally not handled.

    buf         -- raw frame bytes
    dbTableName -- target database table
    ts          -- capture timestamp for the row
    """
    eth = dpkt.ethernet.Ethernet(buf)
    if eth.type != 0x0800:  # 2048: IPv4 only
        return
    ip = eth.data
    if ip.p != 6:  # TCP only
        return
    tcp = ip.data
    if len(tcp.data) == 0 or tcp.dport != 80:
        return
    try:
        http = dpkt.http.Request(tcp.data)
    except Exception:
        # Not parseable as an HTTP request (bare `except:` replaced so
        # KeyboardInterrupt/SystemExit are no longer swallowed).
        return
    # URL source: `referer` if present, otherwise `origin`.  The original
    # double header scan could leave its loop variable unbound or pointing
    # at an unrelated header when both were absent.
    source = http.headers.get('referer')
    if source is None:
        source = http.headers.get('origin', '')
    url = urlformat(source)
    tabel_line = {
        'timestamp': ts,
        'sip': socket.inet_ntoa(ip.src),
        'dip': socket.inet_ntoa(ip.dst),
        'sport': tcp.sport,
        'dport': tcp.dport,
        'method': http.method,
        'url': url[0],
        'get': httpGetItemformat(tcp.data),
        'accept-language': formatheader(http, 'accept-language'),
        'accept-encoding': formatheader(http, 'accept-encoding'),
        'connection': formatheader(http, 'connection'),
        'accept': formatheader(http, 'accept'),
        'host': formatheader(http, 'host'),
        'referer': formatheader(http, 'referer'),
        'origin': formatheader(http, 'origin'),
        'Cache-Control': formatheader(http, 'cache-control'),
        'Cookie': formatheader(http, 'cookie'),
        'tcp_packet': None,  # raw packet intentionally not stored
    }
    # url[1] == 0 appears to mean "URL accepted by urlformat" - TODO confirm.
    if url[1] == 0:
        if commonlib.IsSupportMutiThread() != 0:
            httpmutithread.PutDatatoQueue(tabel_line)
        else:
            httpdb.insert(dbTableName, tabel_line)
    # NOTE(review): the original ended with tabel_line.clear(), which would
    # have emptied the very dict just handed to the worker queue if
    # PutDatatoQueue keeps a reference.  The dict is local, so the explicit
    # clear is unnecessary and has been removed.