def __output_sqlite3(self, ip_statistic_tuple):
    """
    Persist one ip-statistic record into the sqlite3 output database.

    Args:
        ip_statistic_tuple: tuple of 12 values in the same order as the
            `fields` list below (direct, ts, ip_protocol_type, src_ip,
            dst_ip, src_port, dst_port, packet_length, src_ip_geo,
            dst_ip_geo, src_port_service, dst_port_service)
    Returns:
        None
    """
    # fall back to the default db path (and force a rebuild) when the
    # configured path is missing or was never set
    if not (self.sqlite3_output_path and os.path.exists(self.sqlite3_output_path)):
        self.sqlite3_output_path = mills.path("data/ip_packet/ip_statistic.db")
        self.sqlite3_renew = True
    # likewise fall back to the bundled schema file
    if not (self.sqlite3_output_schema and os.path.exists(self.sqlite3_output_schema)):
        self.sqlite3_output_schema = mills.path("data/ip_packet/ip_statistic.sql")
    so = SQLiteOper(dbpath=self.sqlite3_output_path,
                    db_is_new=bool(self.sqlite3_renew))
    # create the table(s) from the schema file (no-op if they already exist,
    # presumably — behavior of createTable not visible here)
    so.createTable(self.sqlite3_output_schema)
    fields = ["direct", "ts", "ip_protocol_type",
              "src_ip", "dst_ip",
              "src_port", "dst_port",
              "packet_length",
              "src_ip_geo", "dst_ip_geo",
              "src_port_service", "dst_port_service"]
    data_dict = dict(zip(fields, ip_statistic_tuple))
    # NOTE(review): tablename is 'ip_statistic_tuple' while the db/schema
    # files are named ip_statistic.* — confirm the table name matches the
    # schema in ip_statistic.sql
    so.replaceData2SQLite3(op='insert',
                           tablename='ip_statistic_tuple',
                           fields=fields,
                           data_dict=data_dict)
def __init__(self, servicefile=None):
    """
    Build a port -> service-name lookup table from an nmap-services file.

    :param servicefile: path to an nmap-services style file; defaults to
                        etc/nmap-services when not given
    """
    servicefile = servicefile or mills.path("etc/nmap-services")
    self.servicefile = servicefile
    self.portservicedict = {}
    if not os.path.exists(servicefile):
        logging.error("[FILENOTEXITS]: %s" % servicefile)
        return
    with codecs.open(servicefile, encoding='utf-8', mode='rb') as fr:
        for raw in fr:
            entry = raw.strip()
            # skip comment lines
            if entry.startswith("#"):
                continue
            columns = re.split(r'\s+', entry)
            # e.g. "http  80/tcp  ..." -> {"80/tcp": "http"}
            if len(columns) >= 2:
                self.portservicedict[columns[1]] = columns[0]
def __init__(self, file_name=None):
    """
    Load the qqwry (cz88) IP geolocation database into memory.

    :param file_name: path to qqwry.dat; defaults to data/ipdb/qqwry.dat,
                      downloading the database first when the file is missing
    """
    self.source_name = "qqwry"
    self.qqwry_source_url = {
        'copywrite_url': 'http://update.cz88.net/ip/copywrite.rar',
        'data_url': 'http://update.cz88.net/ip/qqwry.rar',
    }
    self.qqwry_down_path = {
        'ip': mills.path("data/ipdb/qqwry.dat"),
        'copywrite_rar': mills.path("data/ipdb/copywrite.rar"),
        "qqwry_rar": mills.path("data/ipdb/qqwry.rar")
    }
    if not file_name:
        file_name = self.qqwry_down_path["ip"]
    if not os.path.exists(file_name):
        # fetch the database from the mirror before first use
        self.db_update()
    # FIX: open in binary mode — qqwry.dat is a binary file whose header is
    # unpacked with struct below; text mode ('r') corrupts it on platforms
    # that translate line endings and breaks unpack() on Python 3
    with open(file_name, 'rb') as dbf:
        self.data = dbf.read()
    # header: two little-endian uint32 offsets of the first/last index entry
    self.startindex, self.lastindex = unpack('II', self.data[:8])
    # each index entry is 7 bytes wide; // keeps integer semantics explicit
    self.count = (self.lastindex - self.startindex) // 7 + 1
    self.ip_data_field = ['country', 'isp']
def __del__(self):
    """Close every open output handle and drop per-protocol files left empty."""
    # close the three fixed-purpose handles, skipping any never opened
    for fh in (self.tcp_file_fh, self.udp_file_fh, self.ip_file_fh):
        if fh:
            fh.close()
    # close each per-protocol handle, then remove its file if nothing was written
    for proto, proto_fh in self.app_proto_fhs.items():
        if proto_fh:
            proto_fh.close()
        proto_path = mills.path(self.file_output_path, "%s.txt" % proto)
        if mills.isFileEmpty(proto_path):
            os.remove(proto_path)
def __init__(self, pcap_file=None, device=None, bpf_filter="tcp",
             dst_tcp_port_filter=None, dst_tcp_ip_filter=None,
             src_tcp_port_filter=None, src_tcp_ip_filter=None,
             udp_port_filter=None, udp_ip_filter=None,
             data_level=1, data_stream_direct=2,
             std_output_enable=1, file_output_path=None,
             protocol_parse_conf=None,
             is_handle_tcp=1, is_handle_udp=1, is_handle_ip=1,
             sqlite3_output_enable=1, sqlite3_output_path=None,
             sqlite3_output_schema=None, sqlite3_renew=False):
    """
    Configure libnids and every output channel (stdout, files, sqlite3).

    Args:
        pcap_file: offline capture to replay (takes precedence over device)
        device: live interface to sniff when no pcap_file is given
        bpf_filter: BPF expression handed to pcap
        dst_tcp_port_filter / dst_tcp_ip_filter: TCP destination filters
        src_tcp_port_filter / src_tcp_ip_filter: TCP source filters
        udp_port_filter / udp_ip_filter: UDP filters
        data_level: payload detail level
        data_stream_direct: which stream direction(s) to record
        std_output_enable: 1 to echo records to stdout
        file_output_path: directory receiving tcp/udp/ip/per-protocol files
        protocol_parse_conf: port -> application-protocol mapping
        is_handle_tcp / is_handle_udp / is_handle_ip: enable each handler
        sqlite3_output_enable / sqlite3_output_path / sqlite3_output_schema /
        sqlite3_renew: sqlite3 output configuration for ip statistics
    """
    self.is_handle_tcp = is_handle_tcp
    self.is_handle_udp = is_handle_udp
    self.is_handle_ip = is_handle_ip

    self.bpf_filter = bpf_filter
    self.dst_tcp_port_filter = dst_tcp_port_filter
    self.dst_tcp_ip_filter = dst_tcp_ip_filter
    self.src_tcp_port_filter = src_tcp_port_filter
    self.src_tcp_ip_filter = src_tcp_ip_filter
    self.udp_ip_filter = udp_ip_filter
    self.udp_port_filter = udp_port_filter

    self.device = device
    self.pcap_file = pcap_file
    if pcap_file:
        nids.param("filename", pcap_file)
    elif device:
        nids.param("device", device)
    if bpf_filter:
        nids.param("pcap_filter", bpf_filter)  ## bpf restrict to TCP only, note

    self.data_level = data_level
    self.data_stream_direct = data_stream_direct
    self.std_output_enable = std_output_enable
    self.file_output_path = file_output_path
    self.protocol_parse_conf = protocol_parse_conf

    nids.param("scan_num_hosts", 0)  # disable portscan detection
    nids.chksum_ctl([('0.0.0.0/0', False)])  # disable checksumming
    nids.param("pcap_timeout", 64)
    nids.param("multiproc", 1)
    nids.param("tcp_workarounds", 1)

    # sqlite3 conf which store ip_handle statistic info
    self.sqlite3_output_enable = sqlite3_output_enable
    self.sqlite3_output_path = sqlite3_output_path
    self.sqlite3_output_schema = sqlite3_output_schema
    self.sqlite3_renew = sqlite3_renew

    # local ip
    if self.is_handle_ip:
        self.local_ip = networktools.get_local_ip(self.device)

    # output handles (None until file output is configured below)
    self.tcp_file_fh = None
    self.udp_file_fh = None
    self.ip_file_fh = None
    self.app_proto_fhs = {}

    if self.file_output_path:
        # set up file output: recreate the output directory from scratch
        if os.path.exists(self.file_output_path) and os.path.isdir(
                self.file_output_path):
            # delete old data
            mills.rm_dir(self.file_output_path)
        # FIX: also create the directory when it did not exist at all —
        # the original only re-created it after deleting old data, so the
        # codecs.open() calls below failed on a fresh path
        if not os.path.exists(self.file_output_path):
            os.mkdir(self.file_output_path)

        tcp_file_path = mills.path(self.file_output_path, "tcp.txt")
        self.tcp_file_fh = codecs.open(tcp_file_path, mode='wb',
                                       encoding='utf-8', errors='ignore')
        udp_file_path = mills.path(self.file_output_path, "udp.txt")
        self.udp_file_fh = codecs.open(udp_file_path, mode='wb',
                                       encoding='utf-8', errors='ignore')
        ip_file_path = mills.path(self.file_output_path, "ip.txt")
        self.ip_file_fh = codecs.open(ip_file_path, mode='wb',
                                      encoding='utf-8', errors='ignore')

        # one output file per application protocol derived from port filters
        port_list = set()
        proto_list = set()
        for port_filter in [self.dst_tcp_port_filter,
                            self.src_tcp_port_filter,
                            self.udp_port_filter]:
            # FIX: filters default to None — skip unset ones instead of
            # raising "TypeError: 'NoneType' object is not iterable"
            if not port_filter:
                continue
            for port in port_filter:
                port_list.add(port)
        for port in port_list:
            protocol = self.which_protocol_parse(port)
            if protocol:
                proto_list.add(protocol)
        for protocol in proto_list:
            protocol_file_path = mills.path(self.file_output_path,
                                            "%s.txt" % protocol)
            protocol_file_fh = codecs.open(protocol_file_path, mode='wb',
                                           encoding='utf-8', errors='ignore')
            self.app_proto_fhs[protocol] = protocol_file_fh
        # continuation of main() (def is above this chunk): forward every
        # parsed CLI option into the sniffer/handler object, then run it
        src_tcp_ip_filter=co.src_tcp_ip_filter,
        src_tcp_port_filter=co.src_tcp_port_filter,
        udp_ip_filter=co.udp_ip_filter,
        udp_port_filter=co.udp_port_filter,
        data_level=co.data_level,
        data_stream_direct=co.data_stream_direct,
        std_output_enable=co.std_output_enable,
        file_output_path=co.file_output_path,
        protocol_parse_conf=co.protocol_parse_conf,
        is_handle_ip=co.is_handle_ip,
        is_handle_tcp=co.is_handle_tcp,
        is_handle_udp=co.is_handle_udp,
        sqlite3_output_enable=co.sqlite3_output_enable,
        sqlite3_output_path=co.sqlite3_output_path,
        sqlite3_output_schema=co.sqlite3_output_schema,
        sqlite3_renew=co.sqlite3_renew)
    sho.run()


if __name__ == "__main__":
    """
    """
    import lib.mills as mills
    import lib.logger as logger

    # route this script's logs into ./log under the "tcpsession" logtype
    logger.generate_special_logger(level=logging.DEBUG,
                                   logtype="tcpsession",
                                   curdir=mills.path("./log"),
                                   ismultiprocess=False)
    main()
    :param asset_ip:
    :param asset_port:
    :return:
    """
    # tail of a search method (def is above this chunk): print every record
    # matching the given asset ip/port (Python 2 print statement)
    for l in self.search_pcap(asset_port=asset_port, asset_ip=asset_ip):
        print l


if __name__ == '__main__':
    import lib.mills as mills

    # route script logs into the "pcapanalyis" log file
    logger.generate_special_logger(level=logging.INFO,
                                   logtype="pcapanalyis",
                                   curdir=mills.path("log"),
                                   ismultiprocess=False)
    from optparse import OptionParser

    # default sample capture used when --pcapfile is not supplied
    pcap_path = mills.path("data/pcap_pub/wireshark/mysql_complete.pcap")
    assetip = None
    assetport = None
    parser = OptionParser()
    parser.add_option("--pcapfile",
                      dest="pcapfile",
                      action='store',
                      type='string',
                      help="special the pcap file path",
                      default=pcap_path)
                # tail of the DNS-parse generator (def/try start above this
                # chunk): emit each parsed record, logging parse failures
                yield result
        except Exception as e:
            logging.error("[DNS_PARSE_ERROR]: %r" % e)


if __name__ == "__main__":
    import lib.logger as logger
    import codecs
    import json

    logger.generate_special_logger(level=logging.INFO,
                                   logtype="dns_parse",
                                   curdir=mills.path("./log"))
    # replay previously captured UDP records and pretty-print the DNS parse
    pcap_file = mills.path("data/tcpudpdata/dns.pcap/udp.txt")
    lines = []
    with codecs.open(pcap_file, mode='rb', encoding='utf-8') as fr:
        for line in fr:
            if line:
                lines.append(line)
    for i in range(0, len(lines)):
        # NOTE(review): eval() on file content — only safe for trusted
        # capture dumps; never run this on untrusted input
        pqa = DNSProtocol(eval(lines[i]))
        result = pqa.parse_data()
        # NOTE(review): inner loop reuses the name `i`, shadowing the outer
        # index — harmless here because the outer `i` is re-drawn from
        # range() each iteration, but worth renaming
        for i in result:
            print json.dumps(i, indent=4)
    :param asset_ip:
    :param asset_port:
    :return:
    """
    # tail of a search method (def is above this chunk): print every record
    # matching the given asset ip/port (Python 2 print statement)
    for l in self.search_pcap(asset_port=asset_port, asset_ip=asset_ip):
        print l


if __name__ == '__main__':
    import lib.mills as mills

    # route script logs into the "pcapanalyis" log file
    logger.generate_special_logger(level=logging.INFO,
                                   logtype="pcapanalyis",
                                   curdir=mills.path("log"),
                                   ismultiprocess=False)
    from optparse import OptionParser

    # default sample capture used when --pcapfile is not supplied
    pcap_path = mills.path("data/pcap_private/redisop.pcap")
    assetip = None
    assetport = None
    parser = OptionParser()
    parser.add_option("--pcapfile",
                      dest="pcapfile",
                      action='store',
                      type='string',
                      help="special the pcap file path",
                      default=pcap_path)
            # tail of a UDP packet-counting function (def and the start of
            # the if/elif chain are above this chunk); counters are globals
            SSDPCount += 1
        elif srcP == "53":
            # source port 53 -> count as a DNS response
            global DNSCount
            DNSCount += 1
        global UDPPckCount
        UDPPckCount += 1
    f.close()


if __name__ == '__main__':
    import lib.mills as mills

    # route script logs into the "pcapanalyis" log file
    logger.generate_special_logger(level=logging.INFO,
                                   logtype="pcapanalyis",
                                   curdir=mills.path("log"),
                                   ismultiprocess=False)
    from optparse import OptionParser

    assetip = None
    assetport = None
    # NOTE(review): OptionParser is imported twice in this block —
    # harmless but redundant
    from optparse import OptionParser
    parser = OptionParser()
    # chunk ends mid-call: remaining add_option arguments continue below
    parser.add_option(
        "--pcapfile",
        dest="pcapfile",
        action='store',
        type='string',