def _get_filters(self):
    # Parses "ip[]" parameters from the request path and/or Referer header and
    # expands CIDR prefixes and explicit ranges into a set of integer addresses
    filters = set()

    for item in (self.path, self.headers.get("referer", "")):
        if "ip[]" not in item:
            continue

        for _ in re.findall(r"\bip\[\]=([\d./\-]+)(?:&|\Z)", item):
            if "/" in _:  # CIDR notation (e.g. 192.168.0.0/24)
                prefix, mask = _.split("/", 1)
                mask = int(mask)
                start_int = addr_to_int(prefix) & make_mask(mask)
                end_int = start_int | ((1 << 32 - mask) - 1)
                if (end_int - start_int) > MAX_IP_FILTER_RANGE:
                    raise ValueError("filter range too large")
                for address in xrange(start_int, end_int + 1):
                    filters.add(address)
            elif "-" in _:  # explicit range (e.g. 192.168.0.1-192.168.0.100)
                start_address, end_address = _.split("-", 1)
                start_int = addr_to_int(start_address)
                end_int = addr_to_int(end_address)
                if (end_int - start_int) > MAX_IP_FILTER_RANGE:
                    raise ValueError("filter range too large")
                for address in xrange(start_int, end_int + 1):
                    filters.add(address)
            else:  # single address
                filters.add(addr_to_int(_))

    return filters
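# The helpers below are NOT part of the original fragment; they are a minimal
# sketch of what the addr_to_int() and make_mask() utilities used throughout
# this section presumably look like (assumed to be imported from a shared
# module elsewhere in the project).

def addr_to_int(value):
    # Converts a dotted-quad IPv4 address (e.g. "192.168.1.1") to its integer form
    _ = value.split('.')
    return (int(_[0]) << 24) + (int(_[1]) << 16) + (int(_[2]) << 8) + int(_[3])

def make_mask(bits):
    # Builds an integer netmask from a prefix length (e.g. 24 -> 0xffffff00)
    return 0xffffffff ^ ((1 << (32 - bits)) - 1)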
def init_sensor():
    global _cap
    global _datalink

    # Discovers local interface addresses by parsing ifconfig (Linux) or
    # ipconfig (Windows) output, whichever of the two commands is available
    items = []

    for cmd, regex in (("ifconfig", r"inet addr:([\d.]+) .*Mask:([\d.]+)"), ("ipconfig", r"IPv4 Address[^\n]+([\d.]+)\s+Subnet Mask[^\n]+([\d.]+)")):
        try:
            items = re.findall(regex, subprocess.check_output(cmd))
            break
        except OSError:
            pass

    # Stores each interface as a (network, netmask) pair of integers
    for ip, mask in items:
        LOCAL_ADDRESSES.append((addr_to_int(ip) & addr_to_int(mask), addr_to_int(mask)))

    try:
        if not os.path.isdir(LOG_DIRECTORY):
            os.makedirs(LOG_DIRECTORY)
    except Exception, ex:
        if "Permission denied" in str(ex):
            exit("[x] please run with sudo/Administrator privileges")
        else:
            raise
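# A hypothetical helper (not in the original fragment) illustrating how the
# _cap and _datalink globals declared in init_sensor() would typically be
# obtained with pcapy; the default interface name, snapshot length and
# timeout below are placeholder assumptions.

def _open_capture(interface="eth0", snaplen=65535, promisc=True, timeout_ms=100):
    global _cap
    global _datalink

    _cap = pcapy.open_live(interface, snaplen, promisc, timeout_ms)  # returns a pcapy.Reader
    _datalink = _cap.datalink()  # e.g. pcapy.DLT_EN10MB or pcapy.DLT_LINUX_SLL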
def _dataset(self):
    result = "\n"
    rows = []
    indexes = {}
    filters = self._get_filters()

    # Merges the per-day CSV logs into deduplicated rows keyed by
    # (proto, dst_port, dst_ip, src_ip)
    for filename in sorted(glob.glob(os.path.join(LOG_DIRECTORY, "*.csv")))[-config.TRENDLINE_PERIOD:]:
        with open(filename, "rb") as f:
            match = re.search(r"([\d-]+)\.csv", filename)
            if match:
                date = match.group(1)
            else:
                continue

            reader = csv.DictReader(f, delimiter=' ')

            for row in reader:
                key = (row["proto"], row["dst_port"], row["dst_ip"], row["src_ip"])

                if filters and not (addr_to_int(row["src_ip"]) in filters or addr_to_int(row["dst_ip"]) in filters):
                    continue

                if key not in indexes:
                    indexes[key] = len(rows)
                    rows.append(row)
                else:
                    index = indexes[key]
                    rows[index]["first_seen"] = min(int(rows[index]["first_seen"]), int(row["first_seen"]))
                    rows[index]["last_seen"] = max(int(rows[index]["last_seen"]), int(row["last_seen"]))
                    rows[index]["count"] = int(rows[index]["count"]) + int(row["count"])

    # Renders the merged rows as JavaScript array literals
    for row in rows:
        try:
            port = int(row['dst_port'])
            port_name = MISC_PORTS.get(port) or socket.getservbyport(port, row['proto'].lower())
        except Exception:
            port_name = None
        finally:
            result += "["
            for column in ("proto", "dst_port", "dst_ip", "src_ip", "first_seen", "last_seen", "count"):
                if "_seen" in column:
                    result += '"%s",' % datetime.datetime.utcfromtimestamp(int(row[column])).strftime(TIME_FORMAT)
                elif "_port" in column and port_name:
                    result += '"%s (%s)",' % (row[column], port_name)
                else:
                    result += '"%s",' % row[column]
            result += "],\n"

    return result
def _trendline_data(self):
    result = "\n"
    series = {}
    dates = set()
    filters = self._get_filters()

    # Counts daily occurrences per "<PROTO> <port> (<service>)" series
    for filename in sorted(glob.glob(os.path.join(LOG_DIRECTORY, "*.csv")))[-config.TRENDLINE_PERIOD:]:
        with open(filename, "rb") as f:
            match = re.search(r"([\d-]+)\.csv", filename)
            if match:
                date = match.group(1)
            else:
                continue

            reader = csv.DictReader(f, delimiter=' ')

            for row in reader:
                if filters and not (addr_to_int(row["src_ip"]) in filters or addr_to_int(row["dst_ip"]) in filters):
                    continue

                try:
                    port = int(row['dst_port'])
                    port_name = MISC_PORTS.get(port) or socket.getservbyport(port, row['proto'].lower())
                except Exception:
                    port_name = None
                finally:
                    serie = "%s%s%s" % (row['proto'].upper(), " %s" % row['dst_port'] if row['dst_port'].isdigit() else "", " (%s)" % port_name if port_name else "")

                    if serie not in series:
                        series[serie] = {}
                    if date not in series[serie]:
                        series[serie][date] = 0

                    series[serie][date] += 1
                    dates.add(date)

    keys = series.keys()

    if keys:
        last_date = max(dates)
        totals = {}

        # Drops low-activity series (unless explicit IP filters are in use)
        for key in list(keys):
            if not filters:
                if any(series[key].get(date, 0) < config.TRENDLINE_DAILY_THRESHOLD for date in dates if date != last_date):
                    if all(series[key].get(date, 0) < config.TRENDLINE_DAILY_BURST for date in dates):
                        del keys[keys.index(key)]
            totals[key] = series[key].get(last_date, 0)

        keys = sorted(keys, key=lambda key: totals[key], reverse=True)

    # Renders chart rows for JavaScript consumption (one row per date)
    result += "['Date',%s],\n" % ','.join("'%s'" % key for key in keys)

    for date in sorted(dates):
        year, month, day = date.split('-')
        result += "[new Date(%s,%d,%s)," % (year, int(month) - 1, day)
        for serie in keys:
            result += "%s," % series[serie].get(date, 0)
        result += "],\n"

    result = result[:-1]

    return result
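# For illustration only: _dataset() and _trendline_data() above read daily,
# space-delimited CSV logs (named like "2024-01-31.csv", judging by the
# r"([\d-]+)\.csv" pattern and the later date.split('-')). Based on the column
# names referenced in the code, a log file is assumed to look roughly like:
#
#   proto dst_ip dst_port src_ip first_seen last_seen count
#   TCP 192.0.2.10 22 198.51.100.7 1700000000 1700000300 4
#
# where first_seen/last_seen are UNIX timestamps and count is the number of
# observed connection attempts for that (proto, dst_port, dst_ip, src_ip) key.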
def _process_packet(packet, sec, usec):
    try:
        if _datalink == pcapy.DLT_LINUX_SLL:
            packet = packet[2:]

        eth_header = struct.unpack("!HH8sH", packet[:ETH_LENGTH])
        eth_protocol = socket.ntohs(eth_header[3])

        if eth_protocol == IPPROTO:  # IP
            ip_header = struct.unpack("!BBHHHBBH4s4s", packet[ETH_LENGTH:ETH_LENGTH + 20])
            ip_length = ip_header[2]
            packet = packet[:ETH_LENGTH + ip_length]  # truncate
            iph_length = (ip_header[0] & 0xF) << 2
            protocol = ip_header[6]
            src_ip = socket.inet_ntoa(ip_header[8])
            dst_ip = socket.inet_ntoa(ip_header[9])
            proto = IPPROTO_LUT.get(protocol)

            # Checks whether the packet originates from one of the local networks
            local_src = False
            for prefix, mask in LOCAL_ADDRESSES:
                if addr_to_int(src_ip) & mask == prefix:
                    local_src = True
                    break

            if proto is None or any(_ in (config.IGNORE_ADDRESSES or "") for _ in (src_ip, dst_ip)):
                return

            # only process SYN packets
            if protocol == socket.IPPROTO_TCP:  # TCP
                if local_src:
                    return

                i = iph_length + ETH_LENGTH
                src_port, dst_port, _, _, _, flags = struct.unpack("!HHLLBB", packet[i:i + 14])

                if any(str(_) in (config.IGNORE_PORTS or "") for _ in (src_port, dst_port)):
                    return

                dst_key = "%s:%s:%s" % (proto, dst_ip, dst_port)
                stat_key = "%s:%s" % (dst_key, src_ip)

                if flags == 2:  # SYN set (only)
                    if dst_key not in _traffic:
                        _traffic[dst_key] = set()
                    _traffic[dst_key].add(src_ip)

                    if stat_key not in _auxiliary:
                        _auxiliary[stat_key] = [sec, sec, 1]
                    else:
                        _auxiliary[stat_key][1] = sec
                        _auxiliary[stat_key][2] += 1
            else:
                if protocol == socket.IPPROTO_UDP:  # UDP
                    i = iph_length + ETH_LENGTH
                    _ = packet[i:i + 4]
                    if len(_) < 4:
                        return
                    src_port, dst_port = struct.unpack("!HH", _)
                else:  # non-TCP/UDP (e.g. ICMP)
                    src_port, dst_port = '-', '-'

                if any(str(_) in (config.IGNORE_PORTS or "") for _ in (src_port, dst_port)):
                    return

                dst_key = "%s:%s:%s" % (proto, dst_ip, dst_port)
                stat_key = "%s:%s" % (dst_key, src_ip)
                flow = tuple(sorted((addr_to_int(src_ip), src_port, addr_to_int(dst_ip), dst_port)))

                if flow not in _auxiliary:
                    _auxiliary[flow] = True

                    if local_src:
                        return

                    if dst_key not in _traffic:
                        _traffic[dst_key] = set()
                    _traffic[dst_key].add(src_ip)

                    _auxiliary[stat_key] = [sec, sec, 1]
                elif stat_key in _auxiliary:
                    _auxiliary[stat_key][1] = sec
                    _auxiliary[stat_key][2] += 1

    except Exception:
        pass

    finally:
        _log_write()
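# A minimal sketch (assumption, not part of the original fragment) of how
# captured packets would typically be dispatched to _process_packet() with
# pcapy: Reader.loop() invokes the callback with a packet header whose
# getts() method yields the (sec, usec) timestamp pair expected above. The
# start_sensor() name is hypothetical.

def _packet_handler(header, payload):
    sec, usec = header.getts()
    _process_packet(payload, sec, usec)

def start_sensor():
    _cap.loop(-1, _packet_handler)  # capture indefinitely, dispatching each packet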