def parse_header_line(self, line):
    """Parse one "#"-prefixed Zeek/Bro log header line.

    Updates the parser state (separator, set separator, empty/unset
    field markers, path, field names and types) according to the
    directive on the line.  Invalid or non-header lines are logged
    and ignored.
    """
    if not line:
        return
    if line[:1] != b"#":
        LOGGER.warning("Not a header line")
        return
    keyval = line[1:].split(self.sep, 1)
    if len(keyval) < 2:
        # The "#separator " directive appears before the separator is
        # known; its argument follows a single ASCII space.
        if line.startswith(b'#separator '):
            keyval = [b'separator', line[11:]]
        else:
            # BUG FIX: LOGGER.warn() is a deprecated alias; use
            # LOGGER.warning() as done everywhere else in this file.
            LOGGER.warning("Invalid header line")
            return
    directive = keyval[0]
    arg = keyval[1]
    if directive == b"separator":
        # The separator may be hex-escaped (e.g. b"\\x09" for TAB).
        self.sep = decode_hex(arg[2:]) if arg.startswith(b'\\x') else arg
    elif directive == b"set_separator":
        self.set_sep = arg
    elif directive == b"empty_field":
        self.empty_field = arg
    elif directive == b"unset_field":
        self.unset_field = arg
    elif directive == b"path":
        self.path = arg.decode()
    elif directive == b"open":
        pass
    elif directive == b"fields":
        self.fields = arg.split(self.sep)
    elif directive == b"types":
        self.types = arg.split(self.sep)
def parse_header_line(self, line):
    """Interpret a single "#"-prefixed header line and update state.

    Recognized directives: separator, set_separator, empty_field,
    unset_field, path, open, fields, types.  Anything else (or a
    malformed line) is logged and dropped.
    """
    if not line:
        return
    if not line.startswith(b"#"):
        LOGGER.warning("Not a header line")
        return
    parts = line[1:].split(self.sep, 1)
    if len(parts) < 2:
        # "#separator " is special-cased: it is read before the
        # separator itself is known, using a plain space delimiter.
        if not line.startswith(b'#separator '):
            LOGGER.warning("Invalid header line")
            return
        parts = [b'separator', line[11:]]
    directive, arg = parts[0], parts[1]
    if directive == b"separator":
        if arg.startswith(b'\\x'):
            # Hex-escaped separator such as b"\\x09".
            self.sep = decode_hex(arg[2:])
        else:
            self.sep = arg
    elif directive == b"set_separator":
        self.set_sep = arg
    elif directive == b"empty_field":
        self.empty_field = arg
    elif directive == b"unset_field":
        self.unset_field = arg
    elif directive == b"path":
        self.path = arg.decode()
    elif directive == b"open":
        pass
    elif directive == b"fields":
        self.fields = arg.split(self.sep)
    elif directive == b"types":
        self.types = arg.split(self.sep)
def test(self, v4=True, v6=True):
    """Attempt zone transfers (AXFR) and yield host records.

    Yields one record per name server that allowed the transfer of
    `self.domain`, embedding a "dns-zone-transfer" script result.
    """
    start = datetime.now()
    for srvname, addr, res in self.do_test(v4=v4, v6=v6):
        # FIX: AXFR answers use fully-qualified names with a trailing
        # dot; strip it so hostname and domain values are canonical.
        srvname = srvname.rstrip(".")
        if not res:
            continue
        if (len(res) == 1 and res[0].rtype == 'SOA'):
            # SOA only: transfer failed
            continue
        LOGGER.info('AXFR success for %r on %r', self.domain, addr)
        # Column widths sized to the longest name / type in the answer.
        line_fmt = "| %%-%ds %%-%ds %%s" % (
            max(len(r.name) for r in res),
            max(len(r.rtype) for r in res),
        )
        yield {
            "addr": addr,
            "hostnames": [{
                "name": srvname,
                "type": "user",
                "domains": list(get_domains(srvname))
            }],
            "schema_version": SCHEMA_VERSION,
            "starttime": start,
            "endtime": datetime.now(),
            "ports": [{
                "port": 53,
                "protocol": "tcp",
                "service_name": "domain",
                "state_state": "open",
                "scripts": [{
                    "id": "dns-zone-transfer",
                    "output": '\nDomain: %s\n%s\n\\\n' % (
                        self.domain,
                        '\n'.join(line_fmt % (r.name, r.rtype, r.data)
                                  for r in res),
                    ),
                    "dns-zone-transfer": [{
                        "domain": self.domain,
                        "records": [{
                            "name": r.name,
                            "ttl": r.ttl,
                            "class": r.rclass,
                            "type": r.rtype,
                            "data": r.data
                        } for r in res]
                    }]
                }]
            }],
        }
        # Reset the start marker so each yielded record gets timing
        # that covers only its own server's test.
        start = datetime.now()
def displayfunction(cur: DBCursor) -> None:
    """Copy every record from *cur* into the output database.

    Records that cannot be stored are logged (with traceback) and
    skipped; the copy continues with the next record.
    """
    outdb.start_store_hosts()
    for record in cur:
        # The source DB "_id" field must not be carried over.
        record.pop("_id", None)
        try:
            outdb.store_host(record)
        except Exception:
            LOGGER.warning("Cannot insert record %r", record,
                           exc_info=True)
    outdb.stop_store_hosts()
def _dns_do_query(name, rtype=None, srv=None):
    """Run dig for *name* and yield nsrecord tuples parsed from it.

    Parameters: name (domain to query), rtype (optional record type,
    passed as "-t"), srv (optional server, queried as "@server").
    Unparsable output lines are logged and skipped.
    """
    cmd = ["dig", "+noquestion", "+nocomments", "+nocmd", "+nostat"]
    if rtype:
        cmd.extend(["-t", rtype])
    cmd.append(name)
    if srv:
        cmd.append("@%s" % srv)
    # FIX: use the Popen object as a context manager so the stdout
    # pipe is closed and the child process is reaped; the previous
    # version leaked the process handle.
    with subprocess.Popen(cmd, stdout=subprocess.PIPE) as proc:
        for raw in proc.stdout:
            line = raw.decode()[:-1]
            # Skip blank lines and ";"-prefixed dig comments.
            if line and line[:1] != ";":
                try:
                    yield nsrecord(*line.split(None, 4))
                except TypeError:
                    LOGGER.warning("Cannot read line %r", line)
def _dns_do_query(name, rtype=None, srv=None):
    """Run dig for *name* and yield nsrecord tuples parsed from it.

    Parameters: name (domain to query), rtype (optional record type,
    passed as '-t'), srv (optional server, queried as '@server').
    """
    cmd = ['dig', '+noquestion', '+nocomments', '+nocmd', '+nostat']
    if rtype:
        cmd.extend(['-t', rtype])
    cmd.append(name)
    if srv:
        cmd.append('@%s' % srv)
    # FIX: run the process as a context manager so its stdout pipe is
    # closed and the child reaped (the previous version leaked the
    # Popen object).
    with subprocess.Popen(cmd, stdout=subprocess.PIPE) as proc:
        for raw in proc.stdout:
            line = raw.decode()[:-1]
            # Skip blank lines and ';'-prefixed dig comments.
            if line and line[:1] != ';':
                try:
                    yield nsrecord(*line.split(None, 4))
                except TypeError:
                    LOGGER.warning('Cannot read line %r', line)
def parse_line(self, line: bytes) -> Dict[str, Any]:
    """Parse one log line into an event dict.

    Returns an empty dict when the line does not match LINE_RE, when
    its timestamp is malformed, or when it contains a duplicate key.
    """
    m = LINE_RE.match(line.decode())
    if not m:
        return {}
    res: Dict[str, Any] = {}
    # time of event
    try:
        res["ts"] = datetime.datetime.strptime(m.group("time"),
                                               "%Y/%m/%d %H:%M:%S")
    except ValueError:
        # FIX: a malformed timestamp used to raise out of the parser;
        # log and skip the line instead (matching the other parse_line
        # implementation in this file).
        LOGGER.warning("Cannot parse timestamp from line [%r]", line)
        return {}
    # data of event
    for entry in m.group("data").split("|"):
        k, v = entry.split("=", 1)
        if k in res:
            LOGGER.warning("Duplicate key in line [%r]", line)
            return {}
        res[k] = v
    return res
def _dns_do_query(
        name: str, rtype: Optional[str] = None, srv: Optional[str] = None
) -> Generator[nsrecord, None, None]:
    """Invoke dig for *name* and yield parsed nsrecord entries.

    rtype is passed as "-t TYPE" when given; srv selects the server
    to query ("@server").  Lines that cannot be parsed are logged.
    """
    args = ["dig", "+noquestion", "+nocomments", "+nocmd", "+nostat"]
    if rtype:
        args += ["-t", rtype]
    args.append(name)
    if srv:
        args.append("@%s" % srv)
    with subprocess.Popen(args, stdout=subprocess.PIPE) as process:
        # stdout=PIPE guarantees a stream; this narrows the Optional
        # for type checkers.
        assert process.stdout is not None
        for raw_line in process.stdout:
            text = raw_line.decode()[:-1]
            # Blank lines and ";"-prefixed dig comments are ignored.
            if not text or text[:1] == ";":
                continue
            try:
                yield nsrecord(*text.split(None, 4))
            except TypeError:
                LOGGER.warning("Cannot read line %r", text)
def parse_line(self, line: bytes) -> Dict[str, Any]:
    """Extract host, timestamp and user agent from a web log line.

    Returns an empty dict when the line does not match LINE_RE or its
    timestamp cannot be parsed.
    """
    match = LINE_RE.match(line.decode())
    if match is None:
        LOGGER.warning("Cannot parse line [%r]", line)
        return {}
    try:
        # Timestamp, without timezone specification
        timestamp = datetime.datetime.strptime(
            match.group("datetime").split()[0],
            "%d/%b/%Y:%H:%M:%S",
        )
    except ValueError:
        LOGGER.warning("Cannot parse timestamp from line [%r]", line)
        return {}
    # data of event
    result: Dict[str, Any] = {
        "host": match.group("addr"),
        "ts": timestamp,
        "useragent": match.group("useragent"),
    }
    return result
def add_cpe_values(hostrec, path, cpe_values):
    """Add CPE values (`cpe_values`) to the `hostrec` at the given `path`.

    CPEs are indexed in a dictionary to agglomerate origins, but this
    dict is replaced with its values() in ._pre_addhost() or in
    .store_scan_json_zgrab(), or in the function that calls
    add_cpe_values(), depending on the context.
    """
    index = hostrec.setdefault('cpes', {})
    for value in cpe_values:
        if value in index:
            entry = index[value]
        else:
            try:
                entry = cpe2dict(value)
            except ValueError:
                # Malformed CPE strings are reported and skipped.
                LOGGER.warning("Invalid cpe format (%s)", value)
                continue
            index[value] = entry
        # Record where this CPE was seen.
        entry.setdefault('origins', set()).add(path)
def rec_iter(
    filenames: List[str],
    sensor: Optional[str],
    ignore_rules: Dict[str, Dict[str, List[Tuple[int, int]]]],
) -> Generator[Tuple[Optional[int], Record], None, None]:
    """Iterate p0f v3 log files and yield (timestamp, record) pairs.

    Only "syn" and "syn+ack" entries are kept; records are produced
    through handle_rec(), honoring the IGNORENETS / NEVERIGNORE rules
    from `ignore_rules`.
    """
    ignorenets = ignore_rules.get("IGNORENETS", {})
    neverignore = ignore_rules.get("NEVERIGNORE", {})
    for fname in filenames:
        with P0fFile(fname) as fdesc:
            for line in fdesc:
                if not line:
                    continue
                if "mod" not in line:
                    LOGGER.warning("no mod detected [%r]", line)
                    continue
                if line["mod"] not in ["syn", "syn+ack"]:
                    continue
                # "subj" names the entry key ("cli"/"srv" — TODO
                # confirm) holding the subject address; that key must
                # itself exist in the entry.
                if "subj" not in line or line["subj"] not in line:
                    LOGGER.warning("no subj detected [%r]", line)
                    continue
                if "raw_sig" not in line:
                    LOGGER.warning("no raw_sig detected [%r]", line)
                    continue
                # Optional OS fingerprint details; "???"/"none" mean
                # "unknown" and are omitted.
                infos = {}
                if "os" in line and line["os"] != "???":
                    infos["os"] = line["os"]
                if "dist" in line:
                    infos["dist"] = line["dist"]
                if "params" in line and line["params"].lower() != "none":
                    infos["params"] = line["params"]
                # Subject fields are formatted as "addr/port"; the
                # port is only meaningful when the subject is "srv".
                host = line[line["subj"]].split("/")[0]
                port = int(line["srv"].split("/")[1]) if line["subj"] == "srv" else None
                for tstamp, rec in handle_rec(
                        # sensor
                        sensor,
                        # ignorenets,
                        ignorenets,
                        # neverignore,
                        neverignore,
                        # timestamp
                        timestamp=line["ts"],
                        # uid
                        uid=None,
                        # host
                        host=host,
                        # srvport
                        srvport=port,
                        # recon_type
                        recon_type="P0FV3_%s" % line["mod"].upper(),
                        # source
                        source="P0FV3",
                        # value
                        value=line["raw_sig"],
                        # targetval
                        targetval=None,
                ):
                    if infos:
                        rec["infos"] = infos
                    yield (tstamp, rec)
def __init__(self, fname: str, pcap_filter: Optional[str] = None) -> None:
    """Init Iptables class.

    `pcap_filter` is accepted for API compatibility with the other
    parsers but is not supported here: when set, a warning is logged
    and the filter is ignored.
    """
    if pcap_filter is not None:
        # Warn instead of failing so callers passing a filter still work.
        LOGGER.warning("PCAP filter not supported in Iptables")
    super().__init__(fname)
def test(self, v4=True, v6=True):
    """Attempt AXFR zone transfers and yield host records.

    For each name server that allowed the transfer of `self.domain`,
    yields one record with a "dns-zone-transfer" script result, then
    one record per distinct address found in the zone's A/AAAA
    entries.
    """
    start = datetime.now()
    for srvname, addr, res in self.do_test(v4=v4, v6=v6):
        # Zone data uses fully-qualified names; drop the trailing dot.
        srvname = srvname.rstrip(".")
        if not res:
            continue
        if len(res) == 1 and res[0].rtype == "SOA":
            # SOA only: transfer failed
            continue
        LOGGER.info("AXFR success for %r on %r", self.domain, addr)
        # Column widths sized to the longest name / type in the answer.
        line_fmt = "| %%-%ds %%-%ds %%s" % (
            max(len(r.name) for r in res),
            max(len(r.rtype) for r in res),
        )
        yield {
            "addr": addr,
            "hostnames": [{
                "name": srvname,
                "type": "user",
                "domains": list(get_domains(srvname)),
            }],
            "schema_version": SCHEMA_VERSION,
            "starttime": start,
            "endtime": datetime.now(),
            "ports": [
                {
                    "port": 53,
                    "protocol": "tcp",
                    "service_name": "domain",
                    "state_state": "open",
                    "scripts": [
                        {
                            "id": "dns-zone-transfer",
                            "output": "\nDomain: %s\n%s\n\\\n" % (
                                self.domain,
                                "\n".join(line_fmt % (r.name, r.rtype, r.data)
                                          for r in res),
                            ),
                            "dns-zone-transfer": [{
                                "domain": self.domain,
                                "records": [{
                                    "name": r.name,
                                    "ttl": r.ttl,
                                    "class": r.rclass,
                                    "type": r.rtype,
                                    "data": r.data,
                                } for r in res],
                            }],
                        },
                    ],
                },
            ],
        }
        # Collect every address seen in A/AAAA records, with the set
        # of (rtype, name) pairs pointing at it.
        hosts = {}
        for r in res:
            if r.rclass != "IN":
                continue
            if r.rtype in ["A", "AAAA"]:
                name = r.name.rstrip(".")
                hosts.setdefault(r.data, set()).add((r.rtype, name))
        for host, records in hosts.items():
            yield {
                "addr": host,
                "hostnames": [{
                    "name": rec[1],
                    "type": rec[0],
                    "domains": list(get_domains(rec[1])),
                } for rec in records],
                "schema_version": SCHEMA_VERSION,
                "starttime": start,
                "endtime": datetime.now(),
            }
        # Reset timing so each server's records carry their own window.
        start = datetime.now()
def displayfunction(cur: Iterable[Record], scan: Optional[Any] = None) -> None:
    """Print each record from *cur* as one JSON document per line.

    A `scan` argument is accepted for interface compatibility but has
    no JSON representation and is therefore not displayed.
    """
    if scan is not None:
        LOGGER.debug("Scan not displayed in JSON mode")
    for record in cur:
        print(json.dumps(record, default=serialize))
def displayfunction(cur, scan=None):
    """Dump every record from *cur* to stdout as JSON, one per line.

    The optional `scan` argument is accepted for interface
    compatibility but is not displayed in JSON mode.
    """
    if scan is not None:
        LOGGER.debug("Scan not displayed in JSON mode")
    for record in cur:
        print(json.dumps(record))