def main() -> None:
    parser = argparse.ArgumentParser(
        description="Tool for IP address manipulation.",
    )
    parser.add_argument(
        "ips",
        nargs="*",
        help="Display results for specified IP addresses or ranges.",
    )
    args = parser.parse_args()
    # Re-join ranges given as three tokens ("1.2.3.4 - 1.2.3.8") into a
    # single "start-stop" argument.
    while "-" in args.ips:
        idx = args.ips.index("-")
        args.ips = (args.ips[:idx - 1]
                    + ["%s-%s" % (args.ips[idx - 1], args.ips[idx + 1])]
                    + args.ips[idx + 2:])
    for a in args.ips:
        if "/" in a:
            # Network: display the matching start-stop range
            a = utils.net2range(a)
            print("%s-%s" % (a[0], a[1]))
        elif "-" in a:
            # Range: display the matching list of networks
            a = a.split("-", 1)
            if a[0].isdigit():
                a[0] = int(a[0])
            if a[1].isdigit():
                a[1] = int(a[1])
            for n in utils.range2nets((a[0], a[1])):
                print(n)
        else:
            # Single value: convert an integer to the dotted form, or
            # the dotted form to an integer
            if a.isdigit():
                a = utils.force_int2ip(int(a))
            else:
                a = utils.force_ip2int(a)
            print(a)
def main():
    parser = argparse.ArgumentParser(
        description='Tool for IP address manipulation.',
    )
    parser.add_argument(
        'ips',
        nargs='*',
        help='Display results for specified IP addresses or ranges.',
    )
    args = parser.parse_args()
    # Re-join ranges given as three tokens ("1.2.3.4 - 1.2.3.8") into a
    # single "start-stop" argument.
    while '-' in args.ips:
        idx = args.ips.index('-')
        args.ips = (args.ips[:idx - 1]
                    + ['%s-%s' % (args.ips[idx - 1], args.ips[idx + 1])]
                    + args.ips[idx + 2:])
    for a in args.ips:
        if '/' in a:
            a = utils.net2range(a)
            print("%s-%s" % (a[0], a[1]))
        elif '-' in a:
            a = a.split('-', 1)
            if a[0].isdigit():
                a[0] = int(a[0])
            if a[1].isdigit():
                a[1] = int(a[1])
            for n in utils.range2nets((a[0], a[1])):
                print(n)
        else:
            if a.isdigit():
                a = utils.force_int2ip(int(a))
            else:
                a = utils.force_ip2int(a)
            print(a)
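# A minimal usage sketch for the tool above. It assumes the ivre.utils
# semantics used by main(): net2range() returns a (start, stop) pair and
# range2nets() the list of CIDR networks covering a range.
if __name__ == "__main__":
    # "10.0.0.0/30"        -> prints "10.0.0.0-10.0.0.3"  (network branch)
    # "10.0.0.0-10.0.0.3"  -> prints "10.0.0.0/30"        (range branch)
    # "10.0.0.1"           -> prints "167772161"          (single address)
    # "167772161"          -> prints "10.0.0.1"           (single integer)
    main()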
def handle_rec(sensor, ignorenets, neverignore,
               # these arguments are provided by **bro_line
               timestamp=None, uid=None, host=None, srvport=None,
               recon_type=None, source=None, value=None, targetval=None):
    if host is None:
        spec = {
            'targetval': targetval,
            'recontype': recon_type,
            'value': value,
        }
    else:
        spec = {
            'addr': utils.force_ip2int(host),
            'recontype': recon_type,
            'value': value,
        }
    if sensor is not None:
        spec.update({'sensor': sensor})
    if srvport is not None:
        spec.update({'port': srvport})
    if source is not None:
        spec.update({'source': source})
    spec = _prepare_rec(spec, ignorenets, neverignore)
    float_ts = utils.datetime2timestamp(timestamp)
    return float_ts, spec
def _get_ignore_rules(
    ignore_spec: Optional[str],
) -> Dict[str, Dict[str, List[Tuple[int, int]]]]:
    """Executes the ignore_spec file and returns the ignore_rules
    dictionary.

    """
    ignore_rules: Dict[str, Dict[str, List[Tuple[int, int]]]] = {}
    if ignore_spec is not None:
        with open(ignore_spec, "rb") as fdesc:
            # pylint: disable=exec-used
            exec(compile(fdesc.read(), ignore_spec, "exec"), ignore_rules)
        subdict = ignore_rules.get("IGNORENETS")
        if subdict:
            for subkey, values in subdict.items():
                subdict[subkey] = [(force_ip2int(val[0]),
                                    force_ip2int(val[1]))
                                   for val in values]
    return ignore_rules
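# A sketch of a hypothetical ignore_spec file as _get_ignore_rules()
# consumes it: the file is exec()ed as plain Python, so it simply defines
# dictionaries. Keys and addresses below are illustrative; IGNORENETS maps
# recontypes to (start, stop) address pairs (converted to integers above),
# and NEVERIGNORE lists sources that must never be ignored:
#
#     IGNORENETS = {
#         "HTTP_CLIENT_HEADER": [("10.0.0.0", "10.255.255.255")],
#     }
#     NEVERIGNORE = {
#         "HTTP_CLIENT_HEADER": ["USER-AGENT"],
#     }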
def _prepare_rec(spec, ignorenets, neverignore):
    # First of all, let's see if we are supposed to ignore this spec,
    # and if so, do so.
    if 'addr' in spec and \
       spec.get('source') not in neverignore.get(spec['recontype'], []):
        for start, stop in ignorenets.get(spec['recontype'], ()):
            if start <= utils.force_ip2int(spec['addr']) <= stop:
                return None
    # Then, let's clean up the records.
    # Change Symantec's random user agents (matching SYMANTEC_UA) to
    # the constant string 'SymantecRandomUserAgent'.
    if spec['recontype'] == 'HTTP_CLIENT_HEADER' and \
       spec.get('source') == 'USER-AGENT':
        if SYMANTEC_UA.match(spec['value']):
            spec['value'] = 'SymantecRandomUserAgent'
    # Change any Digest authorization header to remove non-constant
    # information. On the one hand we lose the information needed to
    # try to recover the passwords, but on the other hand we store
    # specs with different challenges but the same username, realm,
    # host and sensor in the same records.
    elif spec['recontype'] in ['HTTP_CLIENT_HEADER',
                               'HTTP_CLIENT_HEADER_SERVER'] and \
            spec.get('source') in ['AUTHORIZATION', 'PROXY-AUTHORIZATION']:
        authtype = spec['value'].split(None, 1)[0]
        if authtype.lower() == 'digest':
            try:
                # we only keep relevant info
                value = [val for val in
                         _split_digest_auth(spec['value'][6:].strip())
                         if DIGEST_AUTH_INFOS.match(val)]
                spec['value'] = '%s %s' % (authtype, ','.join(value))
            except Exception:
                utils.LOGGER.warning("Cannot parse digest error for %r",
                                     spec, exc_info=True)
        elif authtype.lower() in ['negotiate', 'kerberos', 'oauth',
                                  'ntlm']:
            spec['value'] = authtype
    # p0f in Bro hack: we use the "source" field to provide the
    # "distance" and "version" values
    elif spec['recontype'] == 'P0F':
        distance, version = spec.pop('source').split('-', 1)
        try:
            spec['distance'] = int(distance)
        except ValueError:
            pass
        if version:
            spec['version'] = version
    # TCP server banners: try to normalize data
    elif spec['recontype'] == 'TCP_SERVER_BANNER':
        newvalue = value = utils.nmap_decode_data(spec['value'])
        for pattern, replace in TCP_SERVER_PATTERNS:
            if pattern.search(newvalue):
                newvalue = pattern.sub(replace, newvalue)
        if newvalue != value:
            spec['value'] = utils.nmap_encode_data(newvalue)
    return spec
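# Digest cleanup sketch for the AUTHORIZATION branch above (header values
# are made up): per-challenge material such as the nonce and response is
# meant to be dropped, keeping only the fields matched by
# DIGEST_AUTH_INFOS, so that one record aggregates all requests from the
# same user against the same realm:
#
#     before: Digest username="bob", realm="r", nonce="n0", response="1a2b"
#     after:  Digest username="bob",realm="r"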
def lookup(self, ip):
    # Walk the binary search tree, one node per address bit; IPv4
    # databases start at bit 96 (IPv4-mapped IPv6 addresses).
    node_no = 0
    addr = utils.force_ip2int(ip)
    for i in range(96 if self.ip_version == 4 else 0, 128):
        flag = (addr >> (127 - i)) & 1
        next_node_no = self.read_record(node_no, flag)
        if next_node_no == 0:
            raise Exception("Invalid file format")
        if next_node_no >= self.node_count:
            # Pointer past the node area: resolve it in the data section
            pos = (next_node_no - self.node_count
                   - self.DATA_SECTION_SEPARATOR_SIZE)
            return self.decode(pos, self.data_section_start)[1]
        node_no = next_node_no
    raise Exception("Invalid file format")
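# Usage sketch for lookup() above, assuming a MaxMind-DB-style reader
# class exposing it (the class name and database file are illustrative):
# the method returns the decoded data record associated with the longest
# matching prefix for the given address.
#
#     reader = MMDBReader("GeoLite2-Country.mmdb")  # hypothetical ctor
#     print(reader.lookup("8.8.8.8"))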
def main():
    if USING_ARGPARSE:
        parser = argparse.ArgumentParser(
            description='Tool for IP address manipulation.',
        )
    else:
        # optparse fallback: emulate the bits of the argparse API we use
        parser = optparse.OptionParser(
            description='Tool for IP address manipulation.',
        )
        parser.parse_args_orig = parser.parse_args

        def my_parse_args():
            res = parser.parse_args_orig()
            res[0].ensure_value('ips', res[1])
            return res[0]

        parser.parse_args = my_parse_args
        parser.add_argument = parser.add_option
    if USING_ARGPARSE:
        parser.add_argument('ips', nargs='*',
                            help='Display results for specified IP addresses'
                                 ' or ranges.')
    args = parser.parse_args()
    for a in args.ips:
        if '/' in a:
            a = utils.net2range(a)
            print("%s-%s" % (a[0], a[1]))
        elif '-' in a:
            a = a.split('-', 1)
            if a[0].isdigit():
                a[0] = int(a[0])
            if a[1].isdigit():
                a[1] = int(a[1])
            for n in utils.range2nets((a[0], a[1])):
                print(n)
        else:
            if a.isdigit():
                a = utils.force_int2ip(int(a))
            else:
                a = utils.force_ip2int(a)
            print(a)
def handle_rec(sensor, ignorenets, neverignore,
               # these arguments are provided by **bro_line
               timestamp, uid, host, srvport, recon_type, source, value,
               targetval):
    if host is None:
        spec = {
            'targetval': targetval,
            'recontype': recon_type,
            'value': value,
        }
    else:
        host = utils.force_ip2int(host)
        spec = {'addr': host, 'recontype': recon_type, 'value': value}
    if sensor is not None:
        spec.update({'sensor': sensor})
    if srvport is not None:
        spec.update({'port': srvport})
    if source is not None:
        spec.update({'source': source})
    spec = _prepare_rec(spec, ignorenets, neverignore)
    # Python 2/3 compat: Python 3 has datetime.timestamp()
    try:
        float_ts = timestamp.timestamp()
    except AttributeError:
        float_ts = (time.mktime(timestamp.timetuple())
                    + timestamp.microsecond / (1000000.))
    return float_ts, spec
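# Illustrative call of handle_rec() above: **bro_line unpacks one parsed
# passive-recon event into the keyword arguments. All values are made up,
# and ignorenets/neverignore are assumed to come from _get_ignore_rules().
import datetime

bro_line = {
    "timestamp": datetime.datetime(2019, 1, 1, 12, 0, 0),
    "uid": None,
    "host": "198.51.100.7",
    "srvport": 80,
    "recon_type": "HTTP_CLIENT_HEADER",
    "source": "USER-AGENT",
    "value": "Mozilla/5.0",
    "targetval": None,
}
# float_ts, spec = handle_rec("sensor1", ignorenets, neverignore, **bro_line)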
def get_nmap(subdb):
    """Get records from Nmap & View databases

    :param str subdb: database to query (must be "scans" or "view")
    :query str q: query (including limit/skip and sort)
    :query str f: filter
    :query str callback: callback to use for JSONP results
    :query bool ipsasnumbers: to get IP addresses as numbers rather
        than as strings
    :query bool datesasstrings: to get dates as strings rather than as
        timestamps
    :query str format: "json" (the default) or "ndjson"
    :status 200: no error
    :status 400: invalid referer
    :>jsonarr object: results

    """
    subdb_tool = "view" if subdb == "view" else "scancli"
    subdb = db.view if subdb == "view" else db.nmap
    flt_params = get_base(subdb)
    # PostgreSQL: the query plan is affected by the limit and gives
    # really poor results. This is a temporary workaround (look for
    # XXX-WORKAROUND-PGSQL).
    # result = subdb.get(flt_params.flt, limit=flt_params.limit,
    #                    skip=flt_params.skip, sort=flt_params.sortby)
    result = subdb.get(
        flt_params.flt,
        skip=flt_params.skip,
        sort=flt_params.sortby,
        fields=flt_params.fields,
    )
    if flt_params.unused:
        msg = "Option%s not understood: %s" % (
            "s" if len(flt_params.unused) > 1 else "",
            ", ".join(flt_params.unused),
        )
        if flt_params.callback is not None:
            yield webutils.js_alert("param-unused", "warning", msg)
        utils.LOGGER.warning(msg)
    elif flt_params.callback is not None:
        yield webutils.js_del_alert("param-unused")
    if config.DEBUG:
        msg1 = "filter: %r" % subdb.flt2str(flt_params.flt)
        msg2 = "user: %r" % webutils.get_user()
        utils.LOGGER.debug(msg1)
        utils.LOGGER.debug(msg2)
        if flt_params.callback is not None:
            yield webutils.js_alert("filter", "info", msg1)
            yield webutils.js_alert("user", "info", msg2)
    version_mismatch = {}
    if flt_params.callback is None:
        if flt_params.fmt == "json":
            yield "[\n"
    else:
        yield "%s([\n" % flt_params.callback
    # XXX-WORKAROUND-PGSQL
    # for rec in result:
    for i, rec in enumerate(result):
        for fld in ["_id", "scanid"]:
            try:
                del rec[fld]
            except KeyError:
                pass
        if flt_params.ipsasnumbers:
            rec["addr"] = utils.force_ip2int(rec["addr"])
        if not flt_params.datesasstrings:
            for field in subdb.datetime_fields:
                _set_datetime_field(subdb, rec, field)
        for port in rec.get("ports", []):
            if "screendata" in port:
                port["screendata"] = utils.encode_b64(port["screendata"])
            for script in port.get("scripts", []):
                if "masscan" in script:
                    try:
                        del script["masscan"]["raw"]
                    except KeyError:
                        pass
        if not flt_params.ipsasnumbers:
            if "traces" in rec:
                for trace in rec["traces"]:
                    trace["hops"].sort(key=lambda x: x["ttl"])
                    for hop in trace["hops"]:
                        hop["ipaddr"] = utils.force_int2ip(hop["ipaddr"])
        addresses = rec.get("addresses", {}).get("mac")
        if addresses:
            newaddresses = []
            for addr in addresses:
                manuf = utils.mac2manuf(addr)
                if manuf and manuf[0]:
                    newaddresses.append({"addr": addr, "manuf": manuf[0]})
                else:
                    newaddresses.append({"addr": addr})
            rec["addresses"]["mac"] = newaddresses
        if flt_params.fmt == "ndjson":
            yield "%s\n" % json.dumps(rec, default=utils.serialize)
        else:
            yield "%s\t%s" % (
                "" if i == 0 else ",\n",
                json.dumps(rec, default=utils.serialize),
            )
        check = subdb.cmp_schema_version_host(rec)
        if check:
            version_mismatch[check] = version_mismatch.get(check, 0) + 1
        # XXX-WORKAROUND-PGSQL
        if flt_params.limit and i + 1 >= flt_params.limit:
            break
    if flt_params.callback is None:
        if flt_params.fmt == "json":
            yield "\n]\n"
    else:
        yield "\n]);\n"
    messages = {
        1: lambda count: (
            "%d document%s displayed %s out-of-date. Please run the "
            "following command: 'ivre %s --update-schema'" % (
                count,
                "s" if count > 1 else "",
                "are" if count > 1 else "is",
                subdb_tool,
            )
        ),
        -1: lambda count: (
            "%d document%s displayed ha%s been inserted by a more recent "
            "version of IVRE. Please update IVRE!" % (
                count,
                "s" if count > 1 else "",
                "ve" if count > 1 else "s",
            )
        ),
    }
    for mismatch, count in version_mismatch.items():
        message = messages[mismatch](count)
        if flt_params.callback is not None:
            yield webutils.js_alert(
                "version-mismatch-%d" % ((mismatch + 1) // 2),
                "warning",
                message,
            )
        utils.LOGGER.warning(message)
def _prepare_rec(spec, ignorenets, neverignore):
    # First of all, let's see if we are supposed to ignore this spec,
    # and if so, do so.
    if 'addr' in spec and \
       spec.get('source') not in neverignore.get(spec['recontype'], []):
        for start, stop in ignorenets.get(spec['recontype'], ()):
            if start <= utils.force_ip2int(spec['addr']) <= stop:
                return None
    # Then, let's clean up the records.
    # Change Symantec's random user agents (matching SYMANTEC_UA) to
    # the constant string 'SymantecRandomUserAgent'.
    if spec['recontype'] == 'HTTP_CLIENT_HEADER' and \
       spec.get('source') == 'USER-AGENT':
        if SYMANTEC_UA.match(spec['value']):
            spec['value'] = 'SymantecRandomUserAgent'
        elif KASPERSKY_UA.match(spec['value']):
            spec['value'] = 'KasperskyWeirdUserAgent'
        else:
            match = SYMANTEC_SEP_UA.match(spec['value'])
            if match is not None:
                spec['value'] = '%s%s' % match.groups()
    # Change any Digest authorization header to remove non-constant
    # information. On the one hand we lose the information needed to
    # try to recover the passwords, but on the other hand we store
    # specs with different challenges but the same username, realm,
    # host and sensor in the same records.
    elif spec['recontype'] in ['HTTP_CLIENT_HEADER',
                               'HTTP_CLIENT_HEADER_SERVER'] and \
            spec.get('source') in ['AUTHORIZATION', 'PROXY-AUTHORIZATION']:
        authtype = spec['value'].split(None, 1)[0]
        if authtype.lower() == 'digest':
            try:
                # we only keep relevant info
                value = [val for val in
                         _split_digest_auth(spec['value'][6:].strip())
                         if DIGEST_AUTH_INFOS.match(val)]
                spec['value'] = '%s %s' % (authtype, ','.join(value))
            except Exception:
                utils.LOGGER.warning("Cannot parse digest error for %r",
                                     spec, exc_info=True)
        elif authtype.lower() in ['negotiate', 'kerberos', 'oauth',
                                  'ntlm']:
            spec['value'] = authtype
    # p0f in Bro hack: we use the "source" field to provide the
    # "distance" and "version" values
    elif spec['recontype'] == 'P0F':
        distance, version = spec.pop('source').split('-', 1)
        try:
            spec['distance'] = int(distance)
        except ValueError:
            pass
        if version:
            spec['version'] = version
    # TCP server banners: try to normalize data
    elif spec['recontype'] == 'TCP_SERVER_BANNER':
        newvalue = value = utils.nmap_decode_data(spec['value'])
        for pattern, replace in TCP_SERVER_PATTERNS:
            if pattern.search(newvalue):
                newvalue = pattern.sub(replace, newvalue)
        if newvalue != value:
            spec['value'] = utils.nmap_encode_data(newvalue)
    # SSL_{CLIENT,SERVER} JA3
    elif ((spec['recontype'] == 'SSL_CLIENT' and spec['source'] == 'ja3')
          or (spec['recontype'] == 'SSL_SERVER'
              and spec['source'].startswith('ja3-'))):
        value = spec['value']
        spec.setdefault('infos', {})['raw'] = value
        spec['value'] = hashlib.new("md5", value.encode()).hexdigest()
        if spec['recontype'] == 'SSL_SERVER':
            clientvalue = spec['source'][4:]
            spec['infos'].setdefault('client', {})['raw'] = clientvalue
            spec['source'] = 'ja3-%s' % hashlib.new(
                "md5",
                clientvalue.encode(),
            ).hexdigest()
    # Check DNS Blacklist answer
    elif spec['recontype'] == 'DNS_ANSWER':
        if any(spec['value'].endswith(dnsbl)
               for dnsbl in config.DNS_BLACKLIST_DOMAINS):
            dnsbl_val = spec['value']
            spec['recontype'] = 'DNS_BLACKLIST'
            spec['value'] = spec['addr']
            spec.update({'source': "%s-%s" % (dnsbl_val.split('.', 4)[4],
                                              spec['source'])})
            spec['addr'] = '.'.join(dnsbl_val.split('.')[3::-1])
    return spec
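# JA3 normalization sketch mirroring the SSL_{CLIENT,SERVER} branch above:
# the raw fingerprint string is kept under infos['raw'] while the stored
# value becomes its MD5 digest (the fingerprint below is made up):
import hashlib

raw_ja3 = "771,4865-4866-4867,0-23-65281,29-23-24,0"  # illustrative
print(hashlib.new("md5", raw_ja3.encode()).hexdigest())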
def _prepare_rec(spec, ignorenets, neverignore):
    # First of all, let's see if we are supposed to ignore this spec,
    # and if so, do so.
    if "addr" in spec and spec.get("source") not in neverignore.get(
            spec["recontype"], []):
        for start, stop in ignorenets.get(spec["recontype"], []):
            if start <= utils.force_ip2int(spec["addr"]) <= stop:
                return
    # Then, let's clean up the records.
    # Change Symantec's random user agents (matching SYMANTEC_UA) to
    # the constant string "SymantecRandomUserAgent".
    if spec["recontype"] == "HTTP_CLIENT_HEADER" and spec.get(
            "source") == "USER-AGENT":
        if SYMANTEC_UA.match(spec["value"]):
            spec["value"] = "SymantecRandomUserAgent"
        elif KASPERSKY_UA.match(spec["value"]):
            spec["value"] = "KasperskyWeirdUserAgent"
        else:
            match = SYMANTEC_SEP_UA.match(spec["value"])
            if match is not None:
                spec["value"] = "%s%s" % match.groups()
    # Change any Digest authorization header to remove non-constant
    # information. On the one hand we lose the information needed to
    # try to recover the passwords, but on the other hand we store
    # specs with different challenges but the same username, realm,
    # host and sensor in the same records.
    elif spec["recontype"] in {
            "HTTP_CLIENT_HEADER",
            "HTTP_CLIENT_HEADER_SERVER",
    } and spec.get("source") in {"AUTHORIZATION", "PROXY-AUTHORIZATION"}:
        value = spec["value"]
        if value:
            authtype = value.split(None, 1)[0]
            if authtype.lower() == "digest":
                try:
                    # we only keep relevant info
                    spec["value"] = "%s %s" % (
                        authtype,
                        ",".join(
                            val
                            for val in _split_digest_auth(value[6:].strip())
                            if DIGEST_AUTH_INFOS.match(val)
                        ),
                    )
                except Exception:
                    utils.LOGGER.warning("Cannot parse digest error for %r",
                                         spec, exc_info=True)
            elif ntlm._is_ntlm_message(value):
                # NTLM_NEGOTIATE and NTLM_AUTHENTICATE
                yield from _prepare_rec_ntlm(spec, "NTLM_CLIENT_FLAGS")
                return
            elif authtype.lower() in {"negotiate", "kerberos", "oauth"}:
                spec["value"] = authtype
    elif spec["recontype"] == "HTTP_SERVER_HEADER" and spec.get(
            "source") in {"WWW-AUTHENTICATE", "PROXY-AUTHENTICATE"}:
        value = spec["value"]
        if value:
            authtype = value.split(None, 1)[0]
            if authtype.lower() == "digest":
                try:
                    # we only keep relevant info
                    spec["value"] = "%s %s" % (
                        authtype,
                        ",".join(
                            val
                            for val in _split_digest_auth(value[6:].strip())
                            if DIGEST_AUTH_INFOS.match(val)
                        ),
                    )
                except Exception:
                    utils.LOGGER.warning("Cannot parse digest error for %r",
                                         spec, exc_info=True)
            elif ntlm._is_ntlm_message(value):
                # NTLM_CHALLENGE
                yield from _prepare_rec_ntlm(spec, "NTLM_SERVER_FLAGS")
                return
            elif authtype.lower() in {"negotiate", "kerberos", "oauth"}:
                spec["value"] = authtype
    # TCP server banners: try to normalize data
    elif spec["recontype"] == "TCP_SERVER_BANNER":
        newvalue = value = utils.nmap_decode_data(spec["value"])
        for pattern, replace in TCP_SERVER_PATTERNS:
            if pattern.search(newvalue):
                newvalue = pattern.sub(replace, newvalue)
        if newvalue != value:
            spec["value"] = utils.nmap_encode_data(newvalue)
    elif spec["recontype"] in {"TCP_CLIENT_BANNER", "TCP_HONEYPOT_HIT"}:
        if spec["value"]:
            data = utils.nmap_decode_data(spec["value"])
            if data in scanners.TCP_PROBES:
                scanner, probe = scanners.TCP_PROBES[data]
                info = {
                    "service_name": "scanner",
                    "service_product": scanner,
                }
                if probe is not None:
                    info["service_extrainfo"] = "TCP probe %s" % probe
                spec.setdefault("infos", {}).update(info)
            else:
                probe = utils.get_nmap_probes("tcp").get(data)
                if probe is not None:
                    spec.setdefault("infos", {}).update({
                        "service_name": "scanner",
                        "service_product": "Nmap",
                        "service_extrainfo": "TCP probe %s" % probe,
                    })
    elif spec["recontype"] == "UDP_HONEYPOT_HIT":
        data = utils.nmap_decode_data(spec["value"])
        if data in scanners.UDP_PROBES:
            scanner, probe = scanners.UDP_PROBES[data]
            info = {
                "service_name": "scanner",
                "service_product": scanner,
            }
            if probe is not None:
                info["service_extrainfo"] = "UDP probe %s" % probe
            spec.setdefault("infos", {}).update(info)
        else:
            probe = utils.get_nmap_probes("udp").get(data)
            if probe is not None:
                spec.setdefault("infos", {}).update({
                    "service_name": "scanner",
                    "service_product": "Nmap",
                    "service_extrainfo": "UDP probe %s" % probe,
                })
            else:
                payload = utils.get_nmap_udp_payloads().get(data)
                if payload is not None:
                    spec.setdefault("infos", {}).update({
                        "service_name": "scanner",
                        "service_product": "Nmap",
                        "service_extrainfo": "UDP payload %s" % payload,
                    })
    elif spec["recontype"] == "STUN_HONEYPOT_REQUEST":
        spec["value"] = utils.nmap_decode_data(spec["value"])
    # SSL_{CLIENT,SERVER} JA3
    elif (spec["recontype"] == "SSL_CLIENT" and spec["source"] == "ja3") or (
            spec["recontype"] == "SSL_SERVER"
            and spec["source"].startswith("ja3-")):
        value = spec["value"]
        spec.setdefault("infos", {})["raw"] = value
        spec["value"] = hashlib.new("md5", value.encode()).hexdigest()
        if spec["recontype"] == "SSL_SERVER":
            clientvalue = spec["source"][4:]
            spec["infos"].setdefault("client", {})["raw"] = clientvalue
            spec["source"] = "ja3-%s" % hashlib.new(
                "md5",
                clientvalue.encode(),
            ).hexdigest()
    # SSH_{CLIENT,SERVER}_HASSH
    elif spec["recontype"] in ["SSH_CLIENT_HASSH", "SSH_SERVER_HASSH"]:
        value = spec["value"]
        spec.setdefault("infos", {})["raw"] = value
        spec["value"] = hashlib.new("md5", value.encode()).hexdigest()
    # Check DNS Blacklist answer
    elif spec["recontype"] == "DNS_ANSWER":
        if any((spec.get("value") or "").endswith(dnsbl)
               for dnsbl in config.DNS_BLACKLIST_DOMAINS):
            dnsbl_val = spec["value"]
            match = DNSBL_START.search(dnsbl_val)
            if match is not None:
                spec["recontype"] = "DNS_BLACKLIST"
                spec["value"] = spec.get("addr")
                spec["source"] = "%s-%s" % (dnsbl_val[match.end():],
                                            spec["source"])
                addr = match.group()
                # IPv4
                if addr.count(".") == 4:
                    spec["addr"] = ".".join(addr.split(".")[3::-1])
                # IPv6
                else:
                    spec["addr"] = utils.int2ip6(
                        int(addr.replace(".", "")[::-1], 16))
    yield spec
def get_nmap_action(action):
    flt_params = get_nmap_base()
    preamble = "[\n"
    postamble = "]\n"
    r2res = lambda x: x
    if action == "timeline":
        if hasattr(db.nmap, "get_open_port_count"):
            result = list(db.nmap.get_open_port_count(
                flt_params.flt,
                archive=flt_params.archive,
            ))
            count = len(result)
        else:
            result = db.nmap.get(
                flt_params.flt, archive=flt_params.archive,
                fields=['addr', 'starttime', 'openports.count'])
            count = result.count()
        if request.params.get("modulo") is None:
            r2time = lambda r: int(utils.datetime2timestamp(r['starttime']))
        else:
            r2time = lambda r: (int(utils.datetime2timestamp(r['starttime']))
                                % int(request.params.get("modulo")))
        if flt_params.ipsasnumbers:
            r2res = lambda r: [r2time(r), utils.force_ip2int(r['addr']),
                               r['openports']['count']]
        else:
            r2res = lambda r: [r2time(r), utils.force_int2ip(r['addr']),
                               r['openports']['count']]
    elif action == "coordinates":
        preamble = '{"type": "GeometryCollection", "geometries": [\n'
        postamble = ']}\n'
        result = list(db.nmap.getlocations(flt_params.flt,
                                           archive=flt_params.archive))
        count = len(result)
        r2res = lambda r: {
            "type": "Point",
            "coordinates": r['_id'],
            "properties": {"count": r['count']},
        }
    elif action == "countopenports":
        if hasattr(db.nmap, "get_open_port_count"):
            result = db.nmap.get_open_port_count(flt_params.flt,
                                                 archive=flt_params.archive)
        else:
            result = db.nmap.get(flt_params.flt,
                                 archive=flt_params.archive,
                                 fields=['addr', 'openports.count'])
        if hasattr(result, "count"):
            count = result.count()
        else:
            count = db.nmap.count(flt_params.flt,
                                  archive=flt_params.archive,
                                  fields=['addr', 'openports.count'])
        if flt_params.ipsasnumbers:
            r2res = lambda r: [utils.force_ip2int(r['addr']),
                               r['openports']['count']]
        else:
            r2res = lambda r: [utils.force_int2ip(r['addr']),
                               r['openports']['count']]
    elif action == "ipsports":
        if hasattr(db.nmap, "get_ips_ports"):
            result = list(db.nmap.get_ips_ports(flt_params.flt,
                                                archive=flt_params.archive))
            count = sum(len(host.get('ports', [])) for host in result)
        else:
            result = db.nmap.get(
                flt_params.flt, archive=flt_params.archive,
                fields=['addr', 'ports.port', 'ports.state_state'])
            count = sum(len(host.get('ports', [])) for host in result)
            result.rewind()
        if flt_params.ipsasnumbers:
            r2res = lambda r: [
                utils.force_ip2int(r['addr']),
                [[p['port'], p['state_state']]
                 for p in r.get('ports', []) if 'state_state' in p],
            ]
        else:
            r2res = lambda r: [
                utils.force_int2ip(r['addr']),
                [[p['port'], p['state_state']]
                 for p in r.get('ports', []) if 'state_state' in p],
            ]
    elif action == "onlyips":
        result = db.nmap.get(flt_params.flt, archive=flt_params.archive,
                             fields=['addr'])
        if hasattr(result, "count"):
            count = result.count()
        else:
            count = db.nmap.count(flt_params.flt,
                                  archive=flt_params.archive,
                                  fields=['addr'])
        if flt_params.ipsasnumbers:
            r2res = lambda r: utils.force_ip2int(r['addr'])
        else:
            r2res = lambda r: utils.force_int2ip(r['addr'])
    elif action == "diffcats":
        if request.params.get("onlydiff"):
            output = db.nmap.diff_categories(request.params.get("cat1"),
                                             request.params.get("cat2"),
                                             flt=flt_params.flt,
                                             include_both_open=False)
        else:
            output = db.nmap.diff_categories(request.params.get("cat1"),
                                             request.params.get("cat2"),
                                             flt=flt_params.flt)
        count = 0
        result = {}
        if flt_params.ipsasnumbers:
            for res in output:
                result.setdefault(res["addr"], []).append([res['port'],
                                                           res['value']])
                count += 1
        else:
            for res in output:
                result.setdefault(utils.int2ip(res["addr"]),
                                  []).append([res['port'], res['value']])
                count += 1
        result = viewitems(result)
    if flt_params.callback is not None:
        if count >= config.WEB_WARN_DOTS_COUNT:
            yield (
                'if(confirm("You are about to ask your browser to display %d '
                'dots, which is a lot and might slow down, freeze or crash '
                'your browser. Do you want to continue?")) {\n' % count
            )
        yield '%s(\n' % flt_params.callback
    yield preamble
    # hack to avoid a trailing comma
    result = iter(result)
    try:
        rec = next(result)
    except StopIteration:
        pass
    else:
        yield json.dumps(r2res(rec))
        for rec in result:
            yield ",\n" + json.dumps(r2res(rec))
        yield "\n"
    yield postamble
    if flt_params.callback is not None:
        yield ");"
        if count >= config.WEB_WARN_DOTS_COUNT:
            yield '}\n'
    else:
        yield "\n"
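# Shape of one "timeline" row emitted through r2res() above when
# ipsasnumbers is set (values are made up): [timestamp, address as an
# integer, open port count]:
#
#     [1546300800, 3325256711, 4]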
def convert_ip(cls, addr):
    return utils.force_ip2int(addr)
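# A stdlib-only sketch of what utils.force_ip2int is assumed to do for a
# dotted-quad IPv4 string (the real IVRE helper also accepts integers and
# handles IPv6):
import socket
import struct

def _ip2int_sketch(addr):
    # "10.0.0.1" -> 167772161
    return struct.unpack(">I", socket.inet_aton(addr))[0]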
def get_passive():
    """Get records from Passive database

    :query str q: query (only used for limit/skip and sort)
    :query str f: filter
    :query str callback: callback to use for JSONP results
    :query bool ipsasnumbers: to get IP addresses as numbers rather
        than as strings
    :query bool datesasstrings: to get dates as strings rather than as
        timestamps
    :query str format: "json" (the default) or "ndjson"
    :status 200: no error
    :status 400: invalid referer
    :>jsonarr object: results

    """
    flt_params = get_base(db.passive)
    # PostgreSQL: the query plan is affected by the limit and gives
    # really poor results. This is a temporary workaround (look for
    # XXX-WORKAROUND-PGSQL).
    # result = db.passive.get(flt_params.flt, limit=flt_params.limit,
    #                         skip=flt_params.skip, sort=flt_params.sortby)
    result = db.passive.get(
        flt_params.flt,
        skip=flt_params.skip,
        sort=flt_params.sortby,
        fields=flt_params.fields,
    )
    if flt_params.callback is None:
        if flt_params.fmt == "json":
            yield "[\n"
    else:
        yield "%s([\n" % flt_params.callback
    # XXX-WORKAROUND-PGSQL
    # for rec in result:
    for i, rec in enumerate(result):
        try:
            del rec["_id"]
        except KeyError:
            pass
        if "addr" in rec and flt_params.ipsasnumbers:
            rec["addr"] = utils.force_ip2int(rec["addr"])
        if not flt_params.datesasstrings:
            for field in db.passive.datetime_fields:
                _set_datetime_field(db.passive, rec, field)
        if rec.get("recontype") == "SSL_SERVER" and rec.get("source") in {
                "cert",
                "cacert",
        }:
            rec["value"] = utils.encode_b64(rec["value"]).decode()
        if flt_params.fmt == "ndjson":
            yield "%s\n" % json.dumps(rec, default=utils.serialize)
        else:
            yield "%s\t%s" % (
                "" if i == 0 else ",\n",
                json.dumps(rec, default=utils.serialize),
            )
        if flt_params.limit and i + 1 >= flt_params.limit:
            break
    if flt_params.callback is None:
        if flt_params.fmt == "json":
            yield "\n]\n"
    else:
        yield "\n]);\n"
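# Output framing sketch for the generator above (derived from the code,
# values illustrative): with format=json and no callback the stream is
# "[\n" followed by tab-indented records joined by ",\n" and closed by
# "\n]\n"; with a callback "cb" it is wrapped as JSONP, "cb([\n...\n]);\n";
# with format=ndjson each record is emitted as a bare JSON line with no
# brackets at all.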
def _prepare_rec(spec, ignorenets, neverignore):
    # First of all, let's see if we are supposed to ignore this spec,
    # and if so, do so.
    if 'addr' in spec and \
       spec.get('source') not in neverignore.get(spec['recontype'], []):
        for start, stop in ignorenets.get(spec['recontype'], ()):
            if start <= utils.force_ip2int(spec['addr']) <= stop:
                return None
    # Then, let's clean up the records.
    # Change Symantec's random user agents (matching SYMANTEC_UA) to
    # the constant string 'SymantecRandomUserAgent'.
    if spec['recontype'] == 'HTTP_CLIENT_HEADER' and \
       spec.get('source') == 'USER-AGENT':
        if SYMANTEC_UA.match(spec['value']):
            spec['value'] = 'SymantecRandomUserAgent'
        elif KASPERSKY_UA.match(spec['value']):
            spec['value'] = 'KasperskyWeirdUserAgent'
        else:
            match = SYMANTEC_SEP_UA.match(spec['value'])
            if match is not None:
                spec['value'] = '%s%s' % match.groups()
    # Change any Digest authorization header to remove non-constant
    # information. On the one hand we lose the information needed to
    # try to recover the passwords, but on the other hand we store
    # specs with different challenges but the same username, realm,
    # host and sensor in the same records.
    elif (spec['recontype'] in {'HTTP_CLIENT_HEADER',
                                'HTTP_CLIENT_HEADER_SERVER'} and
          spec.get('source') in {'AUTHORIZATION', 'PROXY-AUTHORIZATION'}):
        value = spec['value']
        if value:
            authtype = value.split(None, 1)[0]
            if authtype.lower() == 'digest':
                try:
                    # we only keep relevant info
                    spec['value'] = '%s %s' % (authtype, ','.join(
                        val for val in _split_digest_auth(value[6:].strip())
                        if DIGEST_AUTH_INFOS.match(val)))
                except Exception:
                    utils.LOGGER.warning("Cannot parse digest error for %r",
                                         spec, exc_info=True)
            elif ntlm._is_ntlm_message(value):
                # NTLM_NEGOTIATE and NTLM_AUTHENTICATE
                try:
                    auth = utils.decode_b64(value.split(None, 1)[1].encode())
                except (UnicodeDecodeError, TypeError, ValueError,
                        binascii.Error):
                    pass
                else:
                    # only rewrite the value when the base64 payload could
                    # actually be decoded
                    spec['value'] = "%s %s" % (
                        value.split(None, 1)[0],
                        ntlm._ntlm_dict2string(ntlm.ntlm_extract_info(auth)),
                    )
            elif authtype.lower() in {'negotiate', 'kerberos', 'oauth'}:
                spec['value'] = authtype
    elif (spec['recontype'] == 'HTTP_SERVER_HEADER' and
          spec.get('source') in {'WWW-AUTHENTICATE', 'PROXY-AUTHENTICATE'}):
        value = spec['value']
        if value:
            authtype = value.split(None, 1)[0]
            if authtype.lower() == 'digest':
                try:
                    # we only keep relevant info
                    spec['value'] = '%s %s' % (authtype, ','.join(
                        val for val in _split_digest_auth(value[6:].strip())
                        if DIGEST_AUTH_INFOS.match(val)))
                except Exception:
                    utils.LOGGER.warning("Cannot parse digest error for %r",
                                         spec, exc_info=True)
            elif ntlm._is_ntlm_message(value):
                # NTLM_CHALLENGE
                try:
                    auth = utils.decode_b64(value.split(None, 1)[1].encode())
                except (UnicodeDecodeError, TypeError, ValueError,
                        binascii.Error):
                    pass
                else:
                    spec['value'] = "%s %s" % (
                        value.split(None, 1)[0],
                        ntlm._ntlm_dict2string(ntlm.ntlm_extract_info(auth)),
                    )
            elif authtype.lower() in {'negotiate', 'kerberos', 'oauth'}:
                spec['value'] = authtype
    # TCP server banners: try to normalize data
    elif spec['recontype'] == 'TCP_SERVER_BANNER':
        newvalue = value = utils.nmap_decode_data(spec['value'])
        for pattern, replace in TCP_SERVER_PATTERNS:
            if pattern.search(newvalue):
                newvalue = pattern.sub(replace, newvalue)
        if newvalue != value:
            spec['value'] = utils.nmap_encode_data(newvalue)
    elif spec['recontype'] == 'TCP_CLIENT_BANNER':
        probe = utils.get_nmap_probes('tcp').get(
            utils.nmap_decode_data(spec['value']))
        if probe is not None:
            spec.setdefault('infos', {}).update({
                'service_name': 'scanner',
                'service_product': 'Nmap',
                'service_extrainfo': 'TCP probe %s' % probe,
            })
    elif spec['recontype'] == 'UDP_HONEYPOT_HIT':
        data = utils.nmap_decode_data(spec['value'])
        probe = utils.get_nmap_probes('udp').get(data)
        if probe is not None:
            spec.setdefault('infos', {}).update({
                'service_name': 'scanner',
                'service_product': 'Nmap',
                'service_extrainfo': 'UDP probe %s' % probe,
            })
        else:
            payload = utils.get_nmap_udp_payloads().get(data)
            if payload is not None:
                spec.setdefault('infos', {}).update({
                    'service_name': 'scanner',
                    'service_product': 'Nmap',
                    'service_extrainfo': 'UDP payload %s' % payload,
                })
    # SSL_{CLIENT,SERVER} JA3
    elif ((spec['recontype'] == 'SSL_CLIENT' and spec['source'] == 'ja3')
          or (spec['recontype'] == 'SSL_SERVER'
              and spec['source'].startswith('ja3-'))):
        value = spec['value']
        spec.setdefault('infos', {})['raw'] = value
        spec['value'] = hashlib.new("md5", value.encode()).hexdigest()
        if spec['recontype'] == 'SSL_SERVER':
            clientvalue = spec['source'][4:]
            spec['infos'].setdefault('client', {})['raw'] = clientvalue
            spec['source'] = 'ja3-%s' % hashlib.new(
                "md5",
                clientvalue.encode(),
            ).hexdigest()
    # SSH_{CLIENT,SERVER}_HASSH
    elif spec['recontype'] in ['SSH_CLIENT_HASSH', 'SSH_SERVER_HASSH']:
        value = spec['value']
        spec.setdefault('infos', {})['raw'] = value
        spec['value'] = hashlib.new("md5", value.encode()).hexdigest()
    # Check DNS Blacklist answer
    elif spec['recontype'] == 'DNS_ANSWER':
        if any((spec.get('value') or "").endswith(dnsbl)
               for dnsbl in config.DNS_BLACKLIST_DOMAINS):
            dnsbl_val = spec['value']
            match = DNSBL_START.search(dnsbl_val)
            if match is not None:
                spec['recontype'] = 'DNS_BLACKLIST'
                spec['value'] = spec.get('addr')
                spec.update({
                    'source': "%s-%s" % (dnsbl_val[match.end():],
                                         spec['source']),
                })
                addr = match.group()
                # IPv4
                if addr.count('.') == 4:
                    spec['addr'] = '.'.join(addr.split('.')[3::-1])
                # IPv6
                else:
                    spec['addr'] = utils.int2ip6(
                        int(addr.replace('.', '')[::-1], 16))
    return spec
def get_nmap(subdb):
    """Get records from Nmap & View databases

    :param str subdb: database to query (must be "scans" or "view")
    :query str q: query (including limit/skip and sort)
    :query str callback: callback to use for JSONP results
    :query bool ipsasnumbers: to get IP addresses as numbers rather
        than as strings
    :query bool datesasstrings: to get dates as strings rather than as
        timestamps
    :query str format: "json" (the default) or "ndjson"
    :status 200: no error
    :status 400: invalid referer
    :>jsonarr object: results

    """
    subdb_tool = "view" if subdb == 'view' else "scancli"
    subdb = db.view if subdb == 'view' else db.nmap
    flt_params = get_nmap_base(subdb)
    # PostgreSQL: the query plan is affected by the limit and gives
    # really poor results. This is a temporary workaround (look for
    # XXX-WORKAROUND-PGSQL).
    # result = subdb.get(flt_params.flt, limit=flt_params.limit,
    #                    skip=flt_params.skip, sort=flt_params.sortby)
    result = subdb.get(flt_params.flt, skip=flt_params.skip,
                       sort=flt_params.sortby)
    if flt_params.unused:
        msg = 'Option%s not understood: %s' % (
            's' if len(flt_params.unused) > 1 else '',
            ', '.join(flt_params.unused),
        )
        if flt_params.callback is not None:
            yield webutils.js_alert("param-unused", "warning", msg)
        utils.LOGGER.warning(msg)
    elif flt_params.callback is not None:
        yield webutils.js_del_alert("param-unused")
    if config.DEBUG:
        msg1 = "filter: %s" % subdb.flt2str(flt_params.flt)
        msg2 = "user: %r" % webutils.get_user()
        utils.LOGGER.debug(msg1)
        utils.LOGGER.debug(msg2)
        if flt_params.callback is not None:
            yield webutils.js_alert("filter", "info", msg1)
            yield webutils.js_alert("user", "info", msg2)
    version_mismatch = {}
    if flt_params.callback is None:
        if flt_params.fmt == 'json':
            yield "[\n"
    else:
        yield "%s([\n" % flt_params.callback
    # XXX-WORKAROUND-PGSQL
    # for rec in result:
    for i, rec in enumerate(result):
        for fld in ['_id', 'scanid']:
            try:
                del rec[fld]
            except KeyError:
                pass
        if flt_params.ipsasnumbers:
            rec['addr'] = utils.force_ip2int(rec['addr'])
        for field in ['starttime', 'endtime']:
            if field in rec:
                if not flt_params.datesasstrings:
                    rec[field] = int(utils.datetime2timestamp(rec[field]))
        for port in rec.get('ports', []):
            if 'screendata' in port:
                port['screendata'] = utils.encode_b64(port['screendata'])
            for script in port.get('scripts', []):
                if "masscan" in script:
                    try:
                        del script['masscan']['raw']
                    except KeyError:
                        pass
        if not flt_params.ipsasnumbers:
            if 'traces' in rec:
                for trace in rec['traces']:
                    trace['hops'].sort(key=lambda x: x['ttl'])
                    for hop in trace['hops']:
                        hop['ipaddr'] = utils.force_int2ip(hop['ipaddr'])
        addresses = rec.get('addresses', {}).get('mac')
        if addresses:
            newaddresses = []
            for addr in addresses:
                manuf = utils.mac2manuf(addr)
                if manuf and manuf[0]:
                    newaddresses.append({'addr': addr, 'manuf': manuf[0]})
                else:
                    newaddresses.append({'addr': addr})
            rec['addresses']['mac'] = newaddresses
        if flt_params.fmt == 'ndjson':
            yield "%s\n" % json.dumps(rec, default=utils.serialize)
        else:
            yield "%s\t%s" % ('' if i == 0 else ',\n',
                              json.dumps(rec, default=utils.serialize))
        check = subdb.cmp_schema_version_host(rec)
        if check:
            version_mismatch[check] = version_mismatch.get(check, 0) + 1
        # XXX-WORKAROUND-PGSQL
        if flt_params.limit and i + 1 >= flt_params.limit:
            break
    if flt_params.callback is None:
        if flt_params.fmt == 'json':
            yield "\n]\n"
    else:
        yield "\n]);\n"
    messages = {
        1: lambda count: ("%d document%s displayed %s out-of-date. Please "
                          "run the following command: "
                          "'ivre %s --update-schema'" %
                          (count, 's' if count > 1 else '',
                           'are' if count > 1 else 'is', subdb_tool)),
        -1: lambda count: ('%d document%s displayed ha%s been inserted by '
                           'a more recent version of IVRE. Please update '
                           'IVRE!' % (count, 's' if count > 1 else '',
                                      've' if count > 1 else 's')),
    }
    for mismatch, count in viewitems(version_mismatch):
        message = messages[mismatch](count)
        if flt_params.callback is not None:
            yield webutils.js_alert(
                "version-mismatch-%d" % ((mismatch + 1) // 2),
                "warning", message)
        utils.LOGGER.warning(message)