def displayfunction_json(cur: Iterable[Record], dbase: DB,
                         no_screenshots: bool = False) -> None:
    indent: Optional[int]
    if os.isatty(sys.stdout.fileno()):
        indent = 4
    else:
        indent = None
    for h in cur:
        for fld in ["_id", "scanid"]:
            try:
                del h[fld]
            except KeyError:
                pass
        for port in h.get("ports", []):
            if no_screenshots:
                for fname in ["screenshot", "screendata"]:
                    if fname in port:
                        del port[fname]
            elif "screendata" in port:
                port["screendata"] = utils.encode_b64(
                    dbase.from_binary(port["screendata"]))
            for script in port.get("scripts", []):
                if "masscan" in script and "raw" in script["masscan"]:
                    script["masscan"]["raw"] = utils.encode_b64(
                        dbase.from_binary(script["masscan"]["raw"]))
        json.dump(h, sys.stdout, indent=indent, default=dbase.serialize)
        sys.stdout.write("\n")

def displayfunction(x):
    if os.isatty(sys.stdout.fileno()):
        indent = 4
    else:
        indent = None
    for h in x:
        for fld in ['_id', 'scanid']:
            try:
                del h[fld]
            except KeyError:
                pass
        for port in h.get('ports', []):
            if args.no_screenshots:
                for fname in ['screenshot', 'screendata']:
                    if fname in port:
                        del port[fname]
            elif 'screendata' in port:
                port['screendata'] = utils.encode_b64(
                    db.db.nmap.from_binary(port['screendata']))
            for script in port.get('scripts', []):
                if 'masscan' in script and 'raw' in script['masscan']:
                    script['masscan']['raw'] = utils.encode_b64(
                        db.db.nmap.from_binary(
                            script['masscan']['raw']))
        print(
            json.dumps(h, indent=indent, default=db.db.nmap.serialize))

def displayfunction_json(cur, db, no_screenshots=False):
    if os.isatty(sys.stdout.fileno()):
        indent = 4
    else:
        indent = None
    for h in cur:
        for fld in ['_id', 'scanid']:
            try:
                del h[fld]
            except KeyError:
                pass
        for port in h.get('ports', []):
            if no_screenshots:
                for fname in ['screenshot', 'screendata']:
                    if fname in port:
                        del port[fname]
            elif 'screendata' in port:
                port['screendata'] = utils.encode_b64(
                    db.from_binary(port['screendata'])
                )
            for script in port.get('scripts', []):
                if 'masscan' in script and 'raw' in script['masscan']:
                    script['masscan']['raw'] = utils.encode_b64(
                        db.from_binary(
                            script['masscan']['raw']
                        )
                    )
        print(json.dumps(h, indent=indent, default=db.serialize))

def _extract_passive_SSL_cert(rec, cacert=False, server=True):
    script = {"id": "ssl-cacert" if cacert else "ssl-cert"}
    if server:
        port = {
            "state_state": "open",
            "state_reason": "passive",
            "port": rec["port"],
            "protocol": rec.get("protocol", "tcp"),
            "service_tunnel": "ssl",
        }
    else:
        port = {
            "port": -1,
        }
    info = rec["infos"]
    if info:
        pem = []
        pem.append("-----BEGIN CERTIFICATE-----")
        pem.extend(wrap(utils.encode_b64(rec["value"]).decode(), 64))
        pem.append("-----END CERTIFICATE-----")
        pem.append("")
        info["pem"] = "\n".join(pem)
        script["output"] = "\n".join(create_ssl_output(info))
        script["ssl-cert"] = [info]
        port["scripts"] = [script]
    elif not server:
        # nothing interesting on a client w/o cert
        return {}
    return {"ports": [port]}

def _extract_passive_SSL_cert(rec, cacert=False, server=True):
    script = {"id": "ssl-cacert" if cacert else "ssl-cert"}
    if server:
        port = {
            'state_state': 'open',
            'state_reason': "passive",
            'port': rec['port'],
            'protocol': rec.get('protocol', 'tcp'),
            'service_tunnel': 'ssl',
        }
    else:
        port = {
            'port': -1,
        }
    info = rec['infos']
    if info:
        pem = []
        pem.append('-----BEGIN CERTIFICATE-----')
        pem.extend(wrap(utils.encode_b64(rec['value']).decode(), 64))
        pem.append('-----END CERTIFICATE-----')
        pem.append('')
        info['pem'] = '\n'.join(pem)
        script['output'] = "\n".join(create_ssl_output(info))
        script['ssl-cert'] = [info]
        port['scripts'] = [script]
    elif not server:
        # nothing interesting on a client w/o cert
        return {}
    return {'ports': [port]}

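# The two _extract_passive_SSL_cert variants above build a PEM blob by
# base64-encoding the DER certificate and wrapping it at 64 columns. A
# minimal standalone sketch of that pattern, using only the standard
# library (the `der` argument is a hypothetical placeholder for the raw
# certificate bytes):
import base64
from textwrap import wrap

def der_to_pem(der):
    # base64-encode the DER payload, then wrap at 64 columns as above
    b64 = base64.b64encode(der).decode()
    lines = ["-----BEGIN CERTIFICATE-----"]
    lines.extend(wrap(b64, 64))
    lines.append("-----END CERTIFICATE-----")
    lines.append("")  # yields a trailing newline once joined
    return "\n".join(lines)
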
def get_anonymized_user():
    """Return the HMAC value of the current user authenticated with
    the HMAC secret.

    """
    return utils.encode_b64(
        hmac.new(config.WEB_SECRET, msg=get_user().encode()).digest()[:9])

def _parse_items(data):
    res = {}
    items = dict(_gen_items(data))
    if 0x50 not in items:
        utils.LOGGER.warning('No User Info in items [%r]', items)
        return res
    for itype, ivalue in _gen_items(items[0x50]):
        if itype == 0x51:
            try:
                ivalue = struct.unpack('>I', ivalue)[0]
            except struct.error:
                utils.LOGGER.warning(
                    'Cannot convert max_pdu_length value to an integer [%r]',
                    ivalue,
                )
        else:
            try:
                ivalue = ivalue.decode('ascii')
            # bytes.decode() raises UnicodeDecodeError, not struct.error
            except UnicodeDecodeError:
                utils.LOGGER.warning(
                    'Cannot convert value to an ASCII string [%r]',
                    ivalue,
                )
                ivalue = utils.encode_b64(ivalue).decode()
        try:
            itype = _USER_INFO_ITEMS[itype]
        except KeyError:
            utils.LOGGER.warning('Unknown item type in User Info %02x [%r]',
                                 itype, ivalue)
            itype = "unknown_%02x" % itype
        res[itype] = ivalue
    return res

def count_ips_by_city(country_code, city):
    return count_ips_by_data(
        'GeoLite2-City.dump-IPv4.csv',
        lambda line: line[2] == country_code and line[4] == utils.encode_b64(
            (city or "").encode('utf-8')
        ).decode('utf-8'),
    )

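# The city column of the GeoLite2 dump files is stored base64-encoded,
# so lookup values must be encoded the same way before comparison.
# Assuming utils.encode_b64 is a thin wrapper around base64.b64encode,
# the comparison value for "Paris" would be built like this:
import base64

city = "Paris"
encoded = base64.b64encode((city or "").encode('utf-8')).decode('utf-8')
print(encoded)  # UGFyaXM=
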
def _ntlm_dict2string(dic):
    """
    Returns a string with the keys and values (encoded in base64) of
    the given dict, in the format key1:value1,key2:value2,...
    """
    return ','.join("{}:{}".format(k, (v if k == 'NTLM_Version' else
                                       utils.encode_b64(v.encode()).decode()))
                    for k, v in dic.items())

def _handle_cert(dbase, rec, links):
    """Internal function to handle a record corresponding to an X509
    certificate.

    """
    raw_data = dbase.from_binary(rec['value'])
    cert = Certificate.from_data(raw_data, hash_sha256=rec['infos']['sha256'])
    rec['value'] = encode_b64(raw_data).decode()
    links.update(
        cert.link_to(
            CertificateSubject.get_or_create(
                value=rec['infos']['subject_text']),
            "cert-subject",
            "IVRE - X509 subject",
        ))
    links.update(
        cert.link_to(
            CertificateSubject.get_or_create(
                value=rec['infos']['issuer_text']),
            "cert-issuer",
            "IVRE - X509 issuer",
        ))
    commonname = rec['infos']['subject']['commonName']
    if commonname:
        while commonname.startswith('*.'):
            commonname = commonname[2:]
        if commonname:
            _try_link(links, cert, Hostname, commonname, "cert-commonname",
                      "IVRE - X509 Subject commonName")
    for san in rec['infos'].get('san', []):
        if san.startswith('DNS:'):
            san = san[4:]
            while san.startswith('*.'):
                san = san[2:]
            if san:
                _try_link(links, cert, Hostname, san, "cert-san",
                          "IVRE - X509 subjectAltName")
        elif san.startswith('IP Address:'):
            san = san[11:]
            if san:
                _try_link(links, cert, Ip, san, "cert-san",
                          "IVRE - X509 subjectAltName")
        elif san.startswith('email:'):
            san = san[6:]
            if san:
                _try_link(links, cert, Email, san, "cert-san",
                          "IVRE - X509 subjectAltName")
        elif san.startswith('URI:'):
            san = san[4:]
            if san:
                _try_link(links, cert, Url, san, "cert-san",
                          "IVRE - X509 subjectAltName")
        else:
            LOG.debug('_handle_rec: cannot handle subjectAltName: %r', san)
    return cert

def get_anonymized_user():
    """Return the HMAC value of the current user authenticated with
    the HMAC secret.

    """
    try:
        secret = config.WEB_SECRET.encode()
    except AttributeError:
        secret = config.WEB_SECRET
    return utils.encode_b64(
        hmac.new(secret, msg=get_user().encode()).digest()[:9])

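# The anonymized identifier above is the first 9 bytes of an HMAC
# digest, base64-encoded, i.e. a stable 12-character token per
# (secret, user) pair. Note the variants above call hmac.new() without
# a digestmod, which relied on the old MD5 default and raises a
# TypeError on Python 3.8+; this sketch passes it explicitly. The
# secret and user name are hypothetical placeholders:
import base64
import hashlib
import hmac

secret = b"change-me"
user = "admin"
token = base64.b64encode(
    hmac.new(secret, msg=user.encode(), digestmod=hashlib.md5).digest()[:9]
).decode()
print(token)  # 12 characters, deterministic for a given (secret, user)
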
def locids_by_city(country_code, city_name):
    fdesc = csv.DictReader(codecs.open(os.path.join(
        config.GEOIP_PATH,
        'GeoLite2-City-Locations-%s.csv' % config.GEOIP_LANG,
    ), encoding='utf-8'))
    city_name = utils.encode_b64(
        (city_name or "").encode('utf-8')).decode('utf-8')
    for line in fdesc:
        if (line['country_iso_code'], line['city_name']) == \
           (country_code, city_name):
            yield int(line['geoname_id'])

def dump_city_ranges(self, fdesc):
    for data in self.db_city.get_ranges(
            [
                "country->iso_code",
                "subdivisions->0->iso_code",
                "city->names->%s" % config.GEOIP_LANG,
                "city->geoname_id",
            ],
            cond=lambda line: (line[2] is not None and
                               (line[3] is not None or
                                line[4] is not None)),
    ):
        if data[0] > 0xffffffff:
            # only IPv4
            break
        fdesc.write('%d,%d,%s,%s,%s,%s\n' % (
            data[:4] +
            (utils.encode_b64(data[4].encode('utf-8')).decode('utf-8'),) +
            data[5:]
        ))

def disp_recs_json(flt, sort, limit, skip):
    if os.isatty(sys.stdout.fileno()):
        indent = 4
    else:
        indent = None
    for rec in db.passive.get(flt, sort=sort, limit=limit, skip=skip):
        for fld in ['_id', 'scanid']:
            try:
                del rec[fld]
            except KeyError:
                pass
        if (rec.get('recontype') == 'SSL_SERVER' and
                rec.get('source') in {'cert', 'cacert'}):
            rec['value'] = utils.encode_b64(rec['value']).decode()
        print(json.dumps(rec, indent=indent, default=db.passive.serialize))

def locids_by_city(country_code, city_name):
    fdesc = csv.DictReader(
        codecs.open(
            os.path.join(
                config.GEOIP_PATH,
                "GeoLite2-City-Locations-%s.csv" % config.GEOIP_LANG,
            ),
            encoding="utf-8",
        ))
    city_name = utils.encode_b64(
        (city_name or "").encode("utf-8")).decode("utf-8")
    for line in fdesc:
        if (line["country_iso_code"], line["city_name"]) == (country_code,
                                                             city_name):
            yield int(line["geoname_id"])

def disp_recs_json(flt, sort, limit, skip):
    if os.isatty(sys.stdout.fileno()):
        indent = 4
    else:
        indent = None
    for rec in db.passive.get(flt, sort=sort, limit=limit, skip=skip):
        for fld in ['_id', 'scanid']:
            try:
                del rec[fld]
            except KeyError:
                pass
        if rec.get('recontype') == 'SSL_SERVER' and \
           rec.get('source') == 'cert':
            rec['value'] = utils.encode_b64(rec['value']).decode()
        print(json.dumps(rec, indent=indent, default=db.passive.serialize))

def _parse_items(data: bytes) -> Dict[str, Union[int, str]]:
    res: Dict[str, Union[int, str]] = {}
    items = dict(_gen_items(data))
    if 0x50 not in items:
        utils.LOGGER.warning("No User Info in items [%r]", items)
        return res
    ivalue: bytes
    ivalue_parsed: Union[int, str]
    for itype, ivalue in _gen_items(items[0x50]):
        if itype == 0x51:
            try:
                ivalue_parsed = cast(int, struct.unpack(">I", ivalue)[0])
            except struct.error:
                utils.LOGGER.warning(
                    "Cannot convert max_pdu_length value to an integer [%r]",
                    ivalue,
                )
                ivalue_parsed = utils.encode_b64(ivalue).decode()
        else:
            try:
                ivalue_parsed = ivalue.decode("ascii")
            # bytes.decode() raises UnicodeDecodeError, not struct.error
            except UnicodeDecodeError:
                utils.LOGGER.warning(
                    "Cannot convert value to an ASCII string [%r]",
                    ivalue,
                )
                ivalue_parsed = utils.encode_b64(ivalue).decode()
        try:
            itype_parsed = _USER_INFO_ITEMS[itype]
        except KeyError:
            utils.LOGGER.warning(
                "Unknown item type in User Info %02x [%r]", itype, ivalue
            )
            itype_parsed = "unknown_%02x" % itype
        res[itype_parsed] = ivalue_parsed
    return res

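# In the DICOM user-information parser above, item 0x51 (the maximum
# PDU length) is a big-endian 32-bit integer; a quick standalone check
# of that unpack, with a hypothetical captured value:
import struct

ivalue = b"\x00\x00\x40\x00"  # 0x51 item value for a 16 KiB maximum PDU
max_pdu_length, = struct.unpack(">I", ivalue)
print(max_pdu_length)  # 16384
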
def displayfunction_json(cur, dbase, no_screenshots=False):
    if os.isatty(sys.stdout.fileno()):
        indent = 4
    else:
        indent = None
    for h in cur:
        for fld in ["_id", "scanid"]:
            try:
                del h[fld]
            except KeyError:
                pass
        for port in h.get("ports", []):
            if no_screenshots:
                for fname in ["screenshot", "screendata"]:
                    if fname in port:
                        del port[fname]
            elif "screendata" in port:
                port["screendata"] = utils.encode_b64(
                    dbase.from_binary(port["screendata"]))
            for script in port.get("scripts", []):
                if "masscan" in script and "raw" in script["masscan"]:
                    script["masscan"]["raw"] = utils.encode_b64(
                        dbase.from_binary(script["masscan"]["raw"]))
        print(json.dumps(h, indent=indent, default=dbase.serialize))

def _ntlm_dict2string(dic):
    """
    Returns a string with the keys and values (encoded in base64) of
    the given dict, in the format key1:value1,key2:value2,...
    """
    return ",".join(
        "{}:{}".format(
            k,
            (
                v
                if k in {"NTLM_Version", "ntlm-fingerprint"}
                else utils.encode_b64(v.encode()).decode()
            ),
        )
        for k, v in dic.items()
    )

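# A quick sketch of the output format produced by the two
# _ntlm_dict2string variants above, with the b64 helper inlined
# (assuming utils.encode_b64 wraps base64.b64encode; the sample dict
# is hypothetical):
import base64

def ntlm_dict2string(dic):
    # same logic as above: keep NTLM_Version readable, b64 the rest
    return ",".join(
        "{}:{}".format(
            k,
            v if k in {"NTLM_Version", "ntlm-fingerprint"}
            else base64.b64encode(v.encode()).decode(),
        )
        for k, v in dic.items()
    )

print(ntlm_dict2string({"NTLM_Version": "6.1.7601", "Target_Name": "DOMAIN"}))
# NTLM_Version:6.1.7601,Target_Name:RE9NQUlO
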
def disp_recs_json(flt, sort, limit, skip):
    if os.isatty(sys.stdout.fileno()):
        indent = 4
    else:
        indent = None
    for rec in db.passive.get(flt, sort=sort, limit=limit, skip=skip):
        for fld in ["_id", "scanid"]:
            try:
                del rec[fld]
            except KeyError:
                pass
        if rec.get("recontype") == "SSL_SERVER" and rec.get("source") in {
            "cert",
            "cacert",
        }:
            rec["value"] = utils.encode_b64(rec["value"]).decode()
        print(json.dumps(rec, indent=indent, default=db.passive.serialize))

def _extract_passive_SSH_SERVER_HOSTKEY(rec):
    """Handle SSH host keys."""
    # TODO: should (probably) be merged, sorted by date/time, keep one
    # entry per key type.
    #
    # (MAYBE) we should add a "lastseen" tag to every intel in view.
    value = utils.encode_b64(utils.nmap_decode_data(rec["value"])).decode()
    fingerprint = rec["infos"]["md5"]
    key = {
        "type": rec["infos"]["algo"],
        "key": value,
        "fingerprint": fingerprint,
    }
    if "bits" in rec["infos"]:  # FIXME
        key["bits"] = rec["infos"]["bits"]
    fingerprint = utils.decode_hex(fingerprint)
    script = {
        "id": "ssh-hostkey",
        "ssh-hostkey": [key],
        "output": "\n %s %s (%s)\n%s %s" % (
            key.get("bits", "-"),  # FIXME
            ":".join("%02x" % (ord(i) if isinstance(i, (bytes, str)) else i)
                     for i in fingerprint),
            _KEYS.get(
                key["type"],
                (key["type"][4:] if key["type"][:4] == "ssh-"
                 else key["type"]).upper(),
            ),
            key["type"],
            value,
        ),
        "key": key,
    }
    return {
        "ports": [{
            "state_state": "open",
            "state_reason": "passive",
            "port": rec["port"],
            "protocol": rec.get("protocol", "tcp"),
            "service_name": "ssh",
            "scripts": [script],
        }]
    }

def disp_recs_json(dbase: DBPassive, flt: Filter, sort: Sort,
                   limit: Optional[int], skip: Optional[int]) -> None:
    indent: Optional[int]
    if os.isatty(sys.stdout.fileno()):
        indent = 4
    else:
        indent = None
    for rec in dbase.get(flt, sort=sort, limit=limit, skip=skip):
        try:
            del rec["_id"]
        except KeyError:
            pass
        if rec.get("recontype") == "SSL_SERVER" and rec.get("source") in {
            "cert",
            "cacert",
        }:
            rec["value"] = utils.encode_b64(rec["value"]).decode()
        print(json.dumps(rec, indent=indent, default=dbase.serialize))

def locids_by_city(country_code: str,
                   city_name: str) -> Generator[int, None, None]:
    assert config.GEOIP_PATH is not None
    with codecs.open(
        os.path.join(
            config.GEOIP_PATH,
            "GeoLite2-City-Locations-%s.csv" % config.GEOIP_LANG,
        ),
        encoding="utf-8",
    ) as fdesc:
        csvfd = csv.DictReader(fdesc)
        city_name = utils.encode_b64(
            (city_name or "").encode("utf-8")).decode("utf-8")
        for line in csvfd:
            if (line["country_iso_code"], line["city_name"]) == (
                country_code,
                city_name,
            ):
                yield int(line["geoname_id"])

def _extract_passive_SSH_SERVER_HOSTKEY(rec):
    """Handle SSH host keys."""
    # TODO: should (probably) be merged, sorted by date/time, keep one
    # entry per key type.
    #
    # (MAYBE) we should add a "lastseen" tag to every intel in view.
    value = utils.encode_b64(
        utils.nmap_decode_data(rec.get('fullvalue', rec['value']))).decode()
    fingerprint = rec['infos']['md5']
    key = {
        'type': rec['infos']['algo'],
        'key': value,
        'fingerprint': fingerprint,
    }
    if 'bits' in rec['infos']:  # FIXME
        key['bits'] = rec['infos']['bits']
    fingerprint = utils.decode_hex(fingerprint)
    script = {
        'id': 'ssh-hostkey',
        'ssh-hostkey': [key],
        'output': '\n %s %s (%s)\n%s %s' % (
            key.get('bits', '-'),  # FIXME
            ':'.join('%02x' % (ord(i) if isinstance(i, (bytes, str)) else i)
                     for i in fingerprint),
            _KEYS.get(key['type'],
                      (key['type'][4:] if key['type'][:4] == 'ssh-'
                       else key['type']).upper()),
            key['type'],
            value),
        'key': key,
    }
    return {
        'ports': [{
            'state_state': 'open',
            'state_reason': "passive",
            'port': rec['port'],
            'protocol': rec.get('protocol', 'tcp'),
            'service_name': 'ssh',
            'scripts': [script],
        }]
    }

def serialize(obj):
    """Return a JSON-compatible representation for `obj`"""
    if isinstance(obj, utils.REGEXP_T):
        return {
            "f": "regexp",
            "a": [
                "/%s/%s" % (
                    obj.pattern,
                    "".join(x.lower() for x in "ILMSXU"
                            if getattr(re, x) & obj.flags),
                ),
            ],
        }
    if isinstance(obj, datetime):
        return {"f": "datetime", "a": [obj.timestamp()]}
    if isinstance(obj, bytes):
        return {"f": "bytes", "a": [utils.encode_b64(obj).decode()]}
    raise TypeError("Don't know what to do with %r (%r)" % (obj, type(obj)))

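# This serializer plugs into json.dumps() through the default= hook,
# which the encoder only calls for objects it cannot handle natively.
# A reduced standalone sketch of the round-trip for a bytes value, with
# the b64 helper inlined:
import base64
import json

def serialize_bytes(obj):
    # bytes case only, mirroring the serializer above
    if isinstance(obj, bytes):
        return {"f": "bytes", "a": [base64.b64encode(obj).decode()]}
    raise TypeError("Don't know what to do with %r (%r)" % (obj, type(obj)))

print(json.dumps({"raw": b"\x00\x01"}, default=serialize_bytes))
# {"raw": {"f": "bytes", "a": ["AAE="]}}
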
def disp_recs_json(flt, sort, limit, skip):
    if os.isatty(sys.stdout.fileno()):
        indent = 4
    else:
        indent = None
    for rec in db.passive.get(flt, sort=sort, limit=limit, skip=skip):
        for fld in ['_id', 'scanid']:
            try:
                del rec[fld]
            except KeyError:
                pass
        if 'fullvalue' in rec:
            rec['value'] = rec.pop('fullvalue')
        if 'fullinfos' in rec:
            rec.setdefault('infos', {}).update(rec.pop('fullinfos'))
        if (rec.get('recontype') == 'SSL_SERVER' and
                rec.get('source') == 'cert' and
                isinstance(rec.get('value'), bytes)):
            rec['value'] = utils.encode_b64(rec['value']).decode()
        print(json.dumps(rec, indent=indent, default=db.passive.serialize))

def _extract_passive_SSH_SERVER_HOSTKEY(rec):
    """Handle SSH host keys."""
    # TODO: should (probably) be merged, sorted by date/time, keep one
    # entry per key type.
    #
    # (MAYBE) we should add a "lastseen" tag to every intel in view.
    value = utils.encode_b64(
        utils.nmap_decode_data(rec['value'])
    ).decode()
    fingerprint = rec['infos']['md5']
    key = {'type': rec['infos']['algo'],
           'key': value,
           'fingerprint': fingerprint}
    if 'bits' in rec['infos']:  # FIXME
        key['bits'] = rec['infos']['bits']
    fingerprint = utils.decode_hex(fingerprint)
    script = {
        'id': 'ssh-hostkey',
        'ssh-hostkey': [key],
        'output': '\n %s %s (%s)\n%s %s' % (
            key.get('bits', '-'),  # FIXME
            ':'.join('%02x' % (
                ord(i) if isinstance(i, (bytes, str)) else i
            ) for i in fingerprint),
            _KEYS.get(
                key['type'],
                (key['type'][4:] if key['type'][:4] == 'ssh-'
                 else key['type']).upper()
            ),
            key['type'],
            value
        ),
        'key': key,
    }
    return {'ports': [{
        'state_state': 'open',
        'state_reason': "passive",
        'port': rec['port'],
        'protocol': rec.get('protocol', 'tcp'),
        'service_name': 'ssh',
        'scripts': [script],
    }]}

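# The hostkey variants above format the MD5 fingerprint as
# colon-separated hex; the ord(i)-vs-int dance is a Python 2/3
# compatibility shim (iterating bytes yields str on Python 2 and int on
# Python 3). On Python 3 alone it reduces to a one-liner; the
# fingerprint below is a hypothetical example:
md5_hex = "16272acaa72a39232d0ed3554282cb90"
fingerprint = bytes.fromhex(md5_hex)
print(":".join("%02x" % byte for byte in fingerprint))
# 16:27:2a:ca:a7:2a:39:23:2d:0e:d3:55:42:82:cb:90
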
def main():
    # write headers
    sys.stdout.write(webutils.JS_HEADERS)
    params = webutils.parse_query_string()
    query = webutils.query_from_params(params)
    flt, archive, sortby, unused, skip, limit = webutils.flt_from_query(query)
    if limit is None:
        limit = config.WEB_LIMIT
    if config.WEB_MAXRESULTS is not None:
        limit = min(limit, config.WEB_MAXRESULTS)
    callback = params.get("callback")
    # type of result
    action = params.get("action", "")
    ipsasnumbers = params.get("ipsasnumbers")
    datesasstrings = params.get("datesasstrings")
    if callback is None:
        sys.stdout.write('Content-Disposition: attachment; '
                         'filename="IVRE-results.json"\r\n')
    sys.stdout.write("\r\n")
    # top values
    if action.startswith('topvalues:'):
        field = action[10:]
        if field[0] in '-!':
            field = field[1:]
            least = True
        else:
            least = False
        topnbr = 15
        if ':' in field:
            field, topnbr = field.rsplit(':', 1)
            try:
                topnbr = int(topnbr)
            except ValueError:
                field = '%s:%s' % (field, topnbr)
                topnbr = 15
        series = [{"label": t['_id'], "value": t['count']}
                  for t in db.nmap.topvalues(field, flt=flt, least=least,
                                             topnbr=topnbr, archive=archive)]
        if callback is None:
            sys.stdout.write("%s\n" % json.dumps(series))
        else:
            sys.stdout.write("%s(%s);\n" % (callback, json.dumps(series)))
        exit(0)
    # extract info
    if action in ["onlyips", "ipsports", "timeline", "coordinates",
                  "countopenports", "diffcats"]:
        preamble = "[\n"
        postamble = "]\n"
        r2res = lambda x: x
        if action == "timeline":
            if hasattr(db.nmap, "get_open_port_count"):
                result = list(db.nmap.get_open_port_count(flt,
                                                          archive=archive))
                count = len(result)
            else:
                result = db.nmap.get(
                    flt, archive=archive,
                    fields=['addr', 'starttime', 'openports.count']
                )
                count = result.count()
            if params.get("modulo") is None:
                r2time = lambda r: int(r['starttime'].strftime('%s'))
            else:
                r2time = lambda r: (int(r['starttime'].strftime('%s'))
                                    % int(params.get("modulo")))
            if ipsasnumbers:
                r2res = lambda r: [r2time(r), force_ip_int(r['addr']),
                                   r['openports']['count']]
            else:
                r2res = lambda r: [r2time(r), force_ip_str(r['addr']),
                                   r['openports']['count']]
        elif action == "coordinates":
            preamble = '{"type": "GeometryCollection", "geometries": ['
            postamble = ']}'
            result = list(db.nmap.getlocations(flt, archive=archive))
            count = len(result)
            r2res = lambda r: {
                "type": "Point",
                "coordinates": r['_id'],
                "properties": {"count": r['count']},
            }
        elif action == "countopenports":
            if hasattr(db.nmap, "get_open_port_count"):
                result = db.nmap.get_open_port_count(flt, archive=archive)
            else:
                result = db.nmap.get(flt, archive=archive,
                                     fields=['addr', 'openports.count'])
            if hasattr(result, "count"):
                count = result.count()
            else:
                count = db.nmap.count(flt, archive=archive,
                                      fields=['addr', 'openports.count'])
            if ipsasnumbers:
                r2res = lambda r: [force_ip_int(r['addr']),
                                   r['openports']['count']]
            else:
                r2res = lambda r: [force_ip_str(r['addr']),
                                   r['openports']['count']]
        elif action == "ipsports":
            if hasattr(db.nmap, "get_ips_ports"):
                result = list(db.nmap.get_ips_ports(flt, archive=archive))
                count = sum(len(host.get('ports', [])) for host in result)
            else:
                result = db.nmap.get(
                    flt, archive=archive,
                    fields=['addr', 'ports.port', 'ports.state_state']
                )
                count = sum(len(host.get('ports', [])) for host in result)
                result.rewind()
            if ipsasnumbers:
                r2res = lambda r: [
                    force_ip_int(r['addr']),
                    [[p['port'], p['state_state']]
                     for p in r.get('ports', []) if 'state_state' in p]
                ]
            else:
                r2res = lambda r: [
                    force_ip_str(r['addr']),
                    [[p['port'], p['state_state']]
                     for p in r.get('ports', []) if 'state_state' in p]
                ]
        elif action == "onlyips":
            result = db.nmap.get(flt, archive=archive, fields=['addr'])
            if hasattr(result, "count"):
                count = result.count()
            else:
                count = db.nmap.count(flt, archive=archive, fields=['addr'])
            if ipsasnumbers:
                r2res = lambda r: r['addr']
            else:
                r2res = lambda r: utils.int2ip(r['addr'])
        elif action == "diffcats":
            if params.get("onlydiff"):
                output = db.nmap.diff_categories(params.get("cat1"),
                                                 params.get("cat2"),
                                                 flt=flt,
                                                 include_both_open=False)
            else:
                output = db.nmap.diff_categories(params.get("cat1"),
                                                 params.get("cat2"),
                                                 flt=flt)
            count = 0
            result = {}
            if ipsasnumbers:
                for res in output:
                    result.setdefault(res["addr"], []).append([res['port'],
                                                               res['value']])
                    count += 1
            else:
                for res in output:
                    result.setdefault(utils.int2ip(res["addr"]),
                                      []).append([res['port'], res['value']])
                    count += 1
            result = viewitems(result)
        if count >= config.WEB_WARN_DOTS_COUNT:
            sys.stdout.write(
                'if(confirm("You are about to ask your browser to display %d '
                'dots, which is a lot and might slow down, freeze or crash '
                'your browser. Do you want to continue?")) {\n' % count
            )
        if callback is not None:
            sys.stdout.write("%s(\n" % callback)
        sys.stdout.write(preamble)
        for rec in result:
            sys.stdout.write(json.dumps(r2res(rec)) + ",\n")
        sys.stdout.write(postamble)
        if callback is not None:
            sys.stdout.write(");")
        sys.stdout.write("\n")
        if count >= config.WEB_WARN_DOTS_COUNT:
            sys.stdout.write('}\n')
        exit(0)
    # generic request
    if action == "count":
        if callback is None:
            sys.stdout.write("%d\n" % db.nmap.count(flt, archive=archive))
        else:
            sys.stdout.write("%s(%d);\n" % (callback,
                                            db.nmap.count(flt,
                                                          archive=archive)))
        exit(0)
    ## PostgreSQL: the query plan is affected by the limit and gives
    ## really poor results. This is a temporary workaround (look for
    ## XXX-WORKAROUND-PGSQL)
    # result = db.nmap.get(flt, archive=archive,
    #                      limit=limit, skip=skip, sort=sortby)
    result = db.nmap.get(flt, archive=archive, skip=skip, sort=sortby)
    if unused:
        msg = 'Option%s not understood: %s' % (
            's' if len(unused) > 1 else '',
            ', '.join(unused),
        )
        sys.stdout.write(webutils.js_alert("param-unused", "warning", msg))
        utils.LOGGER.warning(msg)
    elif callback is not None:
        sys.stdout.write(webutils.js_del_alert("param-unused"))
    if config.DEBUG:
        msg = "filter: %r" % flt
        sys.stdout.write(webutils.js_alert("filter", "info", msg))
        utils.LOGGER.debug(msg)
        msg = "user: %r" % webutils.get_user()
        sys.stdout.write(webutils.js_alert("user", "info", msg))
        utils.LOGGER.debug(msg)
    version_mismatch = {}
    if callback is None:
        tab, sep = "", "\n"
    else:
        tab, sep = "\t", ",\n"
        sys.stdout.write("%s([\n" % callback)
    ## XXX-WORKAROUND-PGSQL
    # for rec in result:
    for i, rec in enumerate(result):
        for fld in ['_id', 'scanid']:
            try:
                del rec[fld]
            except KeyError:
                pass
        if not ipsasnumbers:
            try:
                rec['addr'] = utils.int2ip(rec['addr'])
            except Exception:
                pass
        for field in ['starttime', 'endtime']:
            if field in rec:
                if not datesasstrings:
                    rec[field] = int(rec[field].strftime('%s'))
        for port in rec.get('ports', []):
            if 'screendata' in port:
                port['screendata'] = utils.encode_b64(port['screendata'])
            for script in port.get('scripts', []):
                if "masscan" in script:
                    try:
                        del script['masscan']['raw']
                    except KeyError:
                        pass
        if not ipsasnumbers:
            if 'traces' in rec:
                for trace in rec['traces']:
                    trace['hops'].sort(key=lambda x: x['ttl'])
                    for hop in trace['hops']:
                        try:
                            hop['ipaddr'] = utils.int2ip(hop['ipaddr'])
                        except Exception:
                            pass
        sys.stdout.write("%s%s%s" % (
            tab, json.dumps(rec, default=utils.serialize), sep
        ))
        check = db.nmap.cmp_schema_version_host(rec)
        if check:
            version_mismatch[check] = version_mismatch.get(check, 0) + 1
        # XXX-WORKAROUND-PGSQL
        if i + 1 >= limit:
            break
    if callback is not None:
        sys.stdout.write("]);\n")
    messages = {
        1: lambda count: ("%d document%s displayed %s out-of-date. Please run "
                          "the following commands: 'ivre scancli "
                          "--update-schema; ivre scancli --update-schema "
                          "--archives'" % (count, 's' if count > 1 else '',
                                           'are' if count > 1 else 'is')),
        -1: lambda count: ('%d document%s displayed ha%s been inserted by '
                           'a more recent version of IVRE. Please update '
                           'IVRE!' % (count, 's' if count > 1 else '',
                                      've' if count > 1 else 's')),
    }
    for mismatch, count in viewitems(version_mismatch):
        message = messages[mismatch](count)
        sys.stdout.write(
            webutils.js_alert("version-mismatch-%d" % ((mismatch + 1) // 2),
                              "warning", message)
        )
        utils.LOGGER.warning(message)

def get_nmap():
    flt_params = get_nmap_base()
    # PostgreSQL: the query plan is affected by the limit and gives
    # really poor results. This is a temporary workaround (look for
    # XXX-WORKAROUND-PGSQL).
    # result = db.view.get(flt_params.flt, limit=flt_params.limit,
    #                      skip=flt_params.skip, sort=flt_params.sortby)
    result = db.view.get(flt_params.flt, skip=flt_params.skip,
                         sort=flt_params.sortby)
    if flt_params.unused:
        msg = 'Option%s not understood: %s' % (
            's' if len(flt_params.unused) > 1 else '',
            ', '.join(flt_params.unused),
        )
        if flt_params.callback is not None:
            yield webutils.js_alert("param-unused", "warning", msg)
        utils.LOGGER.warning(msg)
    elif flt_params.callback is not None:
        yield webutils.js_del_alert("param-unused")
    if config.DEBUG:
        msg1 = "filter: %s" % db.view.flt2str(flt_params.flt)
        msg2 = "user: %r" % webutils.get_user()
        utils.LOGGER.debug(msg1)
        utils.LOGGER.debug(msg2)
        if flt_params.callback is not None:
            yield webutils.js_alert("filter", "info", msg1)
            yield webutils.js_alert("user", "info", msg2)
    version_mismatch = {}
    if flt_params.callback is None:
        yield "[\n"
    else:
        yield "%s([\n" % flt_params.callback
    # XXX-WORKAROUND-PGSQL
    # for rec in result:
    for i, rec in enumerate(result):
        for fld in ['_id', 'scanid']:
            try:
                del rec[fld]
            except KeyError:
                pass
        if not flt_params.ipsasnumbers:
            rec['addr'] = utils.force_int2ip(rec['addr'])
        for field in ['starttime', 'endtime']:
            if field in rec:
                if not flt_params.datesasstrings:
                    rec[field] = int(utils.datetime2timestamp(rec[field]))
        for port in rec.get('ports', []):
            if 'screendata' in port:
                port['screendata'] = utils.encode_b64(port['screendata'])
            for script in port.get('scripts', []):
                if "masscan" in script:
                    try:
                        del script['masscan']['raw']
                    except KeyError:
                        pass
        if not flt_params.ipsasnumbers:
            if 'traces' in rec:
                for trace in rec['traces']:
                    trace['hops'].sort(key=lambda x: x['ttl'])
                    for hop in trace['hops']:
                        hop['ipaddr'] = utils.force_int2ip(hop['ipaddr'])
        yield "%s\t%s" % ('' if i == 0 else ',\n',
                          json.dumps(rec, default=utils.serialize))
        check = db.view.cmp_schema_version_host(rec)
        if check:
            version_mismatch[check] = version_mismatch.get(check, 0) + 1
        # XXX-WORKAROUND-PGSQL
        if i + 1 >= flt_params.limit:
            break
    if flt_params.callback is None:
        yield "\n]\n"
    else:
        yield "\n]);\n"
    messages = {
        1: lambda count: ("%d document%s displayed %s out-of-date. Please run "
                          "the following command: 'ivre scancli "
                          "--update-schema'" % (count,
                                                's' if count > 1 else '',
                                                'are' if count > 1 else 'is')),
        -1: lambda count: ('%d document%s displayed ha%s been inserted by '
                           'a more recent version of IVRE. Please update '
                           'IVRE!' % (count, 's' if count > 1 else '',
                                      've' if count > 1 else 's')),
    }
    for mismatch, count in viewitems(version_mismatch):
        message = messages[mismatch](count)
        if flt_params.callback is not None:
            yield webutils.js_alert(
                "version-mismatch-%d" % ((mismatch + 1) // 2),
                "warning", message
            )
        utils.LOGGER.warning(message)

def get_passive():
    """Get records from Passive database

    :query str q: query (only used for limit/skip and sort)
    :query str f: filter
    :query str callback: callback to use for JSONP results
    :query bool ipsasnumbers: to get IP addresses as numbers rather
        than as strings
    :query bool datesasstrings: to get dates as strings rather than
        as timestamps
    :query str format: "json" (the default) or "ndjson"
    :status 200: no error
    :status 400: invalid referer
    :>jsonarr object: results

    """
    flt_params = get_base(db.passive)
    # PostgreSQL: the query plan is affected by the limit and gives
    # really poor results. This is a temporary workaround (look for
    # XXX-WORKAROUND-PGSQL).
    # result = db.passive.get(flt_params.flt, limit=flt_params.limit,
    #                         skip=flt_params.skip, sort=flt_params.sortby)
    result = db.passive.get(
        flt_params.flt,
        skip=flt_params.skip,
        sort=flt_params.sortby,
        fields=flt_params.fields,
    )
    if flt_params.callback is None:
        if flt_params.fmt == "json":
            yield "[\n"
    else:
        yield "%s([\n" % flt_params.callback
    # XXX-WORKAROUND-PGSQL
    # for rec in result:
    for i, rec in enumerate(result):
        try:
            del rec["_id"]
        except KeyError:
            pass
        if "addr" in rec and flt_params.ipsasnumbers:
            rec["addr"] = utils.force_ip2int(rec["addr"])
        if not flt_params.datesasstrings:
            for field in db.passive.datetime_fields:
                _set_datetime_field(db.passive, rec, field)
        if rec.get("recontype") == "SSL_SERVER" and rec.get("source") in {
            "cert",
            "cacert",
        }:
            rec["value"] = utils.encode_b64(rec["value"]).decode()
        if flt_params.fmt == "ndjson":
            yield "%s\n" % json.dumps(rec, default=utils.serialize)
        else:
            yield "%s\t%s" % (
                "" if i == 0 else ",\n",
                json.dumps(rec, default=utils.serialize),
            )
        if flt_params.limit and i + 1 >= flt_params.limit:
            break
    if flt_params.callback is None:
        if flt_params.fmt == "json":
            yield "\n]\n"
    else:
        yield "\n]);\n"

def get_nmap(subdb):
    """Get records from Nmap & View databases

    :param str subdb: database to query (must be "scans" or "view")
    :query str q: query (including limit/skip and sort)
    :query str f: filter
    :query str callback: callback to use for JSONP results
    :query bool ipsasnumbers: to get IP addresses as numbers rather
        than as strings
    :query bool datesasstrings: to get dates as strings rather than
        as timestamps
    :query str format: "json" (the default) or "ndjson"
    :status 200: no error
    :status 400: invalid referer
    :>jsonarr object: results

    """
    subdb_tool = "view" if subdb == "view" else "scancli"
    subdb = db.view if subdb == "view" else db.nmap
    flt_params = get_base(subdb)
    # PostgreSQL: the query plan is affected by the limit and gives
    # really poor results. This is a temporary workaround (look for
    # XXX-WORKAROUND-PGSQL).
    # result = subdb.get(flt_params.flt, limit=flt_params.limit,
    #                    skip=flt_params.skip, sort=flt_params.sortby)
    result = subdb.get(
        flt_params.flt,
        skip=flt_params.skip,
        sort=flt_params.sortby,
        fields=flt_params.fields,
    )
    if flt_params.unused:
        msg = "Option%s not understood: %s" % (
            "s" if len(flt_params.unused) > 1 else "",
            ", ".join(flt_params.unused),
        )
        if flt_params.callback is not None:
            yield webutils.js_alert("param-unused", "warning", msg)
        utils.LOGGER.warning(msg)
    elif flt_params.callback is not None:
        yield webutils.js_del_alert("param-unused")
    if config.DEBUG:
        msg1 = "filter: %r" % subdb.flt2str(flt_params.flt)
        msg2 = "user: %r" % webutils.get_user()
        utils.LOGGER.debug(msg1)
        utils.LOGGER.debug(msg2)
        if flt_params.callback is not None:
            yield webutils.js_alert("filter", "info", msg1)
            yield webutils.js_alert("user", "info", msg2)
    version_mismatch = {}
    if flt_params.callback is None:
        if flt_params.fmt == "json":
            yield "[\n"
    else:
        yield "%s([\n" % flt_params.callback
    # XXX-WORKAROUND-PGSQL
    # for rec in result:
    for i, rec in enumerate(result):
        for fld in ["_id", "scanid"]:
            try:
                del rec[fld]
            except KeyError:
                pass
        if flt_params.ipsasnumbers:
            rec["addr"] = utils.force_ip2int(rec["addr"])
        if not flt_params.datesasstrings:
            for field in subdb.datetime_fields:
                _set_datetime_field(subdb, rec, field)
        for port in rec.get("ports", []):
            if "screendata" in port:
                port["screendata"] = utils.encode_b64(port["screendata"])
            for script in port.get("scripts", []):
                if "masscan" in script:
                    try:
                        del script["masscan"]["raw"]
                    except KeyError:
                        pass
        if not flt_params.ipsasnumbers:
            if "traces" in rec:
                for trace in rec["traces"]:
                    trace["hops"].sort(key=lambda x: x["ttl"])
                    for hop in trace["hops"]:
                        hop["ipaddr"] = utils.force_int2ip(hop["ipaddr"])
        addresses = rec.get("addresses", {}).get("mac")
        if addresses:
            newaddresses = []
            for addr in addresses:
                manuf = utils.mac2manuf(addr)
                if manuf and manuf[0]:
                    newaddresses.append({"addr": addr, "manuf": manuf[0]})
                else:
                    newaddresses.append({"addr": addr})
            rec["addresses"]["mac"] = newaddresses
        if flt_params.fmt == "ndjson":
            yield "%s\n" % json.dumps(rec, default=utils.serialize)
        else:
            yield "%s\t%s" % (
                "" if i == 0 else ",\n",
                json.dumps(rec, default=utils.serialize),
            )
        check = subdb.cmp_schema_version_host(rec)
        if check:
            version_mismatch[check] = version_mismatch.get(check, 0) + 1
        # XXX-WORKAROUND-PGSQL
        if flt_params.limit and i + 1 >= flt_params.limit:
            break
    if flt_params.callback is None:
        if flt_params.fmt == "json":
            yield "\n]\n"
    else:
        yield "\n]);\n"
    messages = {
        1: lambda count: (
            "%d document%s displayed %s out-of-date. Please run "
            "the following command: 'ivre %s --update-schema'"
            % (
                count,
                "s" if count > 1 else "",
                "are" if count > 1 else "is",
                subdb_tool,
            )
        ),
        -1: lambda count: (
            "%d document%s displayed ha%s been inserted by "
            "a more recent version of IVRE. Please update "
            "IVRE!" % (count, "s" if count > 1 else "",
                       "ve" if count > 1 else "s")
        ),
    }
    for mismatch, count in version_mismatch.items():
        message = messages[mismatch](count)
        if flt_params.callback is not None:
            yield webutils.js_alert(
                "version-mismatch-%d" % ((mismatch + 1) // 2),
                "warning", message)
        utils.LOGGER.warning(message)

def zgrap_parser_http(data: Dict[str, Any], hostrec: NmapHost,
                      port: Optional[int] = None) -> NmapPort:
    """This function handles data from `{"data": {"http": [...]}}`
    records. `data` should be the content, i.e. the `[...]`. It should
    consist of a simple dictionary that may contain a `"response"` key
    and/or a `"redirect_response_chain"` key.

    The output is a port dict (i.e., the content of the "ports" key of
    an `nmap` or `view` record in IVRE), that may be empty.

    """
    if not data:
        return {}
    # for zgrab2 results
    if "result" in data:
        data.update(data.pop("result"))
    if "response" not in data:
        utils.LOGGER.warning('Missing "response" field in zgrab HTTP result')
        return {}
    resp = data["response"]
    needed_fields = set(["request", "status_code", "status_line"])
    missing_fields = needed_fields.difference(resp)
    if missing_fields:
        utils.LOGGER.warning(
            "Missing field%s %s in zgrab HTTP result",
            "s" if len(missing_fields) > 1 else "",
            ", ".join(repr(fld) for fld in missing_fields),
        )
        return {}
    req = resp["request"]
    url = req.get("url")
    res: NmapPort = {
        "service_name": "http",
        "service_method": "probed",
        "state_state": "open",
        "state_reason": "response",
        "protocol": "tcp",
    }
    tls = None
    try:
        tls = req["tls_handshake"]
    except KeyError:
        # zgrab2
        try:
            tls = req["tls_log"]["handshake_log"]
        except KeyError:
            pass
    if tls is not None:
        res["service_tunnel"] = "ssl"
        try:
            cert = tls["server_certificates"]["certificate"]["raw"]
        except KeyError:
            pass
        else:
            output, info_cert = create_ssl_cert(cert.encode(),
                                                b64encoded=True)
            if info_cert:
                res.setdefault("scripts", []).append({
                    "id": "ssl-cert",
                    "output": output,
                    "ssl-cert": info_cert,
                })
                for cert in info_cert:
                    add_cert_hostnames(cert,
                                       hostrec.setdefault("hostnames", []))
    if url:
        try:
            _, guessed_port = utils.url2hostport(
                "%(scheme)s://%(host)s" % url)
        except ValueError:
            utils.LOGGER.warning("Cannot guess port from url %r", url)
            guessed_port = 80  # because reasons
        else:
            if port is not None and port != guessed_port:
                utils.LOGGER.warning(
                    "Port %d found from the URL %s differs from the "
                    "provided port value %d",
                    guessed_port, url.get("path"), port,
                )
                port = guessed_port
        if port is None:
            port = guessed_port
        # Specific paths
        if url.get("path").endswith("/.git/index"):
            if resp.get("status_code") != 200:
                return {}
            if not resp.get("body", "").startswith("DIRC"):
                return {}
            # Due to an issue with ZGrab2 output, we cannot, for now,
            # process the content of the file. See
            # <https://github.com/zmap/zgrab2/issues/263>.
            repository = "%s:%d%s" % (hostrec["addr"], port,
                                      url["path"][:-5])
            res["port"] = port
            res.setdefault("scripts", []).append({
                "id": "http-git",
                "output": "\n %s\n Git repository found!\n" % repository,
                "http-git": [
                    {"repository": repository,
                     "files-found": [".git/index"]},
                ],
            })
            return res
        if url.get("path").endswith("/owa/auth/logon.aspx"):
            if resp.get("status_code") != 200:
                return {}
            version_set = set(
                m.group(1)
                for m in _EXPR_OWA_VERSION.finditer(resp.get("body", ""))
            )
            if not version_set:
                return {}
            version_list = sorted(
                version_set,
                key=lambda v: [int(x) for x in v.split(".")],
            )
            res["port"] = port
            path = url["path"][:-15]
            if version_list:
                parsed_version = EXCHANGE_BUILDS.get(
                    version_list[0], "unknown build number")
                if len(version_list) > 1:
                    version_list = [
                        "%s (%s)" % (vers,
                                     EXCHANGE_BUILDS.get(
                                         vers, "unknown build number"))
                        for vers in version_list
                    ]
                    output = ("OWA: path %s, version %s "
                              "(multiple versions found!)" % (
                                  path, " / ".join(version_list)))
                else:
                    output = "OWA: path %s, version %s (%s)" % (
                        path, version_list[0], parsed_version,
                    )
                res.setdefault("scripts", []).append({
                    "id": "http-app",
                    "output": output,
                    "http-app": [{
                        "path": path,
                        "application": "OWA",
                        "version": version_list[0],
                        "parsed_version": parsed_version,
                    }],
                })
            return res
        if url.get("path").endswith("/centreon/"):
            if resp.get("status_code") != 200:
                return {}
            if not resp.get("body"):
                return {}
            body = resp["body"]
            res["port"] = port
            path = url["path"]
            match = _EXPR_TITLE.search(body)
            if match is None:
                return {}
            if match.groups()[0] != "Centreon - IT & Network Monitoring":
                return {}
            match = _EXPR_CENTREON_VERSION.search(body)
            version: Optional[str]
            if match is None:
                version = None
            else:
                version = match.group(1) or match.group(2)
            res.setdefault("scripts", []).append({
                "id": "http-app",
                "output": "Centreon: path %s%s" % (
                    path,
                    "" if version is None else (", version %s" % version),
                ),
                "http-app": [
                    dict(
                        {"path": path, "application": "Centreon"},
                        **({} if version is None else {"version": version}),
                    )
                ],
            })
            return res
        if url.get("path").endswith("/.well-known/security.txt"):
            if resp.get("status_code") != 200:
                return {}
            if not resp.get("headers"):
                return {}
            if not any(
                ctype.split(";", 1)[0].lower() == "text/plain"
                for ctype in resp["headers"].get("content_type", [])
            ):
                return {}
            if not resp.get("body"):
                return {}
            body = resp["body"]
            res["port"] = port
            parsed: Dict[str, List[str]] = {}
            for line in body.splitlines():
                line = line.strip().split("#", 1)[0]
                if not line:
                    continue
                if ":" not in line:
                    utils.LOGGER.warning(
                        "Invalid line in security.txt file [%r]", line)
                    continue
                key, value = line.split(":", 1)
                parsed.setdefault(key.strip().lower(), []).append(
                    value.strip())
            res.setdefault("scripts", []).append({
                "id": "http-securitytxt",
                "output": body,
                "http-securitytxt": {
                    key: " / ".join(value) for key, value in parsed.items()
                },
            })
            return res
        if url.get("path") != "/":
            utils.LOGGER.warning("URL path not supported yet: %s",
                                 url.get("path"))
            return {}
    elif port is None:
        if req.get("tls_handshake") or req.get("tls_log"):
            port = 443
        else:
            port = 80
    res["port"] = port
    # Since Zgrab does not preserve the order of the headers, we need
    # to reconstruct a banner to use Nmap fingerprints
    if resp.get("headers"):
        headers = resp["headers"]
        # Check the Authenticate header first: if we requested it with
        # an Authorization header, we don't want to gather other
        # information
        if headers.get("www_authenticate"):
            auths = headers.get("www_authenticate")
            for auth in auths:
                if ntlm._is_ntlm_message(auth):
                    try:
                        infos = ntlm.ntlm_extract_info(
                            utils.decode_b64(
                                auth.split(None, 1)[1].encode()))
                    except (UnicodeDecodeError, TypeError, ValueError,
                            binascii.Error):
                        continue
                    if not infos:
                        continue
                    keyvals = zip(ntlm_values,
                                  [infos.get(k) for k in ntlm_values])
                    output = "\n".join("{}: {}".format(k, v)
                                       for k, v in keyvals if v)
                    res.setdefault("scripts", []).append({
                        "id": "ntlm-info",
                        "output": output,
                        "ntlm-info": dict(infos, protocol="http"),
                    })
                    if "DNS_Computer_Name" in infos:
                        add_hostname(
                            infos["DNS_Computer_Name"],
                            "ntlm",
                            hostrec.setdefault("hostnames", []),
                        )
            if any(val.lower().startswith("ntlm")
                   for val in req.get("headers", {}).get("authorization",
                                                         [])):
                return res
        # If we have a headers_raw value, let's use it. Else, let's
        # fake it as well as we can.
        http_hdrs: List[HttpHeader] = []
        output_list: List[str] = []
        has_raw_value = False
        if resp.get("headers_raw"):
            try:
                banner = utils.decode_b64(resp.get("headers_raw").encode())
            except Exception:
                utils.LOGGER.warning(
                    "Cannot decode raw headers, using parsed result")
            else:
                output_list = [
                    utils.nmap_encode_data(line)
                    for line in re.split(b"\r?\n", banner)
                ]
                banner_split = banner.split(b"\n")
                http_hdrs = [{
                    "name": "_status",
                    "value": utils.nmap_encode_data(
                        banner_split[0].strip()),
                }]
                http_hdrs.extend(
                    {
                        "name": utils.nmap_encode_data(hdrname).lower(),
                        "value": utils.nmap_encode_data(hdrval),
                    }
                    for hdrname, hdrval in (
                        m.groups()
                        for m in (
                            utils.RAW_HTTP_HEADER.search(part.strip())
                            for part in banner_split
                        )
                        if m
                    )
                )
                has_raw_value = True
        if not has_raw_value:
            # no headers_raw or decoding failed
            # The order will be incorrect!
            banner = (utils.nmap_decode_data(resp["protocol"]["name"]) +
                      b" " +
                      utils.nmap_decode_data(resp["status_line"]) +
                      b"\r\n")
            line = "%s %s" % (resp["protocol"]["name"],
                              resp["status_line"])
            http_hdrs = [{"name": "_status", "value": line}]
            output_list = [line]
            for unk in headers.pop("unknown", []):
                headers[unk["key"]] = unk["value"]
            for hdr, values in headers.items():
                hdr = hdr.replace("_", "-")
                for val in values:
                    http_hdrs.append({"name": hdr, "value": val})
                    output_list.append("%s: %s" % (hdr, val))
            if headers.get("server"):
                banner += (b"Server: " +
                           utils.nmap_decode_data(headers["server"][0]) +
                           b"\r\n\r\n")
        if http_hdrs:
            method = req.get("method")
            if method:
                output_list.append("")
                output_list.append("(Request type: %s)" % method)
            script: NmapScript = {
                "id": "http-headers",
                "output": "\n".join(output_list),
                "http-headers": http_hdrs,
            }
            if has_raw_value:
                script["masscan"] = {
                    "raw": utils.encode_b64(banner).decode(),
                }
            res.setdefault("scripts", []).append(script)
            handle_http_headers(hostrec, res, http_hdrs,
                                path=url.get("path"))
        info: NmapServiceMatch = utils.match_nmap_svc_fp(
            banner, proto="tcp", probe="GetRequest")
        if info:
            add_cpe_values(hostrec, "ports.port:%s" % port,
                           info.pop("cpe", []))
            res.update(cast(NmapPort, info))
            add_service_hostname(info, hostrec.setdefault("hostnames", []))
    if resp.get("body"):
        body = resp["body"]
        res.setdefault("scripts", []).append({
            "id": "http-content",
            "output": utils.nmap_encode_data(body.encode()),
        })
        handle_http_content(hostrec, res, body.encode())
    return res

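# The OWA branch above sorts candidate version strings numerically per
# dotted component rather than lexicographically. A standalone
# illustration of that sort key (the version set is hypothetical):
versions = {"15.1.225", "15.1.30", "14.3.123"}
ordered = sorted(versions, key=lambda v: [int(x) for x in v.split(".")])
print(ordered)  # ['14.3.123', '15.1.30', '15.1.225']
# a plain sorted(versions) would misorder: '15.1.225' before '15.1.30'
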
def get_ranges_by_data(datafile, condition):
    rnge = IPRanges()
    for start, stop in _get_by_data(datafile, condition):
        rnge.append(start, stop)
    return rnge


get_ranges_by_country = lambda code: get_ranges_by_data(
    "GeoLite2-Country.dump-IPv4.csv",
    lambda line: line[2] == code,
)
get_ranges_by_location = lambda locid: get_ranges_by_data(
    'GeoLite2-City.dump-IPv4.csv',
    lambda line: line[5] == str(locid))
get_ranges_by_city = lambda country_code, city: get_ranges_by_data(
    'GeoLite2-City.dump-IPv4.csv',
    lambda line: line[2] == country_code and line[4] == utils.encode_b64(
        (city or "").encode('utf-8')).decode('utf-8'),
)
get_ranges_by_region = lambda country_code, reg_code: get_ranges_by_data(
    'GeoLite2-City.dump-IPv4.csv',
    lambda line: line[2] == country_code and line[3] == reg_code,
)
get_ranges_by_asnum = lambda asnum: get_ranges_by_data(
    "GeoLite2-ASN.dump-IPv4.csv",
    lambda line: line[2] == str(asnum),
)
get_routable_ranges = lambda: get_ranges_by_data('BGP.csv', lambda _: True)

def get_nmap():
    flt_params = get_nmap_base()
    ## PostgreSQL: the query plan is affected by the limit and gives
    ## really poor results. This is a temporary workaround (look for
    ## XXX-WORKAROUND-PGSQL)
    # result = db.view.get(flt_params.flt, limit=flt_params.limit,
    #                      skip=flt_params.skip, sort=flt_params.sortby)
    result = db.view.get(flt_params.flt, skip=flt_params.skip,
                         sort=flt_params.sortby)
    if flt_params.unused:
        msg = 'Option%s not understood: %s' % (
            's' if len(flt_params.unused) > 1 else '',
            ', '.join(flt_params.unused),
        )
        if flt_params.callback is not None:
            yield webutils.js_alert("param-unused", "warning", msg)
        utils.LOGGER.warning(msg)
    elif flt_params.callback is not None:
        yield webutils.js_del_alert("param-unused")
    if config.DEBUG:
        msg1 = "filter: %s" % db.view.flt2str(flt_params.flt)
        msg2 = "user: %r" % webutils.get_user()
        utils.LOGGER.debug(msg1)
        utils.LOGGER.debug(msg2)
        if flt_params.callback is not None:
            yield webutils.js_alert("filter", "info", msg1)
            yield webutils.js_alert("user", "info", msg2)
    version_mismatch = {}
    if flt_params.callback is None:
        yield "[\n"
    else:
        yield "%s([\n" % flt_params.callback
    ## XXX-WORKAROUND-PGSQL
    # for rec in result:
    for i, rec in enumerate(result):
        for fld in ['_id', 'scanid']:
            try:
                del rec[fld]
            except KeyError:
                pass
        if not flt_params.ipsasnumbers:
            rec['addr'] = utils.force_int2ip(rec['addr'])
        for field in ['starttime', 'endtime']:
            if field in rec:
                if not flt_params.datesasstrings:
                    rec[field] = int(utils.datetime2timestamp(rec[field]))
        for port in rec.get('ports', []):
            if 'screendata' in port:
                port['screendata'] = utils.encode_b64(port['screendata'])
            for script in port.get('scripts', []):
                if "masscan" in script:
                    try:
                        del script['masscan']['raw']
                    except KeyError:
                        pass
        if not flt_params.ipsasnumbers:
            if 'traces' in rec:
                for trace in rec['traces']:
                    trace['hops'].sort(key=lambda x: x['ttl'])
                    for hop in trace['hops']:
                        hop['ipaddr'] = utils.force_int2ip(hop['ipaddr'])
        yield "%s\t%s" % ('' if i == 0 else ',\n',
                          json.dumps(rec, default=utils.serialize))
        check = db.view.cmp_schema_version_host(rec)
        if check:
            version_mismatch[check] = version_mismatch.get(check, 0) + 1
        # XXX-WORKAROUND-PGSQL
        if i + 1 >= flt_params.limit:
            break
    if flt_params.callback is None:
        yield "\n]\n"
    else:
        yield "\n]);\n"
    messages = {
        1: lambda count: ("%d document%s displayed %s out-of-date. Please run "
                          "the following command: 'ivre scancli "
                          "--update-schema'" % (count,
                                                's' if count > 1 else '',
                                                'are' if count > 1 else 'is')),
        -1: lambda count: ('%d document%s displayed ha%s been inserted by '
                           'a more recent version of IVRE. Please update '
                           'IVRE!' % (count, 's' if count > 1 else '',
                                      've' if count > 1 else 's')),
    }
    for mismatch, count in viewitems(version_mismatch):
        message = messages[mismatch](count)
        if flt_params.callback is not None:
            yield webutils.js_alert(
                "version-mismatch-%d" % ((mismatch + 1) // 2),
                "warning", message)
        utils.LOGGER.warning(message)

def get_nmap(subdb):
    """Get records from Nmap & View databases

    :param str subdb: database to query (must be "scans" or "view")
    :query str q: query (including limit/skip and sort)
    :query str callback: callback to use for JSONP results
    :query bool ipsasnumbers: to get IP addresses as numbers rather
        than as strings
    :query bool datesasstrings: to get dates as strings rather than
        as timestamps
    :status 200: no error
    :status 400: invalid referer
    :>jsonarr object: results

    """
    subdb_tool = "view" if subdb == 'view' else "scancli"
    subdb = db.view if subdb == 'view' else db.nmap
    flt_params = get_nmap_base(subdb)
    # PostgreSQL: the query plan is affected by the limit and gives
    # really poor results. This is a temporary workaround (look for
    # XXX-WORKAROUND-PGSQL).
    # result = subdb.get(flt_params.flt, limit=flt_params.limit,
    #                    skip=flt_params.skip, sort=flt_params.sortby)
    result = subdb.get(flt_params.flt, skip=flt_params.skip,
                       sort=flt_params.sortby)
    if flt_params.unused:
        msg = 'Option%s not understood: %s' % (
            's' if len(flt_params.unused) > 1 else '',
            ', '.join(flt_params.unused),
        )
        if flt_params.callback is not None:
            yield webutils.js_alert("param-unused", "warning", msg)
        utils.LOGGER.warning(msg)
    elif flt_params.callback is not None:
        yield webutils.js_del_alert("param-unused")
    if config.DEBUG:
        msg1 = "filter: %s" % subdb.flt2str(flt_params.flt)
        msg2 = "user: %r" % webutils.get_user()
        utils.LOGGER.debug(msg1)
        utils.LOGGER.debug(msg2)
        if flt_params.callback is not None:
            yield webutils.js_alert("filter", "info", msg1)
            yield webutils.js_alert("user", "info", msg2)
    version_mismatch = {}
    if flt_params.callback is None:
        yield "[\n"
    else:
        yield "%s([\n" % flt_params.callback
    # XXX-WORKAROUND-PGSQL
    # for rec in result:
    for i, rec in enumerate(result):
        for fld in ['_id', 'scanid']:
            try:
                del rec[fld]
            except KeyError:
                pass
        if not flt_params.ipsasnumbers:
            rec['addr'] = utils.force_int2ip(rec['addr'])
        for field in ['starttime', 'endtime']:
            if field in rec:
                if not flt_params.datesasstrings:
                    rec[field] = int(utils.datetime2timestamp(rec[field]))
        for port in rec.get('ports', []):
            if 'screendata' in port:
                port['screendata'] = utils.encode_b64(port['screendata'])
            for script in port.get('scripts', []):
                if "masscan" in script:
                    try:
                        del script['masscan']['raw']
                    except KeyError:
                        pass
        if not flt_params.ipsasnumbers:
            if 'traces' in rec:
                for trace in rec['traces']:
                    trace['hops'].sort(key=lambda x: x['ttl'])
                    for hop in trace['hops']:
                        hop['ipaddr'] = utils.force_int2ip(hop['ipaddr'])
        addresses = rec.get('addresses', {}).get('mac')
        if addresses:
            newaddresses = []
            for addr in addresses:
                manuf = utils.mac2manuf(addr)
                if manuf and manuf[0]:
                    newaddresses.append({'addr': addr, 'manuf': manuf[0]})
                else:
                    newaddresses.append({'addr': addr})
            rec['addresses']['mac'] = newaddresses
        yield "%s\t%s" % ('' if i == 0 else ',\n',
                          json.dumps(rec, default=utils.serialize))
        check = subdb.cmp_schema_version_host(rec)
        if check:
            version_mismatch[check] = version_mismatch.get(check, 0) + 1
        # XXX-WORKAROUND-PGSQL
        if i + 1 >= flt_params.limit:
            break
    if flt_params.callback is None:
        yield "\n]\n"
    else:
        yield "\n]);\n"
    messages = {
        1: lambda count: ("%d document%s displayed %s out-of-date. Please run "
                          "the following command: 'ivre %s "
                          "--update-schema'" % (count,
                                                's' if count > 1 else '',
                                                'are' if count > 1 else 'is',
                                                subdb_tool)),
        -1: lambda count: ('%d document%s displayed ha%s been inserted by '
                           'a more recent version of IVRE. Please update '
                           'IVRE!' % (count, 's' if count > 1 else '',
                                      've' if count > 1 else 's')),
    }
    for mismatch, count in viewitems(version_mismatch):
        message = messages[mismatch](count)
        if flt_params.callback is not None:
            yield webutils.js_alert(
                "version-mismatch-%d" % ((mismatch + 1) // 2),
                "warning", message)
        utils.LOGGER.warning(message)

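# All of the endpoint variants above stream either a plain JSON array
# or, when a callback parameter is given, a JSONP wrapper. The framing
# reduces to this pattern (a sketch with a hypothetical record list):
import json

def stream_json(records, callback=None):
    # JSONP wrapper when a callback is provided, plain array otherwise
    yield "[\n" if callback is None else "%s([\n" % callback
    for i, rec in enumerate(records):
        yield "%s\t%s" % ("" if i == 0 else ",\n", json.dumps(rec))
    yield "\n]\n" if callback is None else "\n]);\n"

print("".join(stream_json([{"addr": "192.0.2.1"}], callback="cb")))
# cb([
# 	{"addr": "192.0.2.1"}
# ]);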