def test_empty_input():
    # NOTE(review): the name says "empty input" but the asserts below actually
    # cover encodings and non-string passthrough - consider renaming later.
    # The first five asserts suggest to_utf8() returns already-encoded byte
    # strings unchanged (the UTF-16 BOM and NUL padding survive) - confirm
    # against the to_utf8() implementation.
    assert to_utf8(u"ÑAAA".encode("UTF-16")) == "\xff\xfe\xd1\x00A\x00A\x00A\x00"
    assert to_utf8(u"ÑAAA".encode("UTF-8")) == "\xc3\x91AAA"
    assert to_utf8(u"ÑAAA".encode("iso-8859-15")) == "\xd1AAA"
    assert to_utf8(u"ÑAAA".encode("iso-8859-1")) == "\xd1AAA"
    assert to_utf8(u"ÑAAA".encode("latin1")) == "\xd1AAA"
    # Non-string values must pass through unchanged.
    assert to_utf8(0) == 0
    assert to_utf8(None) is None
    assert to_utf8([]) == []
    # 0101 is a Python 2 octal literal (decimal 65); bin(65) == '0b1000001'.
    # This line is a SyntaxError under Python 3.
    assert to_utf8(bin(0101)) == '0b1000001'
def run(self, info):
    """
    Probe the given URL resource for reflected XSS in its GET parameters.

    :param info: URL resource to test.
    :return: list of XSS vulnerability objects found (possibly empty).
    """
    m_return = []

    # Fetch the target without following redirects.
    p = get_request(url = info, allow_redirects=False)

    # Skip 301/302 responses that carry no Location header.
    # NOTE(review): the former "TODO 30X redirect" marker suggests the
    # intent may have been to skip *all* redirects - confirm.
    if (p.status == '301' or p.status == '302') and not p.headers.get('Location'):
        return m_return

    # Non-HTML content types cannot reflect markup, so skip them.
    if p.content_type is not None and re.search(
            r'(application\/json)|(application\/javascript)|(text\/json)|(text\/javascript)|'
            r'(application\/x-javascript)|(application\/octet-stream)|(text\/xml)|(application\/xml)',
            p.content_type) is not None:
        return m_return

    m_url = info
    if info.has_url_params:
        # Try an injection in every GET parameter; stop at the first hit.
        for k, v in m_url.url_params.iteritems():
            key = to_utf8(k)
            value = to_utf8(v)
            if self.xss_detect(m_url, method = 'GET', k = key, v = value):
                url = URL(url = m_url.url, method = 'GET',
                          post_params = None, referer = m_url.referer)
                vul = XSS(url,
                          vulnerable_params = {"injection": "xxxxxx"},
                          injection_point = XSS.INJECTION_POINT_URL,
                          injection_type = "XSS")
                # (Removed the placeholder profanity that used to be
                # appended to vul.description here.)
                m_return.append(vul)
                break

    if info.has_post_params:
        # TODO: inject into POST parameters as well.
        # (Replaced a leftover debug print.)
        pass

    # Send the results.
    return m_return
def __format_rst(self, obj, hyperlinks = False, width = 70):
    """
    Render an arbitrary object as a reStructuredText fragment.

    :param obj: object to render (string, list/tuple of strings, dict,
        or anything else convertible with str()/pformat()).
    :param hyperlinks: when True, treat obj as an iterable of IDs and
        emit one "`ID: x`_" link reference per element.
    :param width: wrap column for plain strings; falsy disables wrapping.
    :return: RST-escaped text.
    """

    # Link-list mode: every element becomes an RST hyperlink reference.
    if hyperlinks:
        return "\n".join("`ID: %s`_" % x for x in obj)

    # Plain strings: hexdump binary-looking data, word-wrap the rest.
    if isinstance(obj, basestring):
        # NOTE(review): str() on a unicode object containing non-ASCII
        # characters raises UnicodeEncodeError *before* the ord() check
        # below runs - confirm callers only pass byte strings here.
        obj = str(obj)
        if any(ord(c) > 127 for c in obj):
            obj = hexdump(obj)
        elif width:
            # Preserve whitespace exactly; only insert line breaks.
            obj = "\n".join(wrap(obj, width,
                                 replace_whitespace=False,
                                 expand_tabs=False,
                                 drop_whitespace=False))
        return self.__escape_rst(obj)

    # Lists/tuples of strings become RST bullet lists.
    if (
        (isinstance(obj, list) or isinstance(obj, tuple)) and
        all(isinstance(x, basestring) for x in obj)
    ):
        # The inner isinstance() is redundant given the guard above,
        # but kept as-is.
        return "\n".join(
            "- " + self.__escape_rst(
                to_utf8(x) if isinstance(x, basestring) else pformat(x)
            )
            for x in obj)

    # Dictionaries become "key: value" lines.
    if isinstance(obj, dict):
        return "\n".join(
            self.__escape_rst("%s: %s" % (k,v))
            for k,v in obj.iteritems())

    # Fallback: best-effort string conversion.
    try:
        text = str(obj)
    except Exception:
        text = pformat(obj)
    return self.__escape_rst(text)
def run(self, info):
    """
    Drive the external WVS spider against the target URL and convert the
    JSON results into URL resources.

    :param info: resource whose .url attribute is the spider entry point.
    :return: list of URL resources discovered by the spider.
    """
    m_return = []
    m_url = info.url

    # Serialize the configured cookies into a "k=v;k=v" header value.
    cookie_param = None
    cookie_dict = Config.audit_config.cookie
    if cookie_dict is not None:                      # was: != None
        if hasattr(cookie_dict, "iteritems"):
            cookie_params = { to_utf8(k): to_utf8(v)
                              for k, v in cookie_dict.iteritems() }
            cookie_param = ';'.join(
                '%s=%s' % (k, v)
                for (k, v) in sorted(cookie_params.iteritems()) )

    # Run the spider and parse its JSON output.
    __ = start_wvs_spider_dispatch(m_url, cookie_param, Logger)
    json_content = json.loads(__)

    for urls in json_content['info']:
        Logger.log_verbose("Web Spider:found url %s" % urls['fullurl'])
        m_resource = URL(url = urls['fullurl'])
        m_return.append(m_resource)

        for item_url in urls['content']:
            post_param = item_url['param_data']
            if "AcunetixBoundary_" in post_param:
                # multipart/form-data upload form.
                method = 'FILE_UPLOAD'
                # Replaced a bare debug "print method" with a log call.
                Logger.log_verbose("Web Spider:found %s form" % method)
            else:
                method = item_url['method']

            if method == "POST":
                post_param_dict = argument_query(item_url['param_data'])
                m_resource = URL(url = item_url['url'], method = "POST",
                                 post_params = post_param_dict,
                                 referer = urls['fullurl'])
            else:
                m_resource = URL(url = item_url['url'], method = method,
                                 referer = urls['fullurl'])
            Logger.log_verbose("Web Spider:found url %s" % item_url['url'])
            m_return.append(m_resource)

    # Send the results.
    return m_return
def run(self, info):
    # NOTE(review): this plugin looks unfinished - the actual detection
    # loop is disabled (triple-quoted string below) and the bare print
    # statements are debug leftovers. As written it only builds a cookie
    # header value and always returns an empty list.
    #if not info.has_url_params and not info.has_post_params:
    #    return
    m_return = []
    if info.has_url_params:
        # Serialize the configured cookies into a "k=v;k=v" header value.
        cookie_dict = Config.audit_config.cookie
        print cookie_dict
        if hasattr(cookie_dict, "iteritems"):
            cookie_params = { to_utf8(k): to_utf8(v) for k, v in cookie_dict.iteritems() }
            cookie_param = ';'.join( '%s=%s' % (k ,v) for (k, v) in sorted(cookie_params.iteritems()) )
            print cookie_param
        print "GET"
        # Disabled prototype of the arbitrary-file-read detection loop.
        '''
        param_dict = info.url_params
        for k,v in param_dict.iteritems():
            key = to_utf8(k)
            value = to_utf8(v)
            for any_file_read_case in any_file_read_detect_test_cases:
                p = payload_muntants(info, payload = {'k': k , 'pos': 1, 'payload':any_file_read_case['input'], 'type': 1}, bmethod = info.method)
                __ = re.search(any_file_read_case['target'], p.data)
                if __ is not None:
                    print '[+] found any file read!'
                    return m_return
        '''
    if info.has_post_params:
        print 'POST'
    # Send the results
    return m_return
def run(self, info): #if not info.has_url_params and not info.has_post_params: # return m_return = [] if info.has_url_params: cookie_dict = Config.audit_config.cookie print cookie_dict if hasattr(cookie_dict, "iteritems"): cookie_params = { to_utf8(k): to_utf8(v) for k, v in cookie_dict.iteritems() } cookie_param = ';'.join( '%s=%s' % (k, v) for (k, v) in sorted(cookie_params.iteritems())) print cookie_param print "GET" ''' param_dict = info.url_params for k,v in param_dict.iteritems(): key = to_utf8(k) value = to_utf8(v) for any_file_read_case in any_file_read_detect_test_cases: p = payload_muntants(info, payload = {'k': k , 'pos': 1, 'payload':any_file_read_case['input'], 'type': 1}, bmethod = info.method) __ = re.search(any_file_read_case['target'], p.data) if __ is not None: print '[+] found any file read!' return m_return ''' if info.has_post_params: print 'POST' # Send the results return m_return
def run(self, info):
    """
    Check the URL's GET and POST parameters for command injection.

    :param info: URL resource to test.
    :return: list of results (currently always empty; findings are only
        logged, matching the original behavior).
    """
    m_return = []
    # The GET and POST branches were identical copy-pasted loops;
    # both now delegate to a single helper.
    if info.has_url_params:
        if self._scan_params(info, info.url_params):
            return m_return
    if info.has_post_params:
        if self._scan_params(info, info.post_params):
            return m_return
    # Send the results.
    return m_return

def _scan_params(self, info, param_dict):
    """
    Run every command-injection test case against every parameter.

    :param info: URL resource the payloads are mutated into.
    :param param_dict: mapping of parameter names to values.
    :return: True as soon as one test case's target regex matches the
        response body, False otherwise.
    """
    for k, v in param_dict.iteritems():
        # (The original also computed to_utf8(k)/to_utf8(v) here but
        # never used them; the raw key is what the payload needs.)
        for case in cmd_inject_detect_test_cases:
            p = payload_muntants(info,
                                 payload = {'k': k, 'pos': 1,
                                            'payload': case['input'],
                                            'type': 0},
                                 bmethod = info.method,
                                 timeout = 15.0)
            if case['target'] is not None and p is not None:
                if re.search(case['target'], p.data) is not None:
                    Logger.log_verbose('[+] found cmd inject!')
                    return True
    return False
class ShodanPlugin(TestingPlugin):
    """
    This plugin tries to perform passive reconnaissance on a target
    using the Shodan web API.
    """

    #--------------------------------------------------------------------------
    def check_params(self):
        # Make sure we have an API key (raises ValueError otherwise).
        self.get_api_key()

    #--------------------------------------------------------------------------
    def get_accepted_types(self):
        # This plugin only consumes IP address assets.
        return [IP]

    #--------------------------------------------------------------------------
    def get_api_key(self):
        """
        Fetch the Shodan API key from the plugin arguments, falling back
        to the plugin configuration.

        :return: the API key string.
        :raises ValueError: if no key is configured anywhere.
        """
        key = Config.plugin_args.get("apikey", None)
        if not key:
            key = Config.plugin_config.get("apikey", None)
        if not key:
            raise ValueError(
                "Missing API key! Get one at:"
                " http://www.shodanhq.com/api_doc")
        return key

    #--------------------------------------------------------------------------
    def run(self, info):

        # This is where we'll collect the data we'll return.
        results = []

        # Skip unsupported IP addresses: only public IPv4 is queried.
        if info.version != 4:
            return
        ip = info.address
        parsed = netaddr.IPAddress(ip)
        if parsed.is_loopback() or \
           parsed.is_private() or \
           parsed.is_link_local():
            return

        # Query Shodan for this host; abort (returning nothing) on failure.
        try:
            key = self.get_api_key()
            api = WebAPI(key)
            shodan = api.host(ip)
        except Exception, e:
            tb = traceback.format_exc()
            Logger.log_error("Error querying Shodan for host %s: %s" % (ip, str(e)))
            Logger.log_error_more_verbose(tb)
            return

        # Make sure we got the same IP address we asked for.
        # If not, switch to the IP Shodan reported.
        if ip != shodan.get("ip", ip):
            Logger.log_error(
                "Shodan gave us a different IP address... weird!")
            Logger.log_error_verbose(
                "Old IP: %s - New IP: %s" % (ip, shodan["ip"]))
            ip = to_utf8( shodan["ip"] )
            info = IP(ip)
            results.append(info)

        # Extract all hostnames and link them to this IP address.
        # Note: sometimes Shodan sends IP addresses here! (?)
        seen_host = {}
        for hostname in shodan.get("hostnames", []):
            if hostname == ip:
                continue
            if hostname in seen_host:
                domain = seen_host[hostname]
            else:
                try:
                    try:
                        host = IP(hostname)
                    except ValueError:
                        host = Domain(hostname)
                except Exception:
                    tb = traceback.format_exc()
                    Logger.log_error_more_verbose(tb)
                # NOTE(review): if both constructors above raised, "host"
                # is stale or unbound at this point - a "continue" inside
                # the outer except looks intended. Confirm.
                seen_host[hostname] = host
                results.append(host)
                domain = host
            domain.add_resource(info)

        # Get the OS fingerprint, if available.
        os = to_utf8( shodan.get("os") )
        if os:
            Logger.log("Host %s is running %s" % (ip, os))
            pass # XXX TODO we'll need to reverse lookup the CPE

        # Get the GPS data, if available.
        # Complete any missing data using the default values.
        try:
            latitude = float( shodan["latitude"] )
            longitude = float( shodan["longitude"] )
        except Exception:
            latitude = None
            longitude = None
        if latitude is not None and longitude is not None:
            # Normalize every optional field: falsy -> None, else str().
            area_code = shodan.get("area_code")
            if not area_code:
                area_code = None
            else:
                area_code = str(area_code)
            country_code = shodan.get("country_code")
            if not country_code:
                country_code = shodan.get("country_code3")
                if not country_code:
                    country_code = None
                else:
                    country_code = str(country_code)
            else:
                country_code = str(country_code)
            country_name = shodan.get("country_name")
            if not country_name:
                country_name = None
            city = shodan.get("city")
            if not city:
                city = None
            dma_code = shodan.get("dma_code")
            if not dma_code:
                dma_code = None
            else:
                dma_code = str(dma_code)
            postal_code = shodan.get("postal_code")
            if not postal_code:
                postal_code = None
            else:
                postal_code = str(postal_code)
            region_name = shodan.get("region_name")
            if not region_name:
                region_name = None
            geoip = Geolocation(
                latitude, longitude,
                country_code = country_code,
                country_name = country_name,
                region_name = region_name,
                city = city,
                zipcode = postal_code,
                metro_code = dma_code,
                area_code = area_code,
            )
            results.append(geoip)
            geoip.add_resource(info)

        # Go through every result and pick only the latest ones.
        latest = {}
        for data in shodan.get("data", []):
            if (
                not "banner" in data or
                not "ip" in data or
                not "port" in data or
                not "timestamp" in data
            ):
                Logger.log_error("Malformed results from Shodan?")
                from pprint import pformat
                Logger.log_error_more_verbose(pformat(data))
                continue
            key = (
                data["ip"],
                data["port"],
                data["banner"],
            )
            try:
                timestamp = reversed(   # DD.MM.YYYY -> (YYYY, MM, DD)
                    map(int, data["timestamp"].split(".", 2)))
            except Exception:
                continue
            # NOTE(review): reversed() returns an iterator, not a tuple;
            # comparing iterators with ">" does not compare dates under
            # Python 2 - tuple(reversed(...)) looks intended. Confirm.
            if key not in latest or timestamp > latest[key][0]:
                latest[key] = (timestamp, data)

        # Process the latest results.
        seen_isp_or_org = set()
        seen_html = set()
        for _, data in latest.values():

            # Extract all domains, but don't link them.
            for hostname in data.get("domains", []):
                if hostname not in seen_host:
                    try:
                        domain = Domain(hostname)
                    except Exception:
                        tb = traceback.format_exc()
                        Logger.log_error_more_verbose(tb)
                        continue
                    seen_host[hostname] = domain
                    results.append(domain)

            # We don't have any use for this information yet,
            # but log it so at least the user can see it.
            isp = to_utf8( data.get("isp") )
            org = to_utf8( data.get("org") )
            if org and org not in seen_isp_or_org:
                seen_isp_or_org.add(org)
                Logger.log_verbose(
                    "Host %s belongs to: %s" % (ip, org)
                )
            if isp and (not org or isp != org) and isp not in seen_isp_or_org:
                seen_isp_or_org.add(isp)
                Logger.log_verbose(
                    "IP address %s is provided by ISP: %s" % (ip, isp)
                )

            # Get the HTML content, if available (deduplicated by hash).
            raw_html = to_utf8( data.get("html") )
            if raw_html:
                hash_raw_html = hash(raw_html)
                if hash_raw_html not in seen_html:
                    seen_html.add(hash_raw_html)
                    try:
                        html = HTML(raw_html)
                    except Exception:
                        html = None
                        tb = traceback.format_exc()
                        Logger.log_error_more_verbose(tb)
                    if html:
                        html.add_resource(info)
                        results.append(html)

            # Get the banner, if available.
            raw_banner = to_utf8( data.get("banner") )
            try:
                port = int( data.get("port", "0") )
            except Exception:
                port = 0
            if raw_banner and port:
                try:
                    banner = Banner(info, raw_banner, port)
                except Exception:
                    banner = None
                    tb = traceback.format_exc()
                    Logger.log_error_more_verbose(tb)
                if banner:
                    results.append(banner)

        # Was this host located somewhere else in the past?
        for data in reversed(shodan.get("data", [])):
            try:
                timestamp = reversed(   # DD.MM.YYYY -> (YYYY, MM, DD)
                    map(int, data["timestamp"].split(".", 2)))
                old_location = data.get("location")
                if old_location:
                    old_latitude = old_location.get("latitude", latitude)
                    old_longitude = old_location.get("longitude", longitude)
                    if (
                        old_latitude is not None and
                        old_longitude is not None and
                        (old_latitude != latitude or
                         old_longitude != longitude)
                    ):
                        # Get the geoip information.
                        area_code = old_location.get("area_code")
                        if not area_code:
                            area_code = None
                        country_code = old_location.get("country_code")
                        if not country_code:
                            country_code = old_location.get("country_code3")
                            if not country_code:
                                country_code = None
                        country_name = old_location.get("country_name")
                        if not country_name:
                            country_name = None
                        city = old_location.get("city")
                        if not city:
                            city = None
                        postal_code = old_location.get("postal_code")
                        if not postal_code:
                            postal_code = None
                        region_name = old_location.get("region_name")
                        if not region_name:
                            region_name = None
                        # NOTE(review): this builds the historical record
                        # from the *current* latitude/longitude rather than
                        # old_latitude/old_longitude - looks like a bug.
                        # Confirm before changing.
                        geoip = Geolocation(
                            latitude, longitude,
                            country_code = country_code,
                            country_name = country_name,
                            region_name = region_name,
                            city = city,
                            zipcode = postal_code,
                            area_code = area_code,
                        )

                        # If this is the first time we geolocate this IP,
                        # use this information as it if were up to date.
                        if latitude is None or longitude is None:
                            latitude = old_latitude
                            longitude = old_longitude
                            results.append(geoip)
                            geoip.add_resource(info)

                        # Otherwise, just log the event.
                        else:
                            discard_data(geoip)
                            where = str(geoip)
                            when = datetime.date(*timestamp)
                            msg = "Host %s used to be located at %s on %s."
                            msg %= (ip, where, when.strftime("%B %d, %Y"))
                            Logger.log_verbose(msg)
            except Exception:
                tb = traceback.format_exc()
                Logger.log_error_more_verbose(tb)

        # Return the results.
        return results
def generate_report(self, output_file):
    """
    Write the audit results to a CSV file.

    All rows have the same format but the first. There are always 26
    columns in every row; most are for Vulnerability objects and stay
    empty for other types.

    :param output_file: path of the CSV file to create.
    """
    Logger.log_verbose(
        "Writing CSV report to file: %s" % output_file)

    # Open in binary mode: the Python 2 csv module requires the "b"
    # flag, otherwise spurious blank lines appear on some platforms.
    with open(output_file, "wb") as f:
        writer = csv.writer(f)

        # Write the first row, describing the report itself.
        report_time = datetime.utcnow()
        start_time, stop_time, run_time = parse_audit_times(
            *get_audit_times())
        row = [
            "GoLismero " + VERSION,
            1,  # format version
            Config.audit_name,
            start_time,
            stop_time,
            run_time,
            report_time,
        ] + [None] * 17 + [   # filler so the header is 26 columns too
            pickle.dumps(Config.audit_config, protocol=0).encode("hex"),
            pickle.dumps(Config.audit_scope, protocol=0).encode("hex"),
        ]
        row = [to_utf8(x) if x is not None else "" for x in row]
        writer.writerow(row)

        # Used to convert the false_positive flag to a string value.
        fp = {
            True:  1,
            False: 0,
            None:  -1,
        }

        def vuln_row(vuln):
            # Build the 26-column row for a Vulnerability object.
            # (Shared by both report modes; was duplicated before.)
            target = vuln.target
            return [
                vuln.identity,
                vuln.data_type,
                vuln.data_subtype,
                None,
                vuln.display_name,
                vuln.plugin_id,
                vuln.tool_id,
                vuln.custom_id,
                vuln.level,
                vuln.risk,
                vuln.severity,
                vuln.impact,
                vuln.cvss_base,
                vuln.cvss_score,
                vuln.cvss_vector,
                fp[vuln.false_positive],
                target.identity,
                target.display_name,
                vuln.title,
                vuln.description,
                vuln.solution,
                "\n".join(vuln.references),
                "\n".join(vuln.taxonomies),
                str(target),
                pickle.dumps(vuln, protocol=0).encode("hex"),
                pickle.dumps(target, protocol=0).encode("hex"),
            ]

        def write_row(row):
            # Serialize to UTF-8, replacing None with an empty string.
            writer.writerow(
                [to_utf8(x) if x is not None else "" for x in row])

        # Just the vulnerabilities?
        if Config.audit_config.only_vulns:

            # Dump only Vulnerability objects that are not false positives.
            for vuln in self.__iterate_data(
                    data_type=Data.TYPE_VULNERABILITY):
                if vuln.false_positive:
                    continue
                write_row(vuln_row(vuln))

        # Full database dump?
        else:

            # Dump all objects in the database.
            for data in self.__iterate_data():
                if data.data_type == Data.TYPE_VULNERABILITY:
                    row = vuln_row(data)
                else:
                    row = [
                        data.identity,
                        data.data_type,
                        data.data_subtype,
                        getattr(data, "category", None),
                        data.display_name,
                    ] + [None] * 10 + [
                        0,  # non-vulnerabilities are never false positives
                    ] + [None] * 7 + [
                        str(data),
                        None,
                        pickle.dumps(data, protocol=0).encode("hex"),
                    ]
                write_row(row)
def run(self, info): # Query PunkSPIDER. host_id = info.hostname host_id = parse_url(host_id).hostname host_id = ".".join(reversed(host_id.split("."))) d = self.query_punkspider(host_id) # Stop if we have no results. if not d: Logger.log("No results found for host: %s" % info.hostname) return # This is where we'll collect the data we'll return. results = [] # For each vulnerability... for v in d["data"]: try: # Future-proof checks. if v["protocol"] not in ("http", "https"): Logger.log_more_verbose( "Skipped non-web vulnerability: %s" % to_utf8(v["id"])) continue if v["bugType"] not in ("xss", "sqli", "bsqli"): Logger.log_more_verbose( "Skipped unknown vulnerability type: %s" % to_utf8(v["bugType"])) continue # Get the vulnerable URL, parameter and payload. url = to_utf8(v["vulnerabilityUrl"]) param = to_utf8(v["parameter"]) parsed = parse_url(url) payload = parsed.query_params[param] # Get the level. level = to_utf8(v["level"]) # Create the URL object. url_o = URL(url) results.append(url_o) # Get the vulnerability class. if v["bugType"] == "xss": clazz = XSS else: clazz = SQLInjection # Create the Vulnerability object. vuln = clazz( url_o, vulnerable_params={param: payload}, injection_point=clazz.INJECTION_POINT_URL, injection_type=to_utf8(v["bugType"]), # FIXME level=level, tool_id=to_utf8(v["id"]), ) print '------------' print vuln print type(vuln) print '------------' results.append(vuln) # Log errors. except Exception, e: tb = traceback.format_exc() Logger.log_error_verbose(str(e)) Logger.log_error_more_verbose(tb)
def deal_param_payload(self, sql_detect_type, url, method = 'GET', **kwargs):
    '''
    Insert SQL-injection payloads into the URL's parameters and run the
    detection strategy selected by sql_detect_type.

    :param sql_detect_type: one of "ERR_MSG_DETECT", "ORDER_BY_DETECT",
        "ECHO_DETECT", "BOOLEAN_DETECT" or "TIMING_DETECT".
    :param url: URL resource whose GET or POST parameters are mutated.
    :param method: "GET" or "POST"; selects which parameter set is used.
    :return: True when an injection is detected (some branches only log
        or return nothing - see notes below).
    '''
    # NOTE(review): isinstance(x, str) rejects unicode under Python 2 -
    # confirm detector names are always byte strings.
    if not isinstance(sql_detect_type, str):
        raise TypeError("Expected sql_detect_type string, type:%s" % type(sql_detect_type))
    if not isinstance(url, URL):
        raise TypeError("Expected url type, type:%s" % type(url))
    #if not isinstance(param_dict, dict):
    #    raise TypeError("Expected param_dict string, type:%s" % type(param_dict))

    # Pick the parameter set matching the HTTP method.
    # NOTE(review): any other method leaves param_dict unbound and the
    # branches below raise NameError - confirm callers only pass GET/POST.
    if method == 'GET':
        param_dict = url.url_params
    elif method == 'POST':
        param_dict = url.post_params

    is_timing_stable = True
    short_duration = 1

    def __check_if_rsp_stable_on_orig_input():
        # Probe the target twice with its original input and compare
        # response times and bodies to decide whether timing-based
        # detection is viable.
        # NOTE(review): the assignments to is_timing_stable and
        # short_duration below rebind *locals* of this nested function
        # (Python 2 has no "nonlocal"), so the outer flags are never
        # updated; additionally this helper is never called (the call
        # site below is commented out). long_duration is also unused.
        p = get_request(url = url, allow_redirects=False)
        if p.status != '200':
            is_timing_stable = False
        orig_first_time = p.elapsed
        orig_first_resp_body = p.data
        time.sleep(2)
        p = get_request(url = url, allow_redirects=False)
        if p.status != '200':
            is_timing_stable = False
        orig_second_time = p.elapsed
        orig_second_resp_body = p.data
        min_resp_time = min(orig_first_time, orig_second_time)
        max_resp_time = max(orig_first_time, orig_second_time)
        short_duration = max(RSP_SHORT_DURATION, max_resp_time) + 1
        long_duration = short_duration * 2
        # Unstable when the two baseline timings differ too much...
        if (max_resp_time - min_resp_time) > short_duration:
            is_timing_stable = False
        else:
            is_timing_stable = True
        # ...or when the two baseline bodies differ.
        if orig_first_resp_body != orig_second_resp_body:
            is_timing_stable = False

    def __check_if_rsp_stable_on_invalid_input():
        #TODO judge url is stable
        is_timing_stable = True

    #__check_if_rsp_stable_on_orig_input()

    if sql_detect_type == "ERR_MSG_DETECT":
        # Inject error-provoking payloads and look for database error
        # signatures in the response body.
        for k,v in param_dict.iteritems():
            key = to_utf8(k)
            value = to_utf8(v)
            for test_case_dict in sql_inject_detect_err_msg_test_cases:
                p = payload_muntants(url, payload = {'k': k , 'pos': 1, 'payload':test_case_dict['input'], 'type': 0}, bmethod = method)
                if self._err_msg_sql_detect(p, test_case_dict['target']):
                    #print '[+] found sql inject in url:{0}, payload:{1}'.format(req_uri, payload_param_dict)
                    print '[+] found sql inject!'
                    return True
    elif sql_detect_type == 'ORDER_BY_DETECT':
        # ORDER BY column-count probing.
        for k, v in param_dict.iteritems():
            key = to_utf8(k)
            value = to_utf8(v)
            if self._orderby_sql_detect(k = key, v = value , url = url, method = method):
                print '[+] found order by sql inject!'
                return True
    elif sql_detect_type == "ECHO_DETECT":
        # NOTE(review): called without parameters and its result is
        # discarded - looks unfinished.
        self._echo_sql_detect()
    elif sql_detect_type == "BOOLEAN_DETECT":
        print '----------- BOOLEAN_DETECT -----------------------'
        for k, v in param_dict.iteritems():
            key = to_utf8(k)
            value = to_utf8(v)
            # NOTE(review): detection result is discarded here, unlike
            # the other branches which return True on a hit.
            self._boolean_sql_detect(k = key, v = value , url = url, method = method)
    elif sql_detect_type == "TIMING_DETECT":
        print '-----------TIMING_DETECT -----------------------'
        # is_timing_stable is always True here because the stability
        # probe above is commented out.
        if is_timing_stable == True:
            for k, v in param_dict.iteritems():
                key = to_utf8(k)
                value = to_utf8(v)
                if self._timing_sql_detect(k = k, v = value, url = url, method = method, short_duration = short_duration):
                    print '[+] found time_based sql inject!'
                    return True
def run(self, info): # Query PunkSPIDER. host_id = info.hostname host_id = parse_url(host_id).hostname host_id = ".".join(reversed(host_id.split("."))) d = self.query_punkspider(host_id) # Stop if we have no results. if not d: Logger.log("No results found for host: %s" % info.hostname) return # This is where we'll collect the data we'll return. results = [] # For each vulnerability... for v in d["data"]: try: # Future-proof checks. if v["protocol"] not in ("http", "https"): Logger.log_more_verbose( "Skipped non-web vulnerability: %s" % to_utf8(v["id"])) continue if v["bugType"] not in ("xss", "sqli", "bsqli"): Logger.log_more_verbose( "Skipped unknown vulnerability type: %s" % to_utf8(v["bugType"])) continue # Get the vulnerable URL, parameter and payload. url = to_utf8(v["vulnerabilityUrl"]) param = to_utf8(v["parameter"]) parsed = parse_url(url) payload = parsed.query_params[param] # Get the level. level = to_utf8(v["level"]) # Create the URL object. url_o = URL(url) results.append(url_o) # Get the vulnerability class. if v["bugType"] == "xss": clazz = XSS else: clazz = SQLInjection # Create the Vulnerability object. vuln = clazz( url_o, vulnerable_params = { param: payload }, injection_point = clazz.INJECTION_POINT_URL, injection_type = to_utf8(v["bugType"]), # FIXME level = level, tool_id = to_utf8(v["id"]), ) results.append(vuln) # Log errors. except Exception, e: tb = traceback.format_exc() Logger.log_error_verbose(str(e)) Logger.log_error_more_verbose(tb)
def generate_report(self, output_file):
    """
    Write the audit results to a CSV file.

    :param output_file: path of the CSV file to create.
    """
    # NOTE(review): this method appears to duplicate another
    # generate_report in this file - consider consolidating.
    Logger.log_verbose("Writing CSV report to file: %s" % output_file)

    # All rows have the same format but the first.
    # There's always 26 columns in every row.
    # Most columns are for Vulnerability objects, empty for other types.
    # Read the source code for more details, it's really simple. :)

    # Open the output file.
    # NOTE(review): the Python 2 csv module wants the file opened with
    # the "b" flag ("wb"); "w" can emit extra blank lines on Windows.
    with open(output_file, "w") as f:
        writer = csv.writer(f)

        # Write the first row, describing the report itself.
        report_time = datetime.utcnow()
        start_time, stop_time, run_time = parse_audit_times(
            *get_audit_times())
        row = [
            "GoLismero " + VERSION,
            1,  # format version
            Config.audit_name,
            start_time,
            stop_time,
            run_time,
            report_time,
            # 17 filler columns keep the header 26 columns wide.
            None,
            None,
            None,
            None,
            None,
            None,
            None,
            None,
            None,
            None,
            None,
            None,
            None,
            None,
            None,
            None,
            None,
            pickle.dumps(Config.audit_config, protocol=0).encode("hex"),
            pickle.dumps(Config.audit_scope, protocol=0).encode("hex"),
        ]
        row = [to_utf8(x) if x is not None else "" for x in row]
        writer.writerow(row)

        # Used to convert the false_positive flag to a string value.
        fp = {
            True: 1,
            False: 0,
            None: -1,
        }

        # Just the vulnerabilities?
        if Config.audit_config.only_vulns:

            # Dump only Vulnerability objects that are not false positives.
            for vuln in self.__iterate_data(
                    data_type=Data.TYPE_VULNERABILITY):
                if vuln.false_positive:
                    continue
                target = vuln.target
                row = [
                    vuln.identity,
                    vuln.data_type,
                    vuln.data_subtype,
                    None,
                    vuln.display_name,
                    vuln.plugin_id,
                    vuln.tool_id,
                    vuln.custom_id,
                    vuln.level,
                    vuln.risk,
                    vuln.severity,
                    vuln.impact,
                    vuln.cvss_base,
                    vuln.cvss_score,
                    vuln.cvss_vector,
                    fp[vuln.false_positive],
                    target.identity,
                    target.display_name,
                    vuln.title,
                    vuln.description,
                    vuln.solution,
                    "\n".join(vuln.references),
                    "\n".join(vuln.taxonomies),
                    str(target),
                    pickle.dumps(vuln, protocol=0).encode("hex"),
                    pickle.dumps(target, protocol=0).encode("hex"),
                ]
                row = [to_utf8(x) if x is not None else "" for x in row]
                writer.writerow(row)

        # Full database dump?
        else:

            # Dump all objects in the database.
            for data in self.__iterate_data():
                if data.data_type == Data.TYPE_VULNERABILITY:
                    vuln = data
                    target = vuln.target
                    row = [
                        vuln.identity,
                        vuln.data_type,
                        vuln.data_subtype,
                        None,
                        vuln.display_name,
                        vuln.plugin_id,
                        vuln.tool_id,
                        vuln.custom_id,
                        vuln.level,
                        vuln.risk,
                        vuln.severity,
                        vuln.impact,
                        vuln.cvss_base,
                        vuln.cvss_score,
                        vuln.cvss_vector,
                        fp[vuln.false_positive],
                        target.identity,
                        target.display_name,
                        vuln.title,
                        vuln.description,
                        vuln.solution,
                        "\n".join(vuln.references),
                        "\n".join(vuln.taxonomies),
                        str(target),
                        pickle.dumps(vuln, protocol=0).encode("hex"),
                        pickle.dumps(target, protocol=0).encode("hex"),
                    ]
                else:
                    # Non-vulnerability objects fill only the generic
                    # columns; the rest stay empty.
                    row = [
                        data.identity,
                        data.data_type,
                        data.data_subtype,
                        getattr(data, "category", None),
                        data.display_name,
                        None,
                        None,
                        None,
                        None,
                        None,
                        None,
                        None,
                        None,
                        None,
                        None,
                        0,
                        None,
                        None,
                        None,
                        None,
                        None,
                        None,
                        None,
                        str(data),
                        None,
                        pickle.dumps(data, protocol=0).encode("hex"),
                    ]
                row = [to_utf8(x) if x is not None else "" for x in row]
                writer.writerow(row)