def test_to_json(self):
    """Info.to_json() output must survive a json dumps/loads round-trip intact."""
    info = Info('Blind SQL injection vulnerability', MockInfo.LONG_DESC,
                1, 'plugin_name')
    info['test'] = 'foo'
    info.add_to_highlight('abc', 'def')

    # Serialize and deserialize to prove the dict is json-safe
    round_tripped = json.loads(json.dumps(info.to_json()))

    # Each json key paired with the accessor that must produce it
    expectations = [
        ('name', info.get_name()),
        ('url', str(info.get_url())),
        ('var', info.get_token_name()),
        ('response_ids', info.get_id()),
        ('vulndb_id', info.get_vulndb_id()),
        ('desc', info.get_desc(with_id=False)),
        ('long_description', info.get_long_description()),
        ('fix_guidance', info.get_fix_guidance()),
        ('fix_effort', info.get_fix_effort()),
        ('tags', info.get_tags()),
        ('wasc_ids', info.get_wasc_ids()),
        ('wasc_urls', list(info.get_wasc_urls())),
        ('cwe_urls', list(info.get_cwe_urls())),
        ('references', BLIND_SQLI_REFS),
        ('owasp_top_10_references', BLIND_SQLI_TOP10_REFS),
        ('plugin_name', info.get_plugin_name()),
        ('severity', info.get_severity()),
        ('attributes', info.copy()),
        ('highlight', list(info.get_to_highlight())),
    ]

    for key, expected in expectations:
        self.assertEqual(round_tripped[key], expected)
def test_to_json(self):
    """A json.dumps/json.loads round-trip of Info.to_json() keeps every field."""
    info = Info("Blind SQL injection vulnerability",
                MockInfo.LONG_DESC, 1, "plugin_name")
    info["test"] = "foo"
    info.add_to_highlight("abc", "def")

    # Dump to a json string and parse it back before checking anything
    serialized = json.dumps(info.to_json())
    data = json.loads(serialized)

    self.assertEqual(data["name"], info.get_name())
    self.assertEqual(data["url"], str(info.get_url()))
    self.assertEqual(data["var"], info.get_token_name())
    self.assertEqual(data["response_ids"], info.get_id())
    self.assertEqual(data["vulndb_id"], info.get_vulndb_id())
    self.assertEqual(data["desc"], info.get_desc(with_id=False))
    self.assertEqual(data["long_description"], info.get_long_description())
    self.assertEqual(data["fix_guidance"], info.get_fix_guidance())
    self.assertEqual(data["fix_effort"], info.get_fix_effort())
    self.assertEqual(data["tags"], info.get_tags())
    self.assertEqual(data["wasc_ids"], info.get_wasc_ids())
    self.assertEqual(data["wasc_urls"], list(info.get_wasc_urls()))
    self.assertEqual(data["cwe_urls"], list(info.get_cwe_urls()))
    self.assertEqual(data["references"], BLIND_SQLI_REFS)
    self.assertEqual(data["owasp_top_10_references"], BLIND_SQLI_TOP10_REFS)
    self.assertEqual(data["plugin_name"], info.get_plugin_name())
    self.assertEqual(data["severity"], info.get_severity())
    self.assertEqual(data["attributes"], info.copy())
    self.assertEqual(data["highlight"], list(info.get_to_highlight()))
def _grep_worker(self, request, response, kb_key, domain=None):
    """
    Helper method for using in self.grep()

    :param request: The HTTP request
    :param response: The HTTP response
    :param kb_key: Knowledge base dict key
    :param domain: Target domain for get_emails filter
    :return: None
    """
    try:
        dp = parser_cache.dpc.get_document_parser_for(response)
    except BaseFrameworkException:
        msg = 'Failed to get document parser for "%s" at get_emails.'
        om.out.debug(msg % response.get_url())
        return

    # Build the mail -> info-object index ONCE. The previous code queried
    # the KB and rebuilt this dict for every email found in the response,
    # which is O(emails * kb_infos) and hits the KB repeatedly.
    email_map = {}
    for info_obj in kb.kb.get('emails', 'emails'):
        email_map[info_obj['mail']] = info_obj

    # The URL is invariant across the loop, hoist it
    url = response.get_url()

    for mail_address in dp.get_emails(domain):
        # Reduce false positives: skip addresses we sent ourselves
        if request.sent(mail_address):
            continue

        # Email addresses are case insensitive
        mail_address = mail_address.lower()

        if mail_address not in email_map:
            # Create a new info object, and report it
            desc = 'The mail account: "%s" was found in: \n- %s'\
                   ' - In request with id: %s.'
            desc = desc % (mail_address, url, response.id)

            i = Info('Exposed email address', desc, response.id,
                     self.get_name())
            i.set_url(url)
            i['mail'] = mail_address
            i['url_list'] = {url}
            i['user'] = mail_address.split('@')[0]
            i.add_to_highlight(mail_address)

            self.kb_append('emails', kb_key, i)

            # Keep the local index in sync so a duplicate address in this
            # same response updates the new info instead of re-reporting it
            # (matches the old per-iteration KB re-query behavior)
            email_map[mail_address] = i
        else:
            # Get the corresponding info object
            i = email_map[mail_address]

            if url not in i['url_list']:
                # This email was already found in some other URL: extend
                # the id list, the description and the url_list
                id_list_of_info = i.get_id()
                id_list_of_info.append(response.id)
                i.set_id(id_list_of_info)
                i.set_url(url)

                desc = i.get_desc()
                desc += '\n- %s - In request with id: %s.'
                desc = desc % (url, response.id)
                i.set_desc(desc)

                i['url_list'].add(url)
def _grep_worker(self, request, response, kb_key, domain=None):
    """
    Helper method for using in self.grep()

    :param request: The HTTP request
    :param response: The HTTP response
    :param kb_key: Knowledge base dict key
    :param domain: Target domain for get_emails filter
    :return: None
    """
    try:
        document_parser = parser_cache.dpc.get_document_parser_for(response)
    except BaseFrameworkException:
        msg = ('If I can\'t parse the document, I won\'t be able to find'
               ' any emails. Ignoring the response for "%s".')
        om.out.debug(msg % response.get_url())
        return

    for mail_address in document_parser.get_emails(domain):
        # Reduce false positives
        if request.sent(mail_address):
            continue

        # Email address are case insensitive
        mail_address = mail_address.lower()
        url = response.get_url()

        # Index the KB email infos by their mail address
        email_map = {info_obj['mail']: info_obj
                     for info_obj in kb.kb.get('emails', 'emails')}

        if mail_address in email_map:
            # Work on the already existing info object
            i = email_map[mail_address]

            if url not in i['url_list']:
                # This email was already found in some other URL:
                # extend the id list, the description and the url_list
                id_list_of_info = i.get_id()
                id_list_of_info.append(response.id)
                i.set_id(id_list_of_info)
                i.set_url(url)

                desc = i.get_desc()
                desc += '\n- %s - In request with id: %s.'
                desc = desc % (url, response.id)
                i.set_desc(desc)

                i['url_list'].add(url)
        else:
            # First sighting: create a new info object and report it
            desc = ('The mail account: "%s" was found in: \n- %s'
                    ' - In request with id: %s.')
            desc = desc % (mail_address, url, response.id)

            i = Info('Exposed email address', desc, response.id,
                     self.get_name())
            i.set_url(url)
            i['mail'] = mail_address
            i['url_list'] = {url}
            i['user'] = mail_address.split('@')[0]
            i.add_to_highlight(mail_address)

            self.kb_append('emails', kb_key, i)