def report(scantask): """Displays detailed report information to the user. """ if scantask.out: out = open(scantask.out, 'a') else: out = sys.stdout clues = scantask.analyzed hits = analysis.hits(clues) logger = Halberd.logger.getLogger() # xxx This could be passed by the caller in order to avoid recomputation in # case the clues needed a re-analysis. diff_fields = analysis.diff_fields(clues) out.write('=' * 70 + '\n') out.write('%s' % scantask.url) if scantask.addr: out.write(' (%s)' % scantask.addr) out.write(': %d real server(s)\n' % len(clues)) out.write('=' * 70 + '\n') for num, clue in enumerate(clues): assert hits > 0 info = clue.info out.write('\n') # out.write('-' * 70 + '\n') out.write('server %d: %s\n' % (num + 1, info['server'].lstrip())) out.write('-' * 70 + '\n\n') out.write('difference: %d seconds\n' % clue.diff) out.write('successful requests: %d hits (%.2f%%)\n' \ % (clue.getCount(), clue.getCount() * 100 / float(hits))) if info['contloc']: out.write('content-location: %s\n' % info['contloc'].lstrip()) if len(info['cookies']) > 0: out.write('cookie(s):\n') for cookie in info['cookies']: out.write(' %s\n' % cookie.lstrip()) out.write('header fingerprint: %s\n' % info['digest']) different = [(field, value) for field, value in clue.headers \ if field in diff_fields] if different: out.write('different headers:\n') idx = 1 for field, value in different: out.write(' %d. %s:%s\n' % (idx, field, value)) idx += 1 if scantask.debug: import pprint out.write('headers:\n') pprint.pprint(clue.headers, out)
def report(scantask): """Displays detailed report information to the user. """ if scantask.out: out = open(scantask.out, 'a') else: out = sys.stdout clues = scantask.analyzed hits = analysis.hits(clues) logger = Halberd.logger.getLogger() # xxx This could be passed by the caller in order to avoid recomputation in # case the clues needed a re-analysis. diff_fields = analysis.diff_fields(clues) out.write('=' * 70 + '\n') out.write('%s' % scantask.url) if scantask.addr: out.write(' (%s)' % scantask.addr) out.write(': %d real server(s)\n' % len(clues)) out.write('=' * 70 + '\n') for num, clue in enumerate(clues): assert hits > 0 info = clue.info out.write('\n') # out.write('-' * 70 + '\n') out.write('server %d: %s\n' % (num + 1, info['server'].lstrip())) out.write('-' * 70 + '\n\n') out.write('difference: %d seconds\n' % clue.diff) out.write('successful requests: %d hits (%.2f%%)\n' % (clue.get_count(), clue.get_count() * 100 / float(hits))) if info['contloc']: out.write('content-location: %s\n' % info['contloc'].lstrip()) if len(info['cookies']) > 0: out.write('cookie(s):\n') for cookie in info['cookies']: out.write(' %s\n' % cookie.lstrip()) out.write('header fingerprint: %s\n' % info['digest']) different = [(field, value) for field, value in clue.headers if field in diff_fields] if different: out.write('different headers:\n') idx = 1 for field, value in different: out.write(' %d. %s:%s\n' % (idx, field, value)) idx += 1 if scantask.debug: import pprint out.write('headers:\n') pprint.pprint(clue.headers, out)
def _report(self, scantask):
    """
    Displays detailed report information to the user and saves the data to
    the kb.
    """
    if len(scantask.analyzed) == 1:
        om.out.information('The site: ' + scantask.url + " doesn't seem to"
                           " have an HTTP load balancer configuration.")
    else:
        clues = scantask.analyzed
        hits = halberd_analysis.hits(clues)

        # xxx This could be passed by the caller in order to avoid
        # recomputation in case the clues needed a re-analysis.
        diff_fields = halberd_analysis.diff_fields(clues)

        om.out.information('=' * 70)
        om.out.information('%s' % scantask.url, newLine=False)
        if scantask.addr:
            om.out.information(' (%s)' % scantask.addr, newLine=False)
        om.out.information(': %d real server(s)' % len(clues))
        om.out.information('=' * 70)

        for num, clue in enumerate(clues):
            assert hits > 0
            info = clue.info

            om.out.information('')
            om.out.information('server %d: %s' % (num + 1,
                                                  info['server'].lstrip()))
            om.out.information('-' * 70 + '\n')

            # This is added so other w3af plugins can read the halberd results.
            # If needed by other plugins, I could fill up the info object with
            # more data about the different headers, time, etc...
            i = infokb.info()
            i['server'] = info['server'].lstrip()
            i['serverNumber'] = num + 1
            kb.kb.append(self, 'halberd', i)

            om.out.information('difference: %d seconds' % clue.diff)

            om.out.information('successful requests: %d hits (%.2f%%)' \
                               % (clue.getCount(),
                                  clue.getCount() * 100 / float(hits)))

            if info['contloc']:
                om.out.information('content-location: %s'
                                   % info['contloc'].lstrip())

            if len(info['cookies']) > 0:
                om.out.information('cookie(s):')
                for cookie in info['cookies']:
                    om.out.information(' %s' % cookie.lstrip())

            om.out.information('header fingerprint: %s' % info['digest'])

            different = [(field, value) for field, value in clue.headers \
                         if field in diff_fields]
            if different:
                om.out.information('different headers:')
                idx = 1
                for field, value in different:
                    om.out.information(' %d. %s:%s' % (idx, field, value))
                    idx += 1

            if scantask.debug:
                import pprint
                import StringIO
                tmp = StringIO.StringIO()
                om.out.information('headers:')
                pprint.pprint(clue.headers, stream=tmp, indent=2)
                # Emit the captured pretty-printed headers, not the StringIO
                # object itself.
                om.out.information(tmp.getvalue())

            om.out.information('\n')
def _report(self, scantask):
    """
    Displays detailed report information to the user and saves the data to
    the kb.

    :return: None.
    """
    if len(scantask.analyzed) == 1:
        msg = '"%s" doesn\'t seem to have an HTTP load balancer'\
              ' configuration.'
        om.out.information(msg % scantask.url)
    else:
        clues = scantask.analyzed
        hits = halberd_analysis.hits(clues)

        # xxx This could be passed by the caller in order to avoid
        # recomputation in case the clues needed a re-analysis.
        diff_fields = halberd_analysis.diff_fields(clues)

        desc = 'Target URL for HTTP load balancer detection: %s\n'\
               'Number of real server(s) detected: %d\n'\
               'Server information:\n %s'
        real_servers = ' %s\n' % scantask.addr
        desc = desc % (scantask.url, len(clues), real_servers)

        om.out.information(desc)

        for num, clue in enumerate(clues):
            assert hits > 0
            clue_info = clue.info

            om.out.information('')
            om.out.information('server %d: %s' % (num + 1,
                                                  clue_info['server'].lstrip()))
            om.out.information('-' * 70 + '\n')

            om.out.information('difference: %d seconds' % clue.diff)
            om.out.information('successful requests: %d hits (%.2f%%)' %
                               (clue.get_count(),
                                clue.get_count() * 100 / float(hits)))

            if clue_info['contloc']:
                om.out.information('content-location: %s' %
                                   clue_info['contloc'].lstrip())

            if len(clue_info['cookies']) > 0:
                om.out.information('cookie(s):')
                for cookie in clue_info['cookies']:
                    om.out.information(' %s' % cookie.lstrip())

            om.out.information('header fingerprint: %s' % clue_info['digest'])

            different = [(field, value) for field, value in clue.headers
                         if field in diff_fields]
            if different:
                om.out.information('different headers:')
                idx = 1
                for field, value in different:
                    om.out.information(' %d. %s:%s' % (idx, field, value))
                    idx += 1

            if scantask.debug:
                tmp = StringIO.StringIO()
                om.out.information('headers:')
                pprint.pprint(clue.headers, stream=tmp, indent=2)
                # Emit the captured pretty-printed headers, not the StringIO
                # object itself.
                om.out.information(tmp.getvalue())

            om.out.information('\n')

        # This is added so other w3af plugins can read the halberd results.
        # If needed by other plugins, I could fill up the info object with more
        # data about the different headers, time, etc...
        i = Info('HTTP load balancer detected', desc, 1, self.get_name())
        i['server'] = clue_info['server'].lstrip()
        i['server_number'] = len(clues)

        kb.kb.append(self, 'halberd', i)
def _report(self, scantask):
    """
    Displays detailed report information to the user and saves the data to
    the kb.

    :return: None.
    """
    if len(scantask.analyzed) == 1:
        msg = '"%s" doesn\'t seem to have an HTTP load balancer' " configuration."
        om.out.information(msg % scantask.url)
    else:
        clues = scantask.analyzed
        hits = halberd_analysis.hits(clues)

        # In some strange cases, we have no clues about the remote
        # server. We just need to return in this case.
        if not len(clues):
            return

        # xxx This could be passed by the caller in order to avoid
        # recomputation in case the clues needed a re-analysis.
        diff_fields = halberd_analysis.diff_fields(clues)

        desc = (
            "Target URL for HTTP load balancer detection: %s\n"
            "Number of real server(s) detected: %d\n"
            "Server information:\n %s"
        )
        real_servers = " %s\n" % scantask.addr
        desc = desc % (scantask.url, len(clues), real_servers)

        om.out.information(desc)

        for num, clue in enumerate(clues):
            assert hits > 0
            clue_info = clue.info

            om.out.information("")
            om.out.information("server %d: %s" % (num + 1, clue_info["server"].lstrip()))
            om.out.information("-" * 70 + "\n")

            om.out.information("difference: %d seconds" % clue.diff)
            om.out.information(
                "successful requests: %d hits (%.2f%%)"
                % (clue.get_count(), clue.get_count() * 100 / float(hits))
            )

            if clue_info["contloc"]:
                om.out.information("content-location: %s" % clue_info["contloc"].lstrip())

            if len(clue_info["cookies"]) > 0:
                om.out.information("cookie(s):")
                for cookie in clue_info["cookies"]:
                    om.out.information(" %s" % cookie.lstrip())

            om.out.information("header fingerprint: %s" % clue_info["digest"])

            different = [(field, value) for field, value in clue.headers if field in diff_fields]
            if different:
                om.out.information("different headers:")
                idx = 1
                for field, value in different:
                    om.out.information(" %d. %s:%s" % (idx, field, value))
                    idx += 1

            if scantask.debug:
                tmp = StringIO.StringIO()
                om.out.information("headers:")
                pprint.pprint(clue.headers, stream=tmp, indent=2)
                # Emit the captured pretty-printed headers, not the StringIO
                # object itself.
                om.out.information(tmp.getvalue())

            om.out.information("\n")

        # This is added so other w3af plugins can read the halberd results.
        # If needed by other plugins, I could fill up the info object with more
        # data about the different headers, time, etc...
        i = Info("HTTP load balancer detected", desc, 1, self.get_name())
        i["server"] = clue_info["server"].lstrip()
        i["server_number"] = len(clues)

        kb.kb.append(self, "halberd", i)
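# The kb.kb.append(self, 'halberd', i) call above is what makes the results
# visible to the rest of w3af. A minimal consumer sketch, assuming the
# knowledge base module lives at w3af.core.data.kb.knowledge_base and that
# kb.kb.get(plugin_name, key) returns the Info objects appended under that
# key; the helper name list_real_servers is hypothetical and only for
# illustration.
import w3af.core.data.kb.knowledge_base as kb


def list_real_servers():
    # Each Info stored by _report() carries the 'server' banner string and
    # the 'server_number' count filled in above.
    return [(info['server'], info['server_number'])
            for info in kb.kb.get('halberd', 'halberd')]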