def process_queue(webserver_queue, nikto_folder, args):
    while True:
        webserver = webserver_queue.get()
        url = "{}://{}:{}".format(webserver['service_name'], webserver['ipv4'],
                                  webserver['port'])
        if not utils.check_url(url)[0]:
            # Mark the item done even when skipping so webserver_queue.join() can return.
            webserver_queue.task_done()
            continue
        LOG.debug("Working on url: {}:{}".format(webserver['ipv4'],
                                                 webserver['port']))
        command, html_path = create_command(webserver, nikto_folder, args.proxy)
        text_output = run_commands.bash_command(command)
        run_commands.create_html_file(text_output, command, html_path)
        webserver_queue.task_done()
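# A minimal sketch of the create_command helper the nikto worker above relies on,
# assuming it builds a nikto command line from the webserver dict and returns it
# together with the HTML report path. The flags shown (-h, -p, -ssl, -useproxy) are
# real nikto options, but the helper body and file naming are assumptions, not the
# repo's actual implementation.
import os


def create_command(webserver, nikto_folder, proxy=None):
    # e.g. nikto_10.0.0.1_443.html inside the nikto output folder (assumed naming)
    html_path = os.path.join(
        nikto_folder, "nikto_{}_{}.html".format(webserver['ipv4'], webserver['port']))
    command = "nikto -h {} -p {}".format(webserver['ipv4'], webserver['port'])
    if webserver['service_name'] == 'https':
        command += " -ssl"  # force SSL for HTTPS services
    if proxy:
        command += " -useproxy {}".format(proxy)  # route the scan through a proxy
    return command, html_path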
def process_queue(args, url_queue, imaged_urls, urls):
    while True:
        url = url_queue.get()
        if not args.force:
            file_name = get_filename(args.output_dir, url)
            if os.path.isfile(file_name):
                with LOG_LOCK:
                    LOG.info(
                        "Skipping {url} as it has already been done (use -f to force)."
                        .format(url=url))
                url_queue.task_done()
                continue
        # Use requests to check that the URL is valid (requests is much faster).
        valid, end_url = utils.check_url(url)
        with LOG_LOCK:
            LOG.debug("check_url results: {}, {}".format(valid, end_url))
        if end_url in imaged_urls:
            with LOG_LOCK:
                LOG.info("Skipping, already took an image of this end_url {}".format(
                    end_url))
            url_queue.task_done()
            continue
        # Check if redirected and then if the end_url is in urls
        parsed_url = urlparse(end_url)
        if parsed_url.scheme == 'https' and parsed_url.port == 443:
            # remove port
            end_url = 'https://' + parsed_url.netloc.split(":")[0] + parsed_url.path
        elif parsed_url.scheme == 'http' and parsed_url.port == 80:
            # remove port
            end_url = 'http://' + parsed_url.netloc.split(":")[0] + parsed_url.path
        if end_url != url:
            if end_url in urls:
                with LOG_LOCK:
                    LOG.info("Skipping, redirected to a url later in the list: {}".
                             format(end_url))
                url_queue.task_done()
                continue
        # If url is valid, take the screenshot and then add end_url to imaged_urls list
        if valid:
            take_screenshot(url, urls, args)
            imaged_urls.append(end_url)
        url_queue.task_done()
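# A minimal sketch of how the screenshot worker above could be driven, assuming the
# caller fills a queue.Queue with URLs and starts a handful of daemon threads. The
# run_workers name, thread count, and shared imaged_urls list are illustrative
# assumptions, not the repo's actual wiring.
import queue
import threading


def run_workers(args, urls, num_threads=5):
    url_queue = queue.Queue()
    imaged_urls = []  # shared list of final URLs that have already been captured
    for url in urls:
        url_queue.put(url)
    for _ in range(num_threads):
        worker = threading.Thread(
            target=process_queue, args=(args, url_queue, imaged_urls, urls))
        worker.daemon = True  # let the process exit once the queue is drained
        worker.start()
    url_queue.join()  # block until every URL has been task_done()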
def main(args):
    utils.dir_exists(args.output, True)
    zap = ZAPv2(apikey=config.ZAP_API, proxies=config.ZAP_PROXIES)  # pylint: disable=unexpected-keyword-arg
    # Create new session
    try:
        zap.core.new_session(args.output)
    except requests.exceptions.ProxyError:
        LOG.error("Couldn't attach to ZAP. Is it running?")
        return
    urls = utils.parse_webserver_urls(args.input)
    for url in urls:
        if not utils.check_url(url)[0]:
            continue
        run_zap_attack(url, zap)
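# A minimal sketch of the run_zap_attack helper used above, assuming it drives the
# python-owasp-zap-v2.4 client through a spider pass followed by an active scan.
# zap.spider.scan/status and zap.ascan.scan/status are part of the official client,
# but the helper itself and the polling interval are assumptions about the repo's code.
import time


def run_zap_attack(url, zap):
    LOG.info("Spidering {}".format(url))
    spider_id = zap.spider.scan(url)
    while int(zap.spider.status(spider_id)) < 100:
        time.sleep(5)  # wait for the spider to finish
    LOG.info("Active scanning {}".format(url))
    scan_id = zap.ascan.scan(url)
    while int(zap.ascan.status(scan_id)) < 100:
        time.sleep(5)  # wait for the active scan to finish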
def main(args):
    testssl_folder = os.path.join(args.output, "testssl")
    utils.dir_exists(testssl_folder, True)
    for url in utils.parse_webserver_urls(args.input):
        if not utils.uses_encryption(url):
            LOG.debug("Skipping, no encryption: {}".format(url))
            continue
        if not utils.check_url(url)[0]:
            continue
        LOG.info("Testing url: {}".format(url))
        testssl_command, html_output = create_command(url, testssl_folder)
        text_output = run_commands.bash_command(testssl_command)
        html_output = run_commands.create_html_file(text_output, testssl_command,
                                                    html_output)
        LOG.debug("Saving output to {}".format(html_output))
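# A minimal sketch of the create_command helper assumed above for testssl: it builds
# a testssl.sh command line and an HTML report path derived from the URL. The binary
# name, the --color 0 flag, and the file naming are assumptions, not the repo's
# actual implementation.
import os
from urllib.parse import urlparse


def create_command(url, testssl_folder):
    parsed = urlparse(url)
    # e.g. testssl_example.com_443.html (assumed naming scheme)
    html_output = os.path.join(
        testssl_folder,
        "testssl_{}_{}.html".format(parsed.hostname, parsed.port or 443))
    # --color 0 keeps the text output free of ANSI escapes before HTML conversion
    testssl_command = "testssl.sh --color 0 {}".format(url)
    return testssl_command, html_output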
def main(args):  # noqa
    utils.dir_exists(args.output_dir, True)
    run_update()
    tested = 0
    down = 0
    timeout = 0
    received_403 = 0
    not_wordpress = 0
    wordpress = 0
    stackerror = 0
    for url in utils.parse_webserver_urls(args.input):
        if utils.check_url(url)[0]:
            tested += 1
            command, html_output = create_command(url, args.output_dir)
            results = run_command_tee_aha(command, html_output)
            if results == "down":
                down += 1
            elif results == "403":
                received_403 += 1
            elif results == "timeout":
                timeout += 1
            elif results == "not wordpress":
                not_wordpress += 1
            elif results == "wordpress":
                wordpress += 1
            elif results == "stackerror":
                stackerror += 1
    LOG.info("Finished testing:")
    LOG.info("Total sites tested {} - (some sites skipped based on response)".format(tested))
    if down != 0:
        LOG.info("Websites that appeared to be down: {}".format(down))
    if timeout != 0:
        LOG.info("Websites that timed out: {}".format(timeout))
    if received_403 != 0:
        LOG.info("Websites that responded with a 403: {}".format(received_403))
    if stackerror != 0:
        LOG.info("Stack error received: {}".format(stackerror))
    if not_wordpress != 0:
        LOG.info("Websites that do not appear to be running WordPress: {}".format(not_wordpress))
    LOG.info("Total running WordPress: {}".format(wordpress))
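# A minimal sketch of the run_command_tee_aha helper assumed above: it shells out to
# wpscan, writes an HTML report by piping the captured output through aha (the ANSI
# HTML Adapter), and classifies the run from the captured text. The output markers
# used below are illustrative assumptions, not wpscan's actual messages.
import subprocess


def run_command_tee_aha(command, html_output):
    proc = subprocess.run(command, shell=True, capture_output=True, text=True)
    output = proc.stdout
    with open(html_output, "w") as handle:
        # aha reads ANSI-coloured text on stdin and emits HTML on stdout
        subprocess.run(["aha"], input=output, stdout=handle, text=True)
    lowered = output.lower()
    if "timed out" in lowered or "timeout" in lowered:
        return "timeout"
    if "403" in lowered:
        return "403"
    if "does not seem to be running wordpress" in lowered:
        return "not wordpress"
    if "seems to be down" in lowered:
        return "down"
    if "stack" in lowered and "error" in lowered:
        return "stackerror"
    return "wordpress"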
def main(args):
    utils.dir_exists(args.output, True)
    for url in utils.parse_webserver_urls(args.input):
        if utils.check_url(url)[0]:
            command, html_output = create_command(url, args.output)
            run_whatweb(command, html_output)
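# A minimal sketch of the run_whatweb helper assumed above, mirroring the pattern the
# nikto and testssl runners use: execute the command and wrap the text output in an
# HTML report. The helper body is an assumption, not the repo's actual implementation.
def run_whatweb(command, html_output):
    text_output = run_commands.bash_command(command)
    run_commands.create_html_file(text_output, command, html_output)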