Example #1
def mapper(output_file):
    # Read newline-separated seed URLs from stdin; get_stdin, MassCrawl,
    # logger, fuzz and proxy_scan_list are assumed to be defined elsewhere
    # in this module.
    seeds = get_stdin()
    mc = MassCrawl(seeds)
    logger.info("Adding seeds:")
    for seed in seeds:
        logger.info(mc.get_domain_from_url(seed))
        mc.add_to_scope_from_url(seed)
    # Crawl the in-scope seeds to collect fuzzing targets.
    mc.crawl(depth=3, num_threads=25, time_per_url=5, request_timeout=4,
             proxy_list=proxy_scan_list)
    if mc.targets:
        # Fuzz every discovered target and write one result line per domain.
        results = fuzz(mc.targets)
        for result in results:
            domain = mc.get_domain_from_url(result.fuzzy_target.url)
            output_file.write("domain: %s \\ results: %s\n" % (domain, result))
    else:
        logger.error("Crawl produced no targets; dump of seeds: %s", seeds)
        # There is no per-result domain when nothing was found, so record an
        # error line for each seed's domain instead.
        for seed in seeds:
            domain = mc.get_domain_from_url(seed)
            output_file.write("domain: %s \\ ERROR: no targets\n" % domain)
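A minimal driver sketch for this mapper, assuming seed URLs arrive on stdin and results go to a local file (the file name here is illustrative, not part of the original example):

if __name__ == "__main__":
    # Hypothetical invocation: open a results file and let mapper() read
    # its seeds from stdin via get_stdin().
    with open("mapper_results.txt", "w") as output_file:
        mapper(output_file)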