Example #1
def test_crawl(self):
    # baseline_target_urls and MARGIN come from the surrounding test module.
    crawler = MassCrawl(seeds=self.unicode_seeds)
    crawler.crawl(num_threads=4, time_per_url=5, request_timeout=3,
                  proxy_list=[{}])
    accumulated_target_urls = [x.url for x in crawler.targets]
    # The right way:
    # self.assertEqual(baseline_target_urls, accumulated_target_urls)
    # The decidedly-not-right way, to work around inconsistent results from
    # live targets: assert that the two lists overlap within MARGIN items
    # of the baseline.
    intersections = set(baseline_target_urls).intersection(accumulated_target_urls)
    intersect_length = len(intersections)
    base_length = len(baseline_target_urls)
    self.assertAlmostEqual(intersect_length, base_length, delta=MARGIN)
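
The tolerance trick above is worth a standalone look: unittest's assertAlmostEqual accepts a delta keyword, so a test can accept a bounded amount of drift between a recorded baseline and a live crawl instead of demanding exact equality. A minimal self-contained sketch, with made-up URLs and a hypothetical MARGIN of 2:

import unittest

MARGIN = 2  # hypothetical tolerance; the real test suite defines its own

class TestOverlapTolerance(unittest.TestCase):
    def test_overlap_within_margin(self):
        baseline = ["http://a.example/", "http://b.example/",
                    "http://c.example/", "http://d.example/"]
        # A live crawl that found most, but not all, of the baseline targets.
        crawled = ["http://a.example/", "http://b.example/",
                   "http://c.example/", "http://x.example/"]
        overlap = len(set(baseline).intersection(crawled))
        # Passes as long as the overlap is within MARGIN items of the
        # baseline size (here |3 - 4| = 1 <= 2).
        self.assertAlmostEqual(overlap, len(baseline), delta=MARGIN)

if __name__ == "__main__":
    unittest.main()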
Example #2
def mapper(output_file):
    # MassCrawl, get_stdin, fuzz, logger, and proxy_scan_list are defined
    # in the surrounding module.
    seeds = get_stdin()
    mc = MassCrawl(seeds)
    logger.info("Adding seeds:")
    for seed in seeds:
        logger.info(mc.get_domain_from_url(seed))
        mc.add_to_scope_from_url(seed)
    mc.crawl(depth=3, num_threads=25, time_per_url=5, request_timeout=4,
             proxy_list=proxy_scan_list)
    if mc.targets:
        results = fuzz(mc.targets)
        for result in results:
            domain = mc.get_domain_from_url(result.fuzzy_target.url)
            output_file.write("domain: %s \\\\ results: %s\n" % (domain, result))
    else:
        # `domain` is never assigned on this path, so report the seeds instead
        # of referencing an undefined name.
        logger.error("URL had no mc.targets for some reason... dump of seeds: %s", seeds)
        output_file.write("ERROR: no targets for seeds: %s\n" % seeds)
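
The mapper depends on a get_stdin helper that is not shown in the snippet. A minimal sketch of what it presumably does, assuming seeds arrive as newline-delimited URLs on standard input:

import sys

def get_stdin():
    # Collect the non-empty, stripped lines piped to the process as seed URLs.
    return [line.strip() for line in sys.stdin if line.strip()]

With a helper like that, the mapper would be driven from a shell pipeline, e.g. cat seeds.txt | python run_mapper.py, where the (hypothetical) run_mapper.py opens an output file and calls mapper(output_file).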