def setUp(self):
    self.spider = BaseSpider('scrapytest.org')
    self.stats = StatsCollector()
    self.stats.open_spider(self.spider)
    self.mw = DepthMiddleware(1, self.stats, True)

class TestDepthMiddleware(TestCase):

    def setUp(self):
        crawler = get_crawler(Spider)
        self.spider = crawler._create_spider('scrapytest.org')
        self.stats = StatsCollector(crawler)
        self.stats.open_spider(self.spider)
        # Depth limit of 1, with verbose per-depth stats enabled
        self.mw = DepthMiddleware(1, self.stats, True)

    def test_process_spider_output(self):
        req = Request('http://scrapytest.org')
        resp = Response('http://scrapytest.org')
        resp.request = req
        result = [Request('http://scrapytest.org')]

        # First pass: the response's request carries no depth yet, so the
        # yielded request lands at depth 1 and passes the limit.
        out = list(self.mw.process_spider_output(resp, result, self.spider))
        self.assertEquals(out, result)

        rdc = self.stats.get_value('request_depth_count/1', spider=self.spider)
        self.assertEquals(rdc, 1)

        # Second pass: the response's request is already at the maximum
        # depth, so the yielded request (depth 2) is filtered out.
        req.meta['depth'] = 1
        out2 = list(self.mw.process_spider_output(resp, result, self.spider))
        self.assertEquals(out2, [])

        rdm = self.stats.get_value('request_depth_max', spider=self.spider)
        self.assertEquals(rdm, 1)

    def tearDown(self):
        self.stats.close_spider(self.spider, '')

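# Minimal import header that should make the class above runnable against a
# modern Scrapy. The module paths are assumptions based on the current
# layout and were not part of the original snippet.
from unittest import TestCase

from scrapy.http import Request, Response
from scrapy.spidermiddlewares.depth import DepthMiddleware
from scrapy.spiders import Spider
from scrapy.statscollectors import StatsCollector
from scrapy.utils.test import get_crawler
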
class TestDepthMiddleware(TestCase):

    def setUp(self):
        self.spider = BaseSpider('scrapytest.org')
        self.stats = StatsCollector()
        self.stats.open_spider(self.spider)
        self.mw = DepthMiddleware(1, self.stats, True)

    def test_process_spider_output(self):
        req = Request('http://scrapytest.org')
        resp = Response('http://scrapytest.org')
        resp.request = req
        result = [Request('http://scrapytest.org')]

        out = list(self.mw.process_spider_output(resp, result, self.spider))
        self.assertEquals(out, result)

        rdc = self.stats.get_value('request_depth_count/1', spider=self.spider)
        self.assertEquals(rdc, 1)

        req.meta['depth'] = 1

        out2 = list(self.mw.process_spider_output(resp, result, self.spider))
        self.assertEquals(out2, [])

        rdm = self.stats.get_value('request_depth_max', spider=self.spider)
        self.assertEquals(rdm, 1)

    def tearDown(self):
        self.stats.close_spider(self.spider, '')

def setUp(self):
    self.spider = BaseSpider('scrapytest.org')
    self.stats = StatsCollector()
    self.stats.open_spider(self.spider)
    self.mw = DepthMiddleware(1, self.stats)
    self.assertEquals(self.stats.get_value('envinfo/request_depth_limit'), 1)

def setUp(self):
    crawler = get_crawler(Spider)
    self.spider = crawler._create_spider('scrapytest.org')
    self.stats = StatsCollector(crawler)
    self.stats.open_spider(self.spider)
    self.mw = DepthMiddleware(1, self.stats, True)

def setUp(self):
    self.spider = Spider('scrapytest.org')
    self.stats = StatsCollector(get_crawler())
    self.stats.open_spider(self.spider)
    self.mw = DepthMiddleware(1, self.stats, True)

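# Hedged aside on the crawler-based API used in the variants above: the same
# middleware can also be built from settings. from_crawler and the setting
# names below are believed correct for modern Scrapy, but they are
# assumptions, not taken from the original snippets.
from scrapy.spidermiddlewares.depth import DepthMiddleware
from scrapy.utils.test import get_crawler

crawler = get_crawler(settings_dict={'DEPTH_LIMIT': 1,
                                     'DEPTH_STATS_VERBOSE': True})
mw = DepthMiddleware.from_crawler(crawler)
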
def setUp(self):
    settings.disabled = False
    settings.overrides['DEPTH_LIMIT'] = 1
    settings.overrides['DEPTH_STATS'] = True
    self.spider = BaseSpider('scrapytest.org')
    stats.open_spider(self.spider)
    self.mw = DepthMiddleware()
    self.assertEquals(stats.get_value('envinfo/request_depth_limit'), 1)

class TestDepthMiddleware(TestCase):

    def setUp(self):
        settings.disabled = False
        settings.overrides['DEPTH_LIMIT'] = 1
        settings.overrides['DEPTH_STATS'] = True
        self.spider = BaseSpider('scrapytest.org')
        stats.open_spider(self.spider)
        self.mw = DepthMiddleware()
        self.assertEquals(stats.get_value('envinfo/request_depth_limit'), 1)

    def test_process_spider_output(self):
        req = Request('http://scrapytest.org')
        resp = Response('http://scrapytest.org')
        resp.request = req
        result = [Request('http://scrapytest.org')]

        out = list(self.mw.process_spider_output(resp, result, self.spider))
        self.assertEquals(out, result)

        rdc = stats.get_value('request_depth_count/1', spider=self.spider)
        self.assertEquals(rdc, 1)

        req.meta['depth'] = 1

        out2 = list(self.mw.process_spider_output(resp, result, self.spider))
        self.assertEquals(out2, [])

        rdm = stats.get_value('request_depth_max', spider=self.spider)
        self.assertEquals(rdm, 1)

    def tearDown(self):
        del settings.overrides['DEPTH_LIMIT']
        del settings.overrides['DEPTH_STATS']
        settings.disabled = True
        stats.close_spider(self.spider, '')
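
# Hedged sketch of the depth-filtering logic all of the variants above
# exercise, reconstructed for illustration only: this is not Scrapy's actual
# implementation, and the plain stats dict stands in for a StatsCollector.
def process_spider_output_sketch(response, result, maxdepth, stats):
    def _filter(request):
        # Each yielded request sits one level deeper than the response it
        # came from; a missing 'depth' key means depth 0 (a start request).
        depth = response.request.meta.get('depth', 0) + 1
        if maxdepth and depth > maxdepth:
            return False  # beyond DEPTH_LIMIT: drop the request
        request.meta['depth'] = depth
        key = 'request_depth_count/%d' % depth
        stats[key] = stats.get(key, 0) + 1
        stats['request_depth_max'] = max(stats.get('request_depth_max', 0), depth)
        return True

    return (r for r in result if _filter(r))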