from unittest import TestCase

from scrapy.http import Request, Response
from scrapy.spidermiddlewares.depth import DepthMiddleware
from scrapy.spiders import Spider
from scrapy.statscollectors import StatsCollector
from scrapy.utils.test import get_crawler


class TestDepthMiddleware(TestCase):

    def setUp(self):
        crawler = get_crawler(Spider)
        self.spider = crawler._create_spider('scrapytest.org')

        self.stats = StatsCollector(crawler)
        self.stats.open_spider(self.spider)

        self.mw = DepthMiddleware(1, self.stats, True)

    def test_process_spider_output(self):
        req = Request('http://scrapytest.org')
        resp = Response('http://scrapytest.org')
        resp.request = req
        result = [Request('http://scrapytest.org')]

        out = list(self.mw.process_spider_output(resp, result, self.spider))
        self.assertEqual(out, result)

        rdc = self.stats.get_value('request_depth_count/1', spider=self.spider)
        self.assertEqual(rdc, 1)

        req.meta['depth'] = 1

        out2 = list(self.mw.process_spider_output(resp, result, self.spider))
        self.assertEqual(out2, [])

        rdm = self.stats.get_value('request_depth_max', spider=self.spider)
        self.assertEqual(rdm, 1)

    def tearDown(self):
        self.stats.close_spider(self.spider, '')
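# A minimal usage sketch (not part of the test suite) of the stats keys the
# test above asserts on: with verbose stats enabled, DepthMiddleware bumps
# 'request_depth_count/<n>' per request depth and tracks the deepest depth
# seen in 'request_depth_max'. Written against the StatsCollector API only;
# the crawler/spider setup mirrors setUp() above.
from scrapy.spiders import Spider
from scrapy.statscollectors import StatsCollector
from scrapy.utils.test import get_crawler

crawler = get_crawler(Spider)
spider = crawler._create_spider('scrapytest.org')

stats = StatsCollector(crawler)
stats.open_spider(spider)

# Record what the middleware would record for one request at depth 1.
stats.inc_value('request_depth_count/1', spider=spider)
stats.max_value('request_depth_max', 1, spider=spider)

assert stats.get_value('request_depth_count/1', spider=spider) == 1
assert stats.get_value('request_depth_max', spider=spider) == 1

stats.close_spider(spider, 'finished')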
    def test_signals(self):
        # Legacy (pre-Crawler) variant: handlers are wired through the
        # module-level pydispatch dispatcher and StatsCollector takes no
        # constructor argument. The stats_spider_* signals and their import
        # paths belong to those old Scrapy releases, so imports are omitted.
        signals_catched = set()

        def spider_opened(spider):
            assert spider is self.spider
            signals_catched.add(stats_spider_opened)

        def spider_closing(spider, reason):
            assert spider is self.spider
            assert reason == 'testing'
            signals_catched.add(stats_spider_closing)

        def spider_closed(spider, reason, spider_stats):
            assert spider is self.spider
            assert reason == 'testing'
            assert spider_stats == {'test': 1}
            signals_catched.add(stats_spider_closed)

        dispatcher.connect(spider_opened, signal=stats_spider_opened)
        dispatcher.connect(spider_closing, signal=stats_spider_closing)
        dispatcher.connect(spider_closed, signal=stats_spider_closed)

        stats = StatsCollector()
        stats.open_spider(self.spider)
        stats.set_value('test', 1, spider=self.spider)
        self.assertEqual([(self.spider, {'test': 1})],
                         list(stats.iter_spider_stats()))
        stats.close_spider(self.spider, 'testing')

        assert stats_spider_opened in signals_catched
        assert stats_spider_closing in signals_catched
        assert stats_spider_closed in signals_catched

        dispatcher.disconnect(spider_opened, signal=stats_spider_opened)
        dispatcher.disconnect(spider_closing, signal=stats_spider_closing)
        dispatcher.disconnect(spider_closed, signal=stats_spider_closed)
    def test_signals(self):
        # Crawler-based variant: handlers are wired through self.crawler.signals
        # and the collector is built from the crawler. Assumes a setUp() that
        # provides self.crawler and self.spider; the per-spider StatsCollector
        # (iter_spider_stats, spider= kwargs) and the stats_spider_* signals
        # come from the Scrapy release this test targets.
        signals_catched = set()

        def spider_opened(spider):
            assert spider is self.spider
            signals_catched.add(stats_spider_opened)

        def spider_closing(spider, reason):
            assert spider is self.spider
            assert reason == "testing"
            signals_catched.add(stats_spider_closing)

        def spider_closed(spider, reason, spider_stats):
            assert spider is self.spider
            assert reason == "testing"
            assert spider_stats == {"test": 1}
            signals_catched.add(stats_spider_closed)

        self.crawler.signals.connect(spider_opened, signal=stats_spider_opened)
        self.crawler.signals.connect(spider_closing, signal=stats_spider_closing)
        self.crawler.signals.connect(spider_closed, signal=stats_spider_closed)

        stats = StatsCollector(self.crawler)
        stats.open_spider(self.spider)
        stats.set_value("test", 1, spider=self.spider)
        self.assertEqual([(self.spider, {"test": 1})],
                         list(stats.iter_spider_stats()))
        stats.close_spider(self.spider, "testing")

        assert stats_spider_opened in signals_catched
        assert stats_spider_closing in signals_catched
        assert stats_spider_closed in signals_catched

        self.crawler.signals.disconnect(spider_opened, signal=stats_spider_opened)
        self.crawler.signals.disconnect(spider_closing, signal=stats_spider_closing)
        self.crawler.signals.disconnect(spider_closed, signal=stats_spider_closed)
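# A small sketch of the connect / send / disconnect pattern the test above
# exercises, using only the documented SignalManager API and the built-in
# scrapy.signals.spider_opened signal instead of the version-specific
# stats_spider_* signals; "demo" is an arbitrary spider name chosen here.
from scrapy import signals
from scrapy.spiders import Spider
from scrapy.utils.test import get_crawler

crawler = get_crawler(Spider)
caught = []

def on_spider_opened(spider):
    caught.append(spider)

crawler.signals.connect(on_spider_opened, signal=signals.spider_opened)
crawler.signals.send_catch_log(signals.spider_opened, spider=Spider(name="demo"))
assert len(caught) == 1
crawler.signals.disconnect(on_spider_opened, signal=signals.spider_opened)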
class TestDepthMiddleware(TestCase):

    # Legacy (pre-1.0) variant of the depth-middleware test: BaseSpider and a
    # StatsCollector with no crawler argument. Module paths in those releases
    # differ from current Scrapy, so imports are omitted here.

    def setUp(self):
        self.spider = BaseSpider('scrapytest.org')

        self.stats = StatsCollector()
        self.stats.open_spider(self.spider)

        self.mw = DepthMiddleware(1, self.stats, True)

    def test_process_spider_output(self):
        req = Request('http://scrapytest.org')
        resp = Response('http://scrapytest.org')
        resp.request = req
        result = [Request('http://scrapytest.org')]

        out = list(self.mw.process_spider_output(resp, result, self.spider))
        self.assertEqual(out, result)

        rdc = self.stats.get_value('request_depth_count/1', spider=self.spider)
        self.assertEqual(rdc, 1)

        req.meta['depth'] = 1

        out2 = list(self.mw.process_spider_output(resp, result, self.spider))
        self.assertEqual(out2, [])

        rdm = self.stats.get_value('request_depth_max', spider=self.spider)
        self.assertEqual(rdm, 1)

    def tearDown(self):
        self.stats.close_spider(self.spider, '')