Example #1
    def test_collector(self):
        stats = StatsCollector(self.crawler)
        self.assertEqual(stats.get_stats(), {})
        self.assertEqual(stats.get_value('anything'), None)
        self.assertEqual(stats.get_value('anything', 'default'), 'default')
        stats.set_value('test', 'value')
        self.assertEqual(stats.get_stats(), {'test': 'value'})
        stats.set_value('test2', 23)
        self.assertEqual(stats.get_stats(), {'test': 'value', 'test2': 23})
        self.assertEqual(stats.get_value('test2'), 23)
        stats.inc_value('test2')
        self.assertEqual(stats.get_value('test2'), 24)
        stats.inc_value('test2', 6)
        self.assertEqual(stats.get_value('test2'), 30)
        stats.max_value('test2', 6)
        self.assertEqual(stats.get_value('test2'), 30)
        stats.max_value('test2', 40)
        self.assertEqual(stats.get_value('test2'), 40)
        stats.max_value('test3', 1)
        self.assertEqual(stats.get_value('test3'), 1)
        stats.min_value('test2', 60)
        self.assertEqual(stats.get_value('test2'), 40)
        stats.min_value('test2', 35)
        self.assertEqual(stats.get_value('test2'), 35)
        stats.min_value('test4', 7)
        self.assertEqual(stats.get_value('test4'), 7)
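
The method above runs inside a TestCase that provides self.crawler. For reference, a minimal standalone sketch of the same StatsCollector API, assuming a Scrapy version where the collector is constructed from a crawler (as in Examples #1 and #4); the stat names and values here are purely illustrative:

from scrapy.spiders import Spider
from scrapy.statscollectors import StatsCollector
from scrapy.utils.test import get_crawler

crawler = get_crawler(Spider)
stats = StatsCollector(crawler)

stats.set_value('item_count', 1)          # 'item_count' is an illustrative key
stats.inc_value('item_count')             # 1 -> 2
stats.inc_value('item_count', 6)          # 2 -> 8
stats.max_value('item_count', 5)          # stays 8 (5 is not larger)
stats.min_value('item_count', 3)          # 8 -> 3
print(stats.get_stats())                  # {'item_count': 3}
print(stats.get_value('missing', 'n/a'))  # default is returned for absent keys
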
Example #2
    def setUp(self):
        self.spider = BaseSpider('scrapytest.org')

        self.stats = StatsCollector()
        self.stats.open_spider(self.spider)

        self.mw = DepthMiddleware(1, self.stats, True)
Example #3
    def setUp(self):
        self.spider = BaseSpider('scrapytest.org')

        self.stats = StatsCollector()
        self.stats.open_spider(self.spider)

        self.mw = DepthMiddleware(1, self.stats)
        self.assertEqual(self.stats.get_value('envinfo/request_depth_limit'), 1)
Example #4
    def setUp(self):
        crawler = get_crawler(Spider)
        self.spider = crawler._create_spider('scrapytest.org')

        self.stats = StatsCollector(crawler)
        self.stats.open_spider(self.spider)

        self.mw = DepthMiddleware(1, self.stats, True)
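
Examples #2–#4 differ only in how the spider and stats collector are built (an older BaseSpider plus a no-argument StatsCollector versus a crawler-based setup); all of them construct DepthMiddleware(1, stats, ...), i.e. a depth limit of 1. As a hedged standalone sketch of what that middleware then does, assuming a recent Scrapy where DepthMiddleware(maxdepth, stats, verbose_stats) drops requests deeper than maxdepth from the spider output:

from scrapy.http import Request, Response
from scrapy.spidermiddlewares.depth import DepthMiddleware
from scrapy.spiders import Spider
from scrapy.statscollectors import StatsCollector
from scrapy.utils.test import get_crawler

crawler = get_crawler(Spider)
spider = crawler._create_spider('scrapytest.org')
stats = StatsCollector(crawler)
stats.open_spider(spider)
mw = DepthMiddleware(1, stats, True)  # maxdepth=1, verbose depth stats

# Illustrative scenario: a response already at depth 1. Any request it yields
# would be at depth 2, which exceeds maxdepth=1, so the middleware drops it.
req = Request('http://scrapytest.org/', meta={'depth': 1})
resp = Response('http://scrapytest.org/', request=req)
out = list(mw.process_spider_output(resp, [Request('http://scrapytest.org/a')], spider))
assert out == []
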
Example #5
    def test_signals(self):
        signals_catched = set()

        def spider_opened(spider):
            assert spider is self.spider
            signals_catched.add(stats_spider_opened)

        def spider_closing(spider, reason):
            assert spider is self.spider
            assert reason == 'testing'
            signals_catched.add(stats_spider_closing)

        def spider_closed(spider, reason, spider_stats):
            assert spider is self.spider
            assert reason == 'testing'
            assert spider_stats == {'test': 1}
            signals_catched.add(stats_spider_closed)

        self.crawler.signals.connect(spider_opened, signal=stats_spider_opened)
        self.crawler.signals.connect(spider_closing,
                                     signal=stats_spider_closing)
        self.crawler.signals.connect(spider_closed, signal=stats_spider_closed)

        stats = StatsCollector(self.crawler)
        stats.open_spider(self.spider)
        stats.set_value('test', 1, spider=self.spider)
        self.assertEqual([(self.spider, {'test': 1})], list(stats.iter_spider_stats()))
        stats.close_spider(self.spider, 'testing')
        assert stats_spider_opened in signals_catched
        assert stats_spider_closing in signals_catched
        assert stats_spider_closed in signals_catched

        self.crawler.signals.disconnect(spider_opened,
                                        signal=stats_spider_opened)
        self.crawler.signals.disconnect(spider_closing,
                                        signal=stats_spider_closing)
        self.crawler.signals.disconnect(spider_closed,
                                        signal=stats_spider_closed)
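
The stats_spider_opened, stats_spider_closing and stats_spider_closed signals used here belong to an older Scrapy stats API. The wiring pattern itself (crawler.signals.connect / disconnect) is unchanged; a minimal sketch with the current core signals, where the handler name is illustrative:

from scrapy import signals
from scrapy.spiders import Spider
from scrapy.utils.test import get_crawler

def on_spider_closed(spider, reason):
    # Illustrative handler: receives the spider instance and the close reason
    # (e.g. 'finished').
    print(spider.name, reason)

crawler = get_crawler(Spider)
crawler.signals.connect(on_spider_closed, signal=signals.spider_closed)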