Code example #1
0
    def test_signals(self):
        """Verify that all crawler signals are bound to the appropriate
        CrawlManager handlers right after the crawler is created.

        Each handler is replaced with a mock, the corresponding signal is
        fired once, and the mock must have been called exactly once.
        """
        crawl_manager = CrawlManager('test', {'url': 'http://localhost'})

        # (signal name on the scrapy.signals module, handler name on CrawlManager)
        signals_and_handlers = [
            ('item_scraped', 'get_item'),
            ('item_dropped', 'collect_dropped'),
            ('spider_opened', 'spider_opened'),
            ('spider_error', 'handle_spider_error'),
            ('request_scheduled', 'handle_scheduling'),
        ]
        # Mock the handlers BEFORE the crawler is created, so the signal
        # connections made during crawler creation point at the mocks.
        for _, handler in signals_and_handlers:
            self._mock_method(crawl_manager, handler)
        settings = get_settings()
        crawler_process = ScrapyrtCrawlerProcess(settings, crawl_manager)
        crawler = crawler_process._create_crawler(MetaSpider)
        spider = MetaSpider()
        for signal, handler in signals_and_handlers:
            crawler.signals.send_catch_log(signal=getattr(signals, signal),
                                           spider=spider)
            handler_mock = getattr(crawl_manager, handler)
            # assertEquals is a deprecated alias (removed in Python 3.12);
            # use assertEqual instead.
            self.assertEqual(handler_mock.call_count, 1)
Code example #2
0
 def create_crawl_manager(self, kwargs=None):
     """Return a CrawlManager for this spider wired to self.crawler.

     Falls back to a copy of ``self.kwargs`` when *kwargs* is missing
     or empty (truthiness check, matching the original contract).
     """
     if not kwargs:
         kwargs = self.kwargs.copy()
     manager = CrawlManager(self.spider.name, kwargs)
     manager.crawler = self.crawler
     return manager
Code example #3
0
 def _create_crawl_manager(self):
     """Build a CrawlManager from a copy of self.kwargs and attach the crawler."""
     manager = CrawlManager(self.spider.name, self.kwargs.copy())
     manager.crawler = self.crawler
     return manager
Code example #4
0
 def _create_crawl_manager(self):
     """Create and return a CrawlManager bound to this instance's crawler.

     A copy of ``self.kwargs`` is passed so later mutations of the
     manager's kwargs don't leak back into this object.
     """
     result = CrawlManager(self.spider.name, self.kwargs.copy())
     result.crawler = self.crawler
     return result