def setUp(self):
    self.crawler = get_crawler(Spider)
    self.spider = self.crawler._create_spider('foo')
    self.mw = RetryMiddleware.from_crawler(self.crawler)
    self.mw.max_retry_times = 2
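These setUp snippets are methods of a unittest.TestCase subclass; the module-level context they rely on is not shown in the listing. A minimal sketch of that context, with the imports inferred from the code and an illustrative class name:

from unittest import TestCase

from scrapy.downloadermiddlewares.retry import RetryMiddleware
from scrapy.http import Request, Response
from scrapy.spiders import Spider
from scrapy.utils.test import get_crawler


# Class name is illustrative; the original listing only shows the methods.
class RetryMiddlewareTest(TestCase):
    # The setUp and test methods shown in this listing go here.
    ...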
def setUp(self):
    self.crawler = get_crawler(Spider)
    self.spider = self.crawler._create_spider('foo')
    self.mw = RetryMiddleware.from_crawler(self.crawler)
    self.mw.max_retry_times = 2
    self.invalid_url = 'http://www.scrapytest.org/invalid_url'
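The invalid_url fixture points at tests that go through process_exception rather than process_response. A hypothetical test along those lines, assuming the imports sketched above; the test name and the choice of ConnectionRefusedError (one of the connection errors the middleware retries by default) are illustrative:

def test_connection_error_is_retried(self):
    from twisted.internet.error import ConnectionRefusedError

    req = Request(self.invalid_url)
    # A retryable exception with retries remaining should yield a fresh request.
    retried = self.mw.process_exception(req, ConnectionRefusedError(), self.spider)
    self.assertIsInstance(retried, Request)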
def get_spider_and_middleware(self, settings=None):
    crawler = get_crawler(Spider, settings or {})
    spider = crawler._create_spider('foo')
    middleware = RetryMiddleware.from_crawler(crawler)
    return spider, middleware
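A brief sketch of how a test might use this helper to override retry settings per test, assuming the imports sketched above; the test name, the RETRY_TIMES value, and the assertion are illustrative:

def test_single_retry(self):
    spider, middleware = self.get_spider_and_middleware({'RETRY_TIMES': 1})
    req = Request('http://www.scrapytest.org/503')
    rsp = Response(req.url, status=503)
    # 503 is retried by default, so the first failure yields a new request.
    retried = middleware.process_response(req, rsp, spider)
    self.assertIsInstance(retried, Request)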
def setUp(self):
    crawler = get_crawler(Spider)
    self.spider = crawler._create_spider("foo")
    self.mw = RetryMiddleware.from_crawler(crawler)
    self.mw.max_retry_times = 2
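For context, the kind of test these fixtures support, modeled on the standard 503 retry check; the method name and assertions are illustrative:

def test_503_retried_then_given_up(self):
    req = Request('http://www.scrapytest.org/503')
    rsp = Response(req.url, status=503)
    # max_retry_times = 2: the first two failures each produce a fresh retry request.
    req1 = self.mw.process_response(req, rsp, self.spider)
    self.assertIsInstance(req1, Request)
    req2 = self.mw.process_response(req1, rsp, self.spider)
    self.assertIsInstance(req2, Request)
    # The third failure exhausts the retries and the original response is returned.
    self.assertIs(self.mw.process_response(req2, rsp, self.spider), rsp)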