def create_spider_mw(self, compression_enabled=True, compression_header=False):
    """Build a spider and an HttpCompressionMiddleware wired to the same crawler.

    The crawler is configured with COMPRESSION_ENABLED and
    COMPRESSION_KEEP_ENCODING_HEADERS taken from the arguments.
    Returns a (spider, middleware) tuple.
    """
    settings = {
        'COMPRESSION_ENABLED': compression_enabled,
        'COMPRESSION_KEEP_ENCODING_HEADERS': compression_header,
    }
    crawler = get_crawler(Spider, settings)
    spider = crawler._create_spider('foo')
    middleware = HttpCompressionMiddleware.from_crawler(crawler)
    return spider, middleware
def test_setting_true_compression_enabled(self):
    """Middleware construction succeeds when COMPRESSION_ENABLED is explicitly True."""
    crawler = get_crawler(settings_dict={'COMPRESSION_ENABLED': True})
    middleware = HttpCompressionMiddleware.from_crawler(crawler)
    self.assertIsInstance(middleware, HttpCompressionMiddleware)
def test_setting_default_compression_enabled(self):
    """Middleware construction succeeds with default settings (compression on by default)."""
    crawler = get_crawler()
    middleware = HttpCompressionMiddleware.from_crawler(crawler)
    self.assertIsInstance(middleware, HttpCompressionMiddleware)
def setUp(self):
    """Create the crawler, spider, and middleware fixtures and open spider stats."""
    self.crawler = get_crawler(Spider)
    self.spider = self.crawler._create_spider('scrapytest.org')
    self.mw = HttpCompressionMiddleware.from_crawler(self.crawler)
    # Stats collection must be opened so the middleware can record per-spider stats.
    self.crawler.stats.open_spider(self.spider)