def get_defaults_spider_mw(self):
    """Return (default_headers, spider, middleware) for DefaultHeadersMiddleware tests.

    ``default_headers`` maps each DEFAULT_REQUEST_HEADERS key to a
    one-element value list, mirroring Scrapy's multi-valued Headers format.
    """
    crawler = get_crawler()
    spider = BaseSpider('foo')
    spider.set_crawler(crawler)
    # Wrap each value in a list to match how Headers stores values.
    defaults = {k: [v] for k, v in
                crawler.settings.get('DEFAULT_REQUEST_HEADERS').iteritems()}
    return defaults, spider, DefaultHeadersMiddleware()
 def get_defaults_spider_mw(self):
     """Return (default_headers, spider, middleware) for DefaultHeadersMiddleware tests.

     ``default_headers`` maps each DEFAULT_REQUEST_HEADERS key to a
     one-element value list, mirroring Scrapy's multi-valued Headers format.
     The middleware is built via ``from_crawler`` so it sees crawler settings.
     """
     crawler = get_crawler()
     spider = BaseSpider('foo')
     spider.set_crawler(crawler)
     # Wrap each value in a list to match how Headers stores values.
     defaults = {k: [v] for k, v in
                 crawler.settings.get('DEFAULT_REQUEST_HEADERS').iteritems()}
     return defaults, spider, DefaultHeadersMiddleware.from_crawler(crawler)
class TestDefaultHeadersMiddleware(TestCase):
    """Tests for DefaultHeadersMiddleware's handling of DEFAULT_REQUEST_HEADERS."""

    def setUp(self):
        self.spider = BaseSpider('foo')
        self.mw = DefaultHeadersMiddleware()
        # Expected defaults: each value wrapped in a list, as Headers stores them.
        self.default_headers = {k: [v] for k, v in
                                settings.get('DEFAULT_REQUEST_HEADERS').iteritems()}

    def test_process_request(self):
        """A request with no explicit headers ends up with exactly the defaults."""
        req = Request('http://www.scrapytest.org')
        self.mw.process_request(req, self.spider)
        self.assertEqual(req.headers, self.default_headers)

    def test_update_headers(self):
        """Headers set on the request are kept; defaults fill in the rest."""
        headers = {'Accept-Language': ['es'], 'Test-Header': ['test']}
        req = Request('http://www.scrapytest.org', headers=headers)
        self.assertEqual(req.headers, headers)

        self.mw.process_request(req, self.spider)
        # The middleware merges defaults without clobbering explicit headers.
        self.default_headers.update(headers)
        self.assertEqual(req.headers, self.default_headers)
 def setUp(self):
     """Create the spider, middleware, and expected default-headers fixture."""
     self.spider = BaseSpider('foo')
     self.mw = DefaultHeadersMiddleware()
     # Expected defaults: each value wrapped in a list, as Headers stores them.
     self.default_headers = {k: [v] for k, v in
                             settings.get('DEFAULT_REQUEST_HEADERS').iteritems()}
 def get_defaults_spider_mw(self):
     """Return (default_headers, spider, middleware) for DefaultHeadersMiddleware tests.

     ``default_headers`` maps each DEFAULT_REQUEST_HEADERS key to a
     one-element value list, mirroring Scrapy's multi-valued Headers format.
     Uses ``six.iteritems`` for py2/py3 compatibility and ``from_crawler``
     so the middleware sees the crawler's settings.
     """
     crawler = get_crawler(Spider)
     spider = crawler._create_spider('foo')
     # Wrap each value in a list to match how Headers stores values.
     defaults = {k: [v] for k, v in
                 six.iteritems(crawler.settings.get('DEFAULT_REQUEST_HEADERS'))}
     return defaults, spider, DefaultHeadersMiddleware.from_crawler(crawler)