def test_keep_cookie_from_default_request_headers_middleware(self):
    """Cookies from DEFAULT_REQUEST_HEADERS must merge with, not be
    clobbered by, cookies passed via the Request's ``cookies`` argument."""
    default_headers = {'Cookie': 'default=value; asdf=qwerty'}
    headers_mw = DefaultHeadersMiddleware(default_headers.items())

    def run_middlewares(request):
        # Both middlewares must let the request pass through (return None).
        assert headers_mw.process_request(request, self.spider) is None
        assert self.mw.process_request(request, self.spider) is None

    # A request cookie with the same name overrides the default value.
    overriding = Request('http://example.org', cookies={'default': 'something'})
    run_middlewares(overriding)
    self.assertCookieValEqual(overriding.headers['Cookie'],
                              b'default=something; asdf=qwerty')

    # A request cookie with a new name is kept alongside the defaults.
    extra = Request('http://example.com', cookies={'a': 'b'})
    run_middlewares(extra)
    self.assertCookieValEqual(extra.headers['Cookie'],
                              b'default=value; a=b; asdf=qwerty')
def test_keep_cookie_from_default_request_headers_middleware(self):
    """Cookies set via DEFAULT_REQUEST_HEADERS must be merged with the
    per-request ``cookies`` argument rather than discarded."""
    DEFAULT_REQUEST_HEADERS = dict(Cookie="default=value; asdf=qwerty")
    mw_default_headers = DefaultHeadersMiddleware(
        DEFAULT_REQUEST_HEADERS.items())
    cases = [
        # (url, request cookies, expected merged Cookie header)
        # same-name request cookie overrides the default value
        ("http://example.org", {"default": "something"},
         b"default=something; asdf=qwerty"),
        # new-name request cookie is kept alongside the defaults
        ("http://example.com", {"a": "b"},
         b"default=value; a=b; asdf=qwerty"),
    ]
    for url, cookies, expected in cases:
        request = Request(url, cookies=cookies)
        # Both middlewares must let the request continue (return None).
        assert mw_default_headers.process_request(request, self.spider) is None
        assert self.mw.process_request(request, self.spider) is None
        self.assertCookieValEqual(request.headers["Cookie"], expected)
def get_defaults_spider_mw(self):
    """Build a fresh test fixture.

    Returns a 3-tuple of (expected default headers as a bytes->list-of-bytes
    dict, a spider instance, and a DefaultHeadersMiddleware built from the
    same crawler so its settings match the expected dict).
    """
    crawler = get_crawler(Spider)
    spider = crawler._create_spider('foo')
    raw_defaults = crawler.settings.get('DEFAULT_REQUEST_HEADERS')
    # Headers are stored as bytes keys mapping to lists of bytes values.
    defaults = {}
    for header, value in raw_defaults.items():
        defaults[to_bytes(header)] = [to_bytes(value)]
    middleware = DefaultHeadersMiddleware.from_crawler(crawler)
    return defaults, spider, middleware
def get_defaults_spider_mw(self):
    """Build a fresh test fixture.

    Returns a 3-tuple of (expected default headers as a dict mapping each
    header name to a one-element list of its value, a spider instance, and
    a DefaultHeadersMiddleware built from the same crawler so its settings
    match the expected dict).
    """
    crawler = get_crawler(Spider)
    spider = crawler._create_spider('foo')
    # Dict comprehension instead of dict([...]) over six.iteritems():
    # .items() is equivalent on Python 3 and drops the six dependency.
    defaults = {
        k: [v]
        for k, v in crawler.settings.get('DEFAULT_REQUEST_HEADERS').items()
    }
    return defaults, spider, DefaultHeadersMiddleware.from_crawler(crawler)