# Example 1 (votes: 0)
 def test_proxy_already_seted(self):
     """A request that explicitly disables proxying via meta={'proxy': None}
     must keep that None even when http_proxy is set in the environment."""
     os.environ['http_proxy'] = 'https://proxy.for.http:3128'
     middleware = HttpProxyMiddleware.from_crawler(self.crawler)
     middleware.spider_opened(self.spider)
     cached_proxy_bypass.cache_clear()
     request = Request('http://noproxy.com', meta={'proxy': None})
     # Processing must succeed and must not replace the explicit None.
     assert middleware.process_request(request, spider) is None
     assert 'proxy' in request.meta
     assert request.meta['proxy'] is None
# Example 2 (votes: 0)
 def test_proxy_precedence_meta(self):
     """A proxy given in the request's meta wins over the environment proxy."""
     os.environ['http_proxy'] = 'https://proxy.com'
     middleware = HttpProxyMiddleware.from_crawler(self.crawler)
     middleware.spider_opened(self.spider)
     cached_proxy_bypass.cache_clear()
     request = Request(
         'http://scrapytest.org',
         meta={'proxy': 'https://new.proxy:3128'},
     )
     assert middleware.process_request(request, spider) is None
     # The meta-supplied proxy must survive untouched.
     self.assertEqual(request.meta, {'proxy': 'https://new.proxy:3128'})
     middleware.spider_closed(self.spider)
# Example 3 (votes: 0)
 def test_no_environment_proxies(self):
     """With no *_proxy variables present, requests pass through unmodified."""
     # Intentionally replaces os.environ wholesale so the host's real proxy
     # settings cannot leak into the middleware under test.
     os.environ = {'dummy_proxy': 'reset_env_and_do_not_raise'}
     middleware = HttpProxyMiddleware.from_crawler(self.crawler)
     middleware.spider_opened(self.spider)
     for url in ('http://e.com', 'https://e.com', 'file:///tmp/a'):
         cached_proxy_bypass.cache_clear()
         request = Request(url)
         assert middleware.process_request(request, spider) is None
         # Neither the URL nor the meta dict may be touched.
         self.assertEqual(request.url, url)
         self.assertEqual(request.meta, {})
     middleware.spider_closed(self.spider)
# Example 4 (votes: 0)
    def test_no_proxy(self):
        """The no_proxy environment variable suppresses the proxy for
        matching hosts ('*' matches everything), but an explicit
        meta['proxy'] still takes precedence over no_proxy."""
        os.environ['http_proxy'] = 'https://proxy.for.http:3128'
        middleware = HttpProxyMiddleware.from_crawler(self.crawler)
        middleware.spider_opened(self.spider)

        # (no_proxy value, should the request end up with a proxy?)
        scenarios = [
            ('*', False),
            ('other.com', True),
            ('other.com,noproxy.com', False),
        ]
        for no_proxy, expect_proxy in scenarios:
            cached_proxy_bypass.cache_clear()
            os.environ['no_proxy'] = no_proxy
            request = Request('http://noproxy.com')
            assert middleware.process_request(request, spider) is None
            assert ('proxy' in request.meta) == expect_proxy

        # proxy from meta['proxy'] takes precedence even over no_proxy='*'
        cached_proxy_bypass.cache_clear()
        os.environ['no_proxy'] = '*'
        request = Request('http://noproxy.com',
                          meta={'proxy': 'http://proxy.com'})
        assert middleware.process_request(request, spider) is None
        self.assertEqual(request.meta, {'proxy': 'http://proxy.com'})

        middleware.spider_closed(self.spider)
# Example 5 (votes: 0)
 def test_environment_proxies(self):
     """http_proxy/https_proxy env vars are applied per URL scheme;
     a scheme with no matching *_proxy variable gets no proxy at all."""
     os.environ['http_proxy'] = http_proxy = 'https://proxy.for.http:3128'
     os.environ['https_proxy'] = https_proxy = 'http://proxy.for.https:8080'
     # Make sure no stray file_proxy from the host environment interferes.
     os.environ.pop('file_proxy', None)
     middleware = HttpProxyMiddleware.from_crawler(self.crawler)
     middleware.spider_opened(self.spider)
     expectations = (
         ('http://e.com', http_proxy),
         ('https://e.com', https_proxy),
         ('file://tmp/a', None),
     )
     for url, expected_proxy in expectations:
         cached_proxy_bypass.cache_clear()
         request = Request(url)
         assert middleware.process_request(request, spider) is None
         self.assertEqual(request.url, url)
         self.assertEqual(request.meta.get('proxy'), expected_proxy)
     middleware.spider_closed(self.spider)
# Example 6 (votes: 0)
 def test_proxy_auth_empty_passwd(self):
     """A proxy URL carrying a user name but an empty password must still
     produce a Proxy-Authorization header: Basic base64('user:')."""
     os.environ['http_proxy'] = 'https://user:@proxy:3128'
     mw = HttpProxyMiddleware.from_crawler(self.crawler)
     mw.spider_opened(self.spider)
     cached_proxy_bypass.cache_clear()
     req = Request('http://scrapytest.org')
     assert mw.process_request(req, spider) is None
     # NOTE(review): the '*****' placeholders below look like credentials
     # masked by whatever tool exported this file — confirm the intended
     # literal against the original test suite before relying on it.
     self.assertEqual(req.meta, {'proxy': 'https://*****:*****@proxy:3128'})
     # A second pass must strip the credentials from meta and leave the
     # already-set Proxy-Authorization header in place.
     assert mw.process_request(req, spider) is None
     self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
     # base64(b'user:') == b'dXNlcjo='. The previous expected value
     # (b'dXNlcm5hbWU6', i.e. base64 of 'username:') did not match the
     # 'user' credential configured above.
     self.assertEqual(req.headers.get('Proxy-Authorization'),
                      b'Basic dXNlcjo=')
     mw.spider_closed(self.spider)
# Example 7 (votes: 0)
    def test_proxy_auth_encoding(self):
        """HTTPPROXY_AUTH_ENCODING controls how non-ASCII proxy credentials
        are byte-encoded before base64-ing into Proxy-Authorization."""
        # utf-8 encoding of the non-ASCII user name u'm\u00E1n'
        os.environ['http_proxy'] = u'https://m\u00E1n:pass@proxy:3128'
        settings = deepcopy(self.settings)
        settings.update({'HTTPPROXY_AUTH_ENCODING': 'utf-8'})
        crawler = Crawler(spider, settings)
        mw = HttpProxyMiddleware.from_crawler(crawler)
        mw.spider_opened(self.spider)

        cached_proxy_bypass.cache_clear()
        req = Request('http://scrapytest.org')
        assert mw.process_request(req, spider) is None
        # NOTE(review): the '*****' placeholders below look like credentials
        # masked by whatever tool exported this file — confirm the intended
        # literals against the original test suite before relying on them.
        self.assertEqual(req.meta, {'proxy': 'https://*****:*****@proxy:3128'})
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://*****:*****@proxy:3128'})
        # Repeated processing must eventually strip credentials from meta
        # while keeping the Proxy-Authorization header.
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        # base64(u'm\u00E1n:pass'.encode('utf-8')) == b'bcOhbjpwYXNz'.
        # The previous expected value (b'/HNlcjpwYXNz', the latin-1 bytes of
        # u'\u00FCser:pass') matched neither the configured user name nor
        # the utf-8 encoding set above.
        self.assertEqual(req.headers.get('Proxy-Authorization'),
                         b'Basic bcOhbjpwYXNz')

        mw.spider_closed(self.spider)