2009-10-05 04:10:22 -02:00
|
|
|
import os
|
2009-10-08 16:31:37 +01:00
|
|
|
import sys
|
2017-02-08 13:21:10 -03:00
|
|
|
from functools import partial
|
2009-10-08 16:31:37 +01:00
|
|
|
from twisted.trial.unittest import TestCase, SkipTest
|
2009-10-05 04:10:22 -02:00
|
|
|
|
2015-04-20 21:23:05 -03:00
|
|
|
from scrapy.downloadermiddlewares.httpproxy import HttpProxyMiddleware
|
2010-08-10 17:36:48 -03:00
|
|
|
from scrapy.exceptions import NotConfigured
|
2009-10-05 04:10:22 -02:00
|
|
|
from scrapy.http import Response, Request
|
2015-05-09 04:20:09 -03:00
|
|
|
from scrapy.spiders import Spider
|
2017-02-08 13:21:10 -03:00
|
|
|
from scrapy.crawler import Crawler
|
|
|
|
from scrapy.settings import Settings
|
2009-10-05 04:10:22 -02:00
|
|
|
|
2013-12-28 00:47:32 +06:00
|
|
|
# Module-level dummy spider, passed as the `spider` argument to
# process_request() in every test below; its contents are never inspected.
spider = Spider('foo')
|
2009-10-05 04:10:22 -02:00
|
|
|
|
2016-01-20 13:52:52 -05:00
|
|
|
|
2009-10-05 04:10:22 -02:00
|
|
|
class TestDefaultHeadersMiddleware(TestCase):
    """Tests for HttpProxyMiddleware.

    The middleware reads proxy configuration from the process environment
    (``http_proxy``, ``https_proxy``, ``no_proxy``), so every test mutates
    ``os.environ`` and relies on setUp/tearDown to snapshot and restore it.
    """

    failureException = AssertionError

    def setUp(self):
        # Snapshot the environment so each test may mutate proxy variables.
        self._oldenv = os.environ.copy()

    def tearDown(self):
        # Restore the snapshot *in place*.  Rebinding ``os.environ`` to a
        # plain dict (the previous approach) would leave the real process
        # environment polluted by os.environ[...] assignments made in tests.
        os.environ.clear()
        os.environ.update(self._oldenv)

    def test_not_enabled(self):
        """from_crawler() raises NotConfigured when HTTPPROXY_ENABLED is False."""
        settings = Settings({'HTTPPROXY_ENABLED': False})
        crawler = Crawler(spider, settings)
        # assertRaises accepts the callable's arguments directly; no partial needed.
        self.assertRaises(NotConfigured, HttpProxyMiddleware.from_crawler, crawler)

    def test_no_enviroment_proxies(self):
        """With no *_proxy variables set, requests pass through untouched."""
        # Empty the environment (any *_proxy entry would interfere) but keep a
        # harmless dummy key; the middleware must neither raise nor set a proxy.
        # Mutating in place (instead of rebinding os.environ to a plain dict)
        # keeps os.environ a real environ mapping for subsequent tests.
        os.environ.clear()
        os.environ['dummy_proxy'] = 'reset_env_and_do_not_raise'
        mw = HttpProxyMiddleware()

        for url in ('http://e.com', 'https://e.com', 'file:///tmp/a'):
            req = Request(url)
            assert mw.process_request(req, spider) is None
            self.assertEqual(req.url, url)
            self.assertEqual(req.meta, {})

    def test_enviroment_proxies(self):
        """Each request gets the proxy matching its URL scheme, if any."""
        os.environ['http_proxy'] = http_proxy = 'https://proxy.for.http:3128'
        os.environ['https_proxy'] = https_proxy = 'http://proxy.for.https:8080'
        # Make sure no stray file_proxy leaks in from the outer environment.
        os.environ.pop('file_proxy', None)
        mw = HttpProxyMiddleware()

        for url, proxy in [('http://e.com', http_proxy),
                           ('https://e.com', https_proxy),
                           ('file://tmp/a', None)]:
            req = Request(url)
            assert mw.process_request(req, spider) is None
            self.assertEqual(req.url, url)
            self.assertEqual(req.meta.get('proxy'), proxy)

    def test_proxy_precedence_meta(self):
        """An explicit meta['proxy'] overrides the environment proxy."""
        os.environ['http_proxy'] = 'https://proxy.com'
        mw = HttpProxyMiddleware()
        req = Request('http://scrapytest.org', meta={'proxy': 'https://new.proxy:3128'})
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://new.proxy:3128'})

    def test_proxy_auth(self):
        """Credentials embedded in the proxy URL are stripped into a
        Basic Proxy-Authorization header."""
        os.environ['http_proxy'] = 'https://user:pass@proxy:3128'
        mw = HttpProxyMiddleware()
        req = Request('http://scrapytest.org')
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic dXNlcjpwYXNz')
        # proxy from request.meta
        req = Request('http://scrapytest.org', meta={'proxy': 'https://username:password@proxy:3128'})
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic dXNlcm5hbWU6cGFzc3dvcmQ=')

    def test_proxy_auth_empty_passwd(self):
        """An empty password still produces a valid Basic auth header."""
        os.environ['http_proxy'] = 'https://user:@proxy:3128'
        mw = HttpProxyMiddleware()
        req = Request('http://scrapytest.org')
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic dXNlcjo=')
        # proxy from request.meta
        req = Request('http://scrapytest.org', meta={'proxy': 'https://username:@proxy:3128'})
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic dXNlcm5hbWU6')

    def test_proxy_auth_encoding(self):
        """Non-ASCII credentials are encoded per the auth_encoding argument."""
        # utf-8 encoding
        os.environ['http_proxy'] = u'https://m\u00E1n:pass@proxy:3128'
        mw = HttpProxyMiddleware(auth_encoding='utf-8')
        req = Request('http://scrapytest.org')
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic bcOhbjpwYXNz')

        # proxy from request.meta
        req = Request('http://scrapytest.org', meta={'proxy': u'https://\u00FCser:pass@proxy:3128'})
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic w7xzZXI6cGFzcw==')

        # default latin-1 encoding
        mw = HttpProxyMiddleware(auth_encoding='latin-1')
        req = Request('http://scrapytest.org')
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic beFuOnBhc3M=')

        # proxy from request.meta, latin-1 encoding
        req = Request('http://scrapytest.org', meta={'proxy': u'https://\u00FCser:pass@proxy:3128'})
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic /HNlcjpwYXNz')

    def test_proxy_already_seted(self):
        """meta['proxy'] = None disables the environment proxy for a request."""
        os.environ['http_proxy'] = 'https://proxy.for.http:3128'
        mw = HttpProxyMiddleware()
        req = Request('http://noproxy.com', meta={'proxy': None})
        assert mw.process_request(req, spider) is None
        assert 'proxy' in req.meta and req.meta['proxy'] is None

    def test_no_proxy(self):
        """The no_proxy variable suppresses proxying for matching hosts."""
        os.environ['http_proxy'] = 'https://proxy.for.http:3128'
        mw = HttpProxyMiddleware()

        # '*' bypasses the proxy for every host.
        os.environ['no_proxy'] = '*'
        req = Request('http://noproxy.com')
        assert mw.process_request(req, spider) is None
        assert 'proxy' not in req.meta

        # A non-matching host list leaves the proxy in effect.
        os.environ['no_proxy'] = 'other.com'
        req = Request('http://noproxy.com')
        assert mw.process_request(req, spider) is None
        assert 'proxy' in req.meta

        # A matching entry anywhere in the list bypasses the proxy.
        os.environ['no_proxy'] = 'other.com,noproxy.com'
        req = Request('http://noproxy.com')
        assert mw.process_request(req, spider) is None
        assert 'proxy' not in req.meta

        # proxy from meta['proxy'] takes precedence
        os.environ['no_proxy'] = '*'
        req = Request('http://noproxy.com', meta={'proxy': 'http://proxy.com'})
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'http://proxy.com'})