import os
from functools import partial

from twisted.trial.unittest import TestCase

from scrapy.crawler import Crawler
from scrapy.downloadermiddlewares.httpproxy import HttpProxyMiddleware
from scrapy.exceptions import NotConfigured
from scrapy.http import Request
from scrapy.settings import Settings
from scrapy.spiders import Spider

spider = Spider('foo')


class TestHttpProxyMiddleware(TestCase):

    failureException = AssertionError

    def setUp(self):
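        # Snapshot the process environment; every test below mutates os.environ.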
        self._oldenv = os.environ.copy()

    def tearDown(self):
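        # Restore the environment captured in setUp().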
        os.environ = self._oldenv

    def test_not_enabled(self):
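        # With HTTPPROXY_ENABLED set to False, from_crawler should refuse to
        # build the middleware by raising NotConfigured.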
        settings = Settings({'HTTPPROXY_ENABLED': False})
        crawler = Crawler(Spider, settings)
        self.assertRaises(NotConfigured, partial(HttpProxyMiddleware.from_crawler, crawler))

    def test_no_environment_proxies(self):
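        # Without any *_proxy variables set, requests pass through untouched:
        # same URL, empty meta.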
        os.environ = {'dummy_proxy': 'reset_env_and_do_not_raise'}
        mw = HttpProxyMiddleware()

        for url in ('http://e.com', 'https://e.com', 'file:///tmp/a'):
            req = Request(url)
            assert mw.process_request(req, spider) is None
            self.assertEqual(req.url, url)
            self.assertEqual(req.meta, {})

    def test_environment_proxies(self):
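        # http_proxy and https_proxy are picked up per URL scheme and exposed
        # through request.meta['proxy']; file:// URLs get no proxy.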
        os.environ['http_proxy'] = http_proxy = 'https://proxy.for.http:3128'
        os.environ['https_proxy'] = https_proxy = 'http://proxy.for.https:8080'
        os.environ.pop('file_proxy', None)
        mw = HttpProxyMiddleware()

        for url, proxy in [
            ('http://e.com', http_proxy),
            ('https://e.com', https_proxy),
            ('file://tmp/a', None),
        ]:
            req = Request(url)
            assert mw.process_request(req, spider) is None
            self.assertEqual(req.url, url)
            self.assertEqual(req.meta.get('proxy'), proxy)

    def test_proxy_precedence_meta(self):
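        # An explicit meta['proxy'] on the request wins over the environment.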
        os.environ['http_proxy'] = 'https://proxy.com'
        mw = HttpProxyMiddleware()
        req = Request('http://scrapytest.org', meta={'proxy': 'https://new.proxy:3128'})
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://new.proxy:3128'})

    def test_proxy_auth(self):
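        # Credentials embedded in the proxy URL are stripped from meta['proxy']
        # and moved into a basic Proxy-Authorization header.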
        os.environ['http_proxy'] = 'https://user:pass@proxy:3128'
        mw = HttpProxyMiddleware()
        req = Request('http://scrapytest.org')
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic dXNlcjpwYXNz')

        # proxy from request.meta
        req = Request('http://scrapytest.org', meta={'proxy': 'https://username:password@proxy:3128'})
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic dXNlcm5hbWU6cGFzc3dvcmQ=')

    def test_proxy_auth_empty_passwd(self):
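        # An empty password still produces a header: 'user:' is encoded as-is.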
        os.environ['http_proxy'] = 'https://user:@proxy:3128'
        mw = HttpProxyMiddleware()
        req = Request('http://scrapytest.org')
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic dXNlcjo=')

        # proxy from request.meta
        req = Request('http://scrapytest.org', meta={'proxy': 'https://username:@proxy:3128'})
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic dXNlcm5hbWU6')

    def test_proxy_auth_encoding(self):
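        # auth_encoding controls how non-ASCII credentials are encoded before
        # base64: the same credentials yield different headers under utf-8
        # and latin-1.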
        # utf-8 encoding
        os.environ['http_proxy'] = 'https://m\u00E1n:pass@proxy:3128'
        mw = HttpProxyMiddleware(auth_encoding='utf-8')
        req = Request('http://scrapytest.org')
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic bcOhbjpwYXNz')

        # proxy from request.meta
        req = Request('http://scrapytest.org', meta={'proxy': 'https://\u00FCser:pass@proxy:3128'})
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic w7xzZXI6cGFzcw==')

        # default latin-1 encoding
        mw = HttpProxyMiddleware(auth_encoding='latin-1')
        req = Request('http://scrapytest.org')
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic beFuOnBhc3M=')

        # proxy from request.meta, latin-1 encoding
        req = Request('http://scrapytest.org', meta={'proxy': 'https://\u00FCser:pass@proxy:3128'})
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic /HNlcjpwYXNz')

    def test_proxy_already_set(self):
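        # meta={'proxy': None} explicitly disables proxying for a request,
        # even when an environment proxy is configured.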
        os.environ['http_proxy'] = 'https://proxy.for.http:3128'
        mw = HttpProxyMiddleware()
        req = Request('http://noproxy.com', meta={'proxy': None})
        assert mw.process_request(req, spider) is None
        assert 'proxy' in req.meta and req.meta['proxy'] is None

    def test_no_proxy(self):
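        # no_proxy accepts '*' (bypass the proxy for everything) or a
        # comma-separated host list; matching hosts are not proxied.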
        os.environ['http_proxy'] = 'https://proxy.for.http:3128'
        mw = HttpProxyMiddleware()

        os.environ['no_proxy'] = '*'
        req = Request('http://noproxy.com')
        assert mw.process_request(req, spider) is None
        assert 'proxy' not in req.meta

        os.environ['no_proxy'] = 'other.com'
        req = Request('http://noproxy.com')
        assert mw.process_request(req, spider) is None
        assert 'proxy' in req.meta

        os.environ['no_proxy'] = 'other.com,noproxy.com'
        req = Request('http://noproxy.com')
        assert mw.process_request(req, spider) is None
        assert 'proxy' not in req.meta

        # proxy from meta['proxy'] takes precedence over no_proxy
        os.environ['no_proxy'] = '*'
        req = Request('http://noproxy.com', meta={'proxy': 'http://proxy.com'})
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'http://proxy.com'})

    def test_no_proxy_invalid_values(self):
        os.environ['no_proxy'] = '/var/run/docker.sock'
        mw = HttpProxyMiddleware()
        # A value like '/var/run/docker.sock' can legitimately appear in
        # no_proxy, but it is not a parseable proxy entry and should be
        # skipped rather than raise.
        assert 'no' not in mw.proxies