2009-09-04 12:29:43 -03:00
|
|
|
from unittest import TestCase
|
2014-07-14 18:47:22 +08:00
|
|
|
import six
|
2009-09-04 12:29:43 -03:00
|
|
|
|
2015-04-20 21:23:05 -03:00
|
|
|
from scrapy.downloadermiddlewares.defaultheaders import DefaultHeadersMiddleware
|
2010-07-16 14:51:14 -03:00
|
|
|
from scrapy.http import Request
|
2015-05-09 04:20:09 -03:00
|
|
|
from scrapy.spiders import Spider
|
2010-09-22 16:09:13 -03:00
|
|
|
from scrapy.utils.test import get_crawler
|
2009-09-04 12:29:43 -03:00
|
|
|
|
|
|
|
|
|
|
|
class TestDefaultHeadersMiddleware(TestCase):
    """Tests for DefaultHeadersMiddleware.

    The middleware should fill in the crawler's DEFAULT_REQUEST_HEADERS on
    outgoing requests without overwriting headers the request already set.
    """

    def get_defaults_spider_mw(self):
        """Build a fresh crawler and return ``(defaults, spider, middleware)``.

        ``defaults`` maps each configured default header name to a
        single-element list of its value, matching the multi-valued form
        used by scrapy's ``Headers`` objects when comparing below.
        """
        crawler = get_crawler(Spider)
        spider = crawler._create_spider('foo')
        # Dict comprehension instead of dict([...]) — no throwaway list.
        # .items() behaves correctly on both Python 2 and 3 here, so the
        # six.iteritems() indirection is unnecessary.
        defaults = {
            k: [v]
            for k, v in crawler.settings.get('DEFAULT_REQUEST_HEADERS').items()
        }
        return defaults, spider, DefaultHeadersMiddleware.from_crawler(crawler)

    def test_process_request(self):
        """A request created with no headers receives exactly the defaults."""
        defaults, spider, mw = self.get_defaults_spider_mw()
        req = Request('http://www.scrapytest.org')
        mw.process_request(req, spider)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(req.headers, defaults)

    def test_update_headers(self):
        """Headers set explicitly on the request are kept; defaults only
        fill in the header names the request did not set."""
        defaults, spider, mw = self.get_defaults_spider_mw()
        headers = {'Accept-Language': ['es'], 'Test-Header': ['test']}
        req = Request('http://www.scrapytest.org', headers=headers)
        self.assertEqual(req.headers, headers)

        mw.process_request(req, spider)
        # Expected result: the defaults, with the explicit headers winning
        # wherever both define the same name.
        defaults.update(headers)
        self.assertEqual(req.headers, defaults)
|