mirror of https://github.com/scrapy/scrapy.git
Merge pull request #4167 from noviluni/fix_E711_and_E713
fix E711 and E713
commit a78e58afe0
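The two flake8 codes being fixed are E711 (comparison to None should use "is" / "is not") and E713 (negated membership test should use the "not in" operator). A minimal illustration of both patterns and their idiomatic replacements (hypothetical code, not taken from the PR):

    failure = None
    names = ['alpha', 'beta']

    # E711: equality comparison against None.
    if failure == None:       # flagged by flake8 as E711
        pass
    if failure is None:       # idiomatic replacement
        pass

    # E713: negated membership test.
    if not 'gamma' in names:  # flagged by flake8 as E713
        pass
    if 'gamma' not in names:  # idiomatic replacement
        pass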
@@ -192,14 +192,14 @@ flake8-ignore =
     tests/test_crawl.py E501 E741 E265
     tests/test_crawler.py F841 E306 E501
     tests/test_dependencies.py E302 F841 E501 E305
-    tests/test_downloader_handlers.py E124 E127 E128 E225 E261 E265 F401 E501 E502 E701 E711 E126 E226 E123
+    tests/test_downloader_handlers.py E124 E127 E128 E225 E261 E265 F401 E501 E502 E701 E126 E226 E123
     tests/test_downloadermiddleware.py E501
     tests/test_downloadermiddleware_ajaxcrawlable.py E302 E501
     tests/test_downloadermiddleware_cookies.py E731 E741 E501 E128 E303 E265 E126
     tests/test_downloadermiddleware_decompression.py E127
     tests/test_downloadermiddleware_defaultheaders.py E501
     tests/test_downloadermiddleware_downloadtimeout.py E501
-    tests/test_downloadermiddleware_httpcache.py E713 E501 E302 E305 F401
+    tests/test_downloadermiddleware_httpcache.py E501 E302 E305 F401
     tests/test_downloadermiddleware_httpcompression.py E501 F401 E251 E126 E123
     tests/test_downloadermiddleware_httpproxy.py F401 E501 E128
     tests/test_downloadermiddleware_redirect.py E501 E303 E128 E306 E127 E305
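Removing E711 and E713 from these per-file ignore entries re-enables those checks for the two files, so the source fixes in the hunks below have to land in the same commit or the flake8 run would start failing. A hypothetical way to confirm locally, assuming flake8 is installed (paths taken from the diff above):

    flake8 --select=E711,E713 tests/test_downloader_handlers.py tests/test_downloadermiddleware_httpcache.py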
tests/test_downloader_handlers.py
@@ -615,7 +615,7 @@ class Http11MockServerTestCase(unittest.TestCase):
         crawler = get_crawler(SingleRequestSpider)
         yield crawler.crawl(seed=Request(url=self.mockserver.url('')))
         failure = crawler.spider.meta.get('failure')
-        self.assertTrue(failure == None)
+        self.assertTrue(failure is None)
         reason = crawler.spider.meta['close_reason']
         self.assertTrue(reason, 'finished')
@@ -636,7 +636,7 @@ class Http11MockServerTestCase(unittest.TestCase):
             yield crawler.crawl(seed=request)
             # download_maxsize = 50 is enough for the gzipped response
             failure = crawler.spider.meta.get('failure')
-            self.assertTrue(failure == None)
+            self.assertTrue(failure is None)
             reason = crawler.spider.meta['close_reason']
             self.assertTrue(reason, 'finished')
         else:
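E711 is more than style: "==" dispatches to a type's __eq__, which a class can override, while "is" always tests object identity. A minimal sketch (class name hypothetical) of how the two can disagree:

    # A class whose instances compare equal to everything, including None.
    class AlwaysEqual:
        def __eq__(self, other):
            return True

    obj = AlwaysEqual()
    assert obj == None        # would be flagged as E711, yet it passes
    assert obj is not None    # the identity check gives the right answer

For the assertions above, "failure == None" could therefore pass for a non-None failure object with a permissive __eq__, while "failure is None" cannot.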
tests/test_downloadermiddleware_httpcache.py
@@ -85,8 +85,8 @@ class _BaseTest(unittest.TestCase):
 
     def assertEqualRequestButWithCacheValidators(self, request1, request2):
         self.assertEqual(request1.url, request2.url)
-        assert not b'If-None-Match' in request1.headers
-        assert not b'If-Modified-Since' in request1.headers
+        assert b'If-None-Match' not in request1.headers
+        assert b'If-Modified-Since' not in request1.headers
         assert any(h in request2.headers for h in (b'If-None-Match', b'If-Modified-Since'))
         self.assertEqual(request1.body, request2.body)
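E713, by contrast, is purely about readability: Python parses "not a in b" as "not (a in b)", so both spellings are equivalent, and recent CPython typically even compiles them to the same bytecode. A small sketch:

    headers = {b'If-None-Match': b'"abc"'}
    assert (not b'If-Modified-Since' in headers) == (b'If-Modified-Since' not in headers)

    import dis
    dis.dis('not a in b')  # in recent CPython, same bytecode as the line below
    dis.dis('a not in b')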