
fix E30X flake8 (#4355)

Marc Hernández 2020-02-21 06:05:31 +01:00 committed by GitHub
parent c4ee4b6075
commit 91bbc70bc1
29 changed files with 58 additions and 45 deletions
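
For reference, the E30X family in pycodestyle (reported through flake8) covers blank-line layout: E301 expected 1 blank line, E303 too many blank lines, E305 expected 2 blank lines after a class or function definition, and E306 expected 1 blank line before a nested definition. Below is a minimal hypothetical snippet (not Scrapy code) showing the two offences this commit fixes most often; the cure in both cases is inserting blank lines, which is why the hunks in this diff mostly add or delete empty lines:

def accumulate(values):
    total = 0
    def add(v):  # E306: expected 1 blank line before a nested definition, found 0
        nonlocal total
        total += v
    for v in values:
        add(v)
    return total
TOTAL = accumulate([1, 2, 3])  # E305: expected 2 blank lines after function definition, found 0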

View File

@@ -47,17 +47,17 @@ flake8-ignore =
     scrapy/contracts/__init__.py E501 W504
     scrapy/contracts/default.py E128
     # scrapy/core
-    scrapy/core/engine.py E501 E128 E127 E306 E502
+    scrapy/core/engine.py E501 E128 E127 E502
     scrapy/core/scheduler.py E501
-    scrapy/core/scraper.py E501 E306 E128 W504
+    scrapy/core/scraper.py E501 E128 W504
     scrapy/core/spidermw.py E501 E731 E126 E226
     scrapy/core/downloader/__init__.py E501
     scrapy/core/downloader/contextfactory.py E501 E128 E126
     scrapy/core/downloader/middleware.py E501 E502
-    scrapy/core/downloader/tls.py E501 E305 E241
+    scrapy/core/downloader/tls.py E501 E241
     scrapy/core/downloader/webclient.py E731 E501 E128 E126 E226
     scrapy/core/downloader/handlers/__init__.py E501
-    scrapy/core/downloader/handlers/ftp.py E501 E305 E128 E127
+    scrapy/core/downloader/handlers/ftp.py E501 E128 E127
     scrapy/core/downloader/handlers/http10.py E501
     scrapy/core/downloader/handlers/http11.py E501
     scrapy/core/downloader/handlers/s3.py E501 E128 E126
@@ -76,7 +76,7 @@ flake8-ignore =
     scrapy/extensions/closespider.py E501 E128 E123
     scrapy/extensions/corestats.py E501
     scrapy/extensions/feedexport.py E128 E501
-    scrapy/extensions/httpcache.py E128 E501 E303
+    scrapy/extensions/httpcache.py E128 E501
     scrapy/extensions/memdebug.py E501
     scrapy/extensions/spiderstate.py E501
     scrapy/extensions/telnet.py E501 W504
@@ -121,12 +121,11 @@ flake8-ignore =
     scrapy/utils/asyncio.py E501
     scrapy/utils/benchserver.py E501
     scrapy/utils/conf.py E402 E501
-    scrapy/utils/console.py E306 E305
     scrapy/utils/datatypes.py E501 E226
     scrapy/utils/decorators.py E501
     scrapy/utils/defer.py E501 E128
     scrapy/utils/deprecate.py E128 E501 E127 E502
-    scrapy/utils/gz.py E305 E501 W504
+    scrapy/utils/gz.py E501 W504
     scrapy/utils/http.py F403 E226
     scrapy/utils/httpobj.py E501
     scrapy/utils/iterators.py E501 E701
@@ -161,7 +160,7 @@ flake8-ignore =
     scrapy/middleware.py E128 E501
     scrapy/pqueues.py E501
     scrapy/resolver.py E501
-    scrapy/responsetypes.py E128 E501 E305
+    scrapy/responsetypes.py E128 E501
     scrapy/robotstxt.py E501
     scrapy/shell.py E501
     scrapy/signalmanager.py E501
@@ -175,50 +174,50 @@ flake8-ignore =
     tests/spiders.py E501 E127
     tests/test_closespider.py E501 E127
     tests/test_command_fetch.py E501
-    tests/test_command_parse.py E501 E128 E303 E226
+    tests/test_command_parse.py E501 E128 E226
     tests/test_command_shell.py E501 E128
     tests/test_commands.py E128 E501
     tests/test_contracts.py E501 E128
     tests/test_crawl.py E501 E741 E265
-    tests/test_crawler.py F841 E306 E501
-    tests/test_dependencies.py F841 E501 E305
+    tests/test_crawler.py F841 E501
+    tests/test_dependencies.py F841 E501
     tests/test_downloader_handlers.py E124 E127 E128 E225 E265 E501 E701 E126 E226 E123
     tests/test_downloadermiddleware.py E501
     tests/test_downloadermiddleware_ajaxcrawlable.py E501
-    tests/test_downloadermiddleware_cookies.py E731 E741 E501 E128 E303 E265 E126
+    tests/test_downloadermiddleware_cookies.py E731 E741 E501 E128 E265 E126
     tests/test_downloadermiddleware_decompression.py E127
     tests/test_downloadermiddleware_defaultheaders.py E501
     tests/test_downloadermiddleware_downloadtimeout.py E501
-    tests/test_downloadermiddleware_httpcache.py E501 E305
+    tests/test_downloadermiddleware_httpcache.py E501
     tests/test_downloadermiddleware_httpcompression.py E501 E251 E126 E123
     tests/test_downloadermiddleware_httpproxy.py E501 E128
-    tests/test_downloadermiddleware_redirect.py E501 E303 E128 E306 E127 E305
-    tests/test_downloadermiddleware_retry.py E501 E128 E251 E303 E126
+    tests/test_downloadermiddleware_redirect.py E501 E128 E127
+    tests/test_downloadermiddleware_retry.py E501 E128 E251 E126
     tests/test_downloadermiddleware_robotstxt.py E501
     tests/test_downloadermiddleware_stats.py E501
     tests/test_dupefilters.py E221 E501 E741 E128 E124
     tests/test_engine.py E401 E501 E128
-    tests/test_exporters.py E501 E731 E306 E128 E124
+    tests/test_exporters.py E501 E731 E128 E124
     tests/test_extension_telnet.py F841
     tests/test_feedexport.py E501 F841 E241
     tests/test_http_cookies.py E501
     tests/test_http_headers.py E501
     tests/test_http_request.py E402 E501 E127 E128 E128 E126 E123
-    tests/test_http_response.py E501 E301 E128 E265
-    tests/test_item.py E701 E128 F841 E306
+    tests/test_http_response.py E501 E128 E265
+    tests/test_item.py E701 E128 F841
     tests/test_link.py E501
     tests/test_linkextractors.py E501 E128 E124
-    tests/test_loader.py E501 E731 E303 E741 E128 E117 E241
+    tests/test_loader.py E501 E731 E741 E128 E117 E241
     tests/test_logformatter.py E128 E501 E122
-    tests/test_mail.py E128 E501 E305
+    tests/test_mail.py E128 E501
     tests/test_middleware.py E501 E128
     tests/test_pipeline_crawl.py E131 E501 E128 E126
-    tests/test_pipeline_files.py E501 E303 E272 E226
-    tests/test_pipeline_images.py F841 E501 E303
-    tests/test_pipeline_media.py E501 E741 E731 E128 E306 E502
+    tests/test_pipeline_files.py E501 E272 E226
+    tests/test_pipeline_images.py F841 E501
+    tests/test_pipeline_media.py E501 E741 E731 E128 E502
     tests/test_proxy_connect.py E501 E741
     tests/test_request_cb_kwargs.py E501
-    tests/test_responsetypes.py E501 E305
+    tests/test_responsetypes.py E501
     tests/test_robotstxt_interface.py E501 E501
     tests/test_scheduler.py E501 E126 E123
     tests/test_selector.py E501 E127
@@ -230,24 +229,22 @@ flake8-ignore =
     tests/test_spidermiddleware_referer.py E501 F841 E125 E201 E124 E501 E241 E121
     tests/test_squeues.py E501 E701 E741
     tests/test_utils_asyncio.py E501
-    tests/test_utils_conf.py E501 E303 E128
+    tests/test_utils_conf.py E501 E128
     tests/test_utils_curl.py E501
-    tests/test_utils_datatypes.py E402 E501 E305
-    tests/test_utils_defer.py E306 E501 F841 E226
-    tests/test_utils_deprecate.py F841 E306 E501
+    tests/test_utils_datatypes.py E402 E501
+    tests/test_utils_defer.py E501 F841 E226
+    tests/test_utils_deprecate.py F841 E501
     tests/test_utils_http.py E501 E128 W504
-    tests/test_utils_iterators.py E501 E128 E129 E303 E241
+    tests/test_utils_iterators.py E501 E128 E129 E241
     tests/test_utils_log.py E741 E226
-    tests/test_utils_python.py E501 E303 E731 E701 E305
+    tests/test_utils_python.py E501 E731 E701
     tests/test_utils_reqser.py E501 E128
-    tests/test_utils_request.py E501 E128 E305
+    tests/test_utils_request.py E501 E128
     tests/test_utils_response.py E501
     tests/test_utils_signal.py E741 F841 E731 E226
     tests/test_utils_sitemap.py E128 E501 E124
-    tests/test_utils_spider.py E305
-    tests/test_utils_template.py E305
-    tests/test_utils_url.py E501 E127 E305 E211 E125 E501 E226 E241 E126 E123
-    tests/test_webclient.py E501 E128 E122 E303 E402 E306 E226 E241 E123 E126
+    tests/test_utils_url.py E501 E127 E211 E125 E501 E226 E241 E126 E123
+    tests/test_webclient.py E501 E128 E122 E402 E226 E241 E123 E126
     tests/test_cmdline/__init__.py E501
     tests/test_settings/__init__.py E501 E128
     tests/test_spiderloader/__init__.py E128 E501
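
The list above is the per-file ignore table that Scrapy's test suite feeds to pytest-flake8 via setup.cfg: each entry names a file and the error codes suppressed for it, so once a code is fixed in a file it is deleted from that file's entry (or the whole entry is dropped when no codes remain, as for scrapy/utils/console.py). One way to confirm a cleaned-up file really is E30X-free is pycodestyle's Python API; a minimal sketch, assuming pycodestyle is installed and using one of the files touched here:

import pycodestyle

# Check only the blank-line codes this commit addresses.
style = pycodestyle.StyleGuide(select=['E301', 'E303', 'E305', 'E306'])
report = style.check_files(['scrapy/utils/gz.py'])
print(report.total_errors)  # should print 0 after this commit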

View File

@@ -89,4 +89,5 @@ class ScrapyClientTLSOptions(ClientTLSOptions):
                     'from host "{}" (exception: {})'.format(
                         self._hostnameASCII, repr(e)))
 
+
 DEFAULT_CIPHERS = AcceptableCiphers.fromOpenSSLCipherString('DEFAULT')

View File

@@ -230,6 +230,7 @@ class ExecutionEngine(object):
     def _download(self, request, spider):
         slot = self.slot
         slot.add_request(request)
+
         def _on_success(response):
             assert isinstance(response, (Response, Request))
             if isinstance(response, Response):

View File

@@ -116,4 +116,5 @@ class ResponseTypes(object):
             cls = self.from_body(body)
         return cls
 
+
 responsetypes = ResponseTypes()

View File

@@ -54,6 +54,7 @@ def _embed_standard_shell(namespace={}, banner=''):
     else:
         import rlcompleter  # noqa: F401
         readline.parse_and_bind("tab:complete")
+
     @wraps(_embed_standard_shell)
     def wrapper(namespace=namespace, banner=''):
         code.interact(banner=banner, local=namespace)

View File

@@ -42,6 +42,7 @@ def gunzip(data):
                 raise
     return b''.join(output_list)
 
+
 _is_gzipped = re.compile(br'^application/(x-)?gzip\b', re.I).search
 _is_octetstream = re.compile(br'^(application|binary)/octet-stream\b', re.I).search
 

View File

@@ -147,7 +147,6 @@ ITEM_PIPELINES = {'%s.pipelines.MyPipeline': 1}
                                            self.url('/html')])
         self.assertIn("DEBUG: It Works!", _textmode(stderr))
 
-
     @defer.inlineCallbacks
     def test_pipelines(self):
         _, _, stderr = yield self.execute(['--spider', self.spider_name,

View File

@@ -107,6 +107,7 @@ class CrawlerLoggingTestCase(unittest.TestCase):
 
     def test_spider_custom_settings_log_level(self):
         log_file = self.mktemp()
+
         class MySpider(scrapy.Spider):
             name = 'spider'
             custom_settings = {

View File

@@ -13,5 +13,6 @@ class ScrapyUtilsTest(unittest.TestCase):
         installed_version = [int(x) for x in module.__version__.split('.')[:2]]
         assert installed_version >= [0, 6], "OpenSSL >= 0.6 required"
 
+
 if __name__ == "__main__":
     unittest.main()

View File

@@ -145,7 +145,6 @@ class CookiesMiddlewareTest(TestCase):
             {'name': 'C3', 'value': 'value3', 'path': '/foo', 'domain': 'scrapytest.org'},
             {'name': 'C4', 'value': 'value4', 'path': '/foo', 'domain': 'scrapy.org'}]
         req = Request('http://scrapytest.org/', cookies=cookies)
 
-
         self.mw.process_request(req, self.spider)
 

View File

@@ -501,5 +501,6 @@ class RFC2616PolicyTest(DefaultStorageTest):
             self.assertEqualResponse(res1, res2)
             assert 'cached' in res2.flags
 
+
 if __name__ == '__main__':
     unittest.main()

View File

@@ -68,7 +68,6 @@ class RedirectMiddlewareTest(unittest.TestCase):
         assert isinstance(r, Response)
         assert r is rsp
 
-
     def test_redirect_302(self):
         url = 'http://www.example.com/302'
         url2 = 'http://www.example.com/redirected2'
@@ -122,7+121,6 @@ class RedirectMiddlewareTest(unittest.TestCase):
         del rsp.headers['Location']
         assert self.mw.process_response(req, rsp, self.spider) is rsp
 
-
     def test_max_redirect_times(self):
         self.mw.max_redirect_times = 1
         req = Request('http://scrapytest.org/302')
@@ -178,6 +176,7 @@ class RedirectMiddlewareTest(unittest.TestCase):
     def test_request_meta_handling(self):
         url = 'http://www.example.com/301'
         url2 = 'http://www.example.com/redirected'
+
         def _test_passthrough(req):
             rsp = Response(url, headers={'Location': url2}, status=301, request=req)
             r = self.mw.process_response(req, rsp, self.spider)
@@ -316,5 +315,6 @@ class MetaRefreshMiddlewareTest(unittest.TestCase):
         response = mw.process_response(req, rsp, self.spider)
         assert isinstance(response, Response)
 
+
 if __name__ == "__main__":
     unittest.main()

View File

@@ -312,6 +312,7 @@ class XmlItemExporterTest(BaseItemExporterTest):
                         for child in children]
             else:
                 return [(elem.tag, [(elem.text, ())])]
+
         def xmlsplit(xmlcontent):
             doc = lxml.etree.fromstring(xmlcontent)
             return xmltuple(doc)

View File

@@ -182,6 +182,7 @@ class BaseResponseTest(unittest.TestCase):
     def test_follow_whitespace_link(self):
         self._assert_followed_url(Link('http://example.com/foo '),
                                   'http://example.com/foo%20')
+
     def test_follow_flags(self):
         res = self.response_class('http://example.com/')
         fol = res.follow('http://example.com/', flags=['cached', 'allowed'])

View File

@@ -259,6 +259,7 @@ class ItemTest(unittest.TestCase):
         with catch_warnings(record=True) as warnings:
             item = Item()
             self.assertEqual(len(warnings), 0)
+
             class SubclassedItem(Item):
                 pass
             subclassed_item = SubclassedItem()

View File

@@ -121,5 +121,6 @@ class MailSenderTest(unittest.TestCase):
         self.assertEqual(text.get_charset(), Charset('utf-8'))
         self.assertEqual(attach.get_payload(decode=True).decode('utf-8'), body)
 
+
 if __name__ == "__main__":
     unittest.main()

View File

@@ -286,7 +286,6 @@ class FilesPipelineTestCaseCustomSettings(unittest.TestCase):
         self.assertEqual(pipeline.files_result_field, "this")
         self.assertEqual(pipeline.files_urls_field, "that")
 
-
     def test_user_defined_subclass_default_key_names(self):
         """Test situation when user defines subclass of FilesPipeline,
         but uses attribute names for default pipeline (without prefixing

View File

@@ -177,7 +177,6 @@ class ImagesPipelineTestCaseCustomSettings(unittest.TestCase):
         IMAGES_RESULT_FIELD='images'
     )
 
-
    def setUp(self):
         self.tempdir = mkdtemp()
 

View File

@@ -304,6 +304,7 @@ class MediaPipelineTestCase(BaseMediaPipelineTestCase):
             return response
 
         rsp1 = Response('http://url')
+
        def rsp1_func():
             dfd = Deferred().addCallback(_check_downloading)
             reactor.callLater(.1, dfd.callback, rsp1)

View File

@@ -90,5 +90,6 @@ class ResponseTypesTest(unittest.TestCase):
         # check that mime.types files shipped with scrapy are loaded
         self.assertEqual(responsetypes.mimetypes.guess_type('x.scrapytest')[0], 'x-scrapy/test')
 
+
 if __name__ == "__main__":
     unittest.main()

View File

@@ -83,7 +83,6 @@ class BuildComponentListTest(unittest.TestCase):
         self.assertRaises(ValueError, build_component_list, {}, d, convert=lambda x: x)
 
 
-
 class UtilsConfTestCase(unittest.TestCase):
 
     def test_arglist_to_dict(self):

View File

@@ -9,6 +9,7 @@ from scrapy.utils.defer import mustbe_deferred, process_chain, \
 class MustbeDeferredTest(unittest.TestCase):
     def test_success_function(self):
         steps = []
+
         def _append(v):
             steps.append(v)
             return steps
@@ -20,6 +21,7 @@ class MustbeDeferredTest(unittest.TestCase):
 
     def test_unfired_deferred(self):
         steps = []
+
         def _append(v):
             steps.append(v)
         dfd = defer.Deferred()

View File

@@ -110,6 +110,7 @@ class WarnWhenSubclassedTest(unittest.TestCase):
         # ignore subclassing warnings
         with warnings.catch_warnings():
             warnings.simplefilter('ignore', ScrapyDeprecationWarning)
+
             class UserClass(Deprecated):
                 pass
@@ -233,6 +234,7 @@ class WarnWhenSubclassedTest(unittest.TestCase):
 
         with warnings.catch_warnings(record=True) as w:
             AlsoDeprecated()
+
             class UserClass(AlsoDeprecated):
                 pass
@@ -247,6 +249,7 @@ class WarnWhenSubclassedTest(unittest.TestCase):
         with mock.patch('inspect.stack', side_effect=IndexError):
             with warnings.catch_warnings(record=True) as w:
                 DeprecatedName = create_deprecated_class('DeprecatedName', NewName)
+
                 class SubClass(DeprecatedName):
                     pass

View File

@@ -387,7 +387,6 @@ class TestHelper(unittest.TestCase):
         self.assertTrue(type(r1) is type(r2))
         self.assertTrue(type(r1) is not type(r3))
 
-
     def _assert_type_and_value(self, a, b, obj):
         self.assertTrue(type(a) is type(b),
                         'Got {}, expected {} for {!r}'.format(type(a), type(b), obj))

View File

@@ -104,7 +104,6 @@ class BinaryIsTextTest(unittest.TestCase):
         assert not binary_is_text(b"\x02\xa3")
 
 
-
 class UtilsPythonTestCase(unittest.TestCase):
 
     def test_equal_attributes(self):
@@ -215,7 +214,6 @@ class UtilsPythonTestCase(unittest.TestCase):
         self.assertEqual(
             get_func_args(operator.itemgetter(2), stripself=True), ['obj'])
 
-
     def test_without_none_values(self):
         self.assertEqual(without_none_values([1, None, 3, 4]), [1, 3, 4])
         self.assertEqual(without_none_values((1, None, 3, 4)), (1, 3, 4))
@@ -223,5 +221,6 @@ class UtilsPythonTestCase(unittest.TestCase):
             without_none_values({'one': 1, 'none': None, 'three': 3, 'four': 4}),
             {'one': 1, 'three': 3, 'four': 4})
 
+
 if __name__ == "__main__":
     unittest.main()

View File

@@ -83,5 +83,6 @@ class UtilsRequestTest(unittest.TestCase):
         request_httprepr(Request("file:///tmp/foo.txt"))
         request_httprepr(Request("ftp://localhost/tmp/foo.txt"))
 
+
 if __name__ == "__main__":
     unittest.main()

View File

@@ -38,5 +38,6 @@ class UtilsRenderTemplateFileTestCase(unittest.TestCase):
         os.remove(render_path)
         assert not os.path.exists(render_path)  # Failure of test iself
 
+
 if '__main__' == __name__:
     unittest.main()

View File

@@ -201,6 +201,7 @@ def create_skipped_scheme_t(args):
         assert url.startswith(args[1])
     return do_expected
 
+
 for k, args in enumerate ([
     ('/index', 'file://'),
     ('/index.html', 'file://'),

View File

@@ -294,6 +294,7 @@ class WebClientTestCase(unittest.TestCase):
         finished = self.assertFailure(
             getPage(self.getURL("wait"), timeout=0.000001),
             defer.TimeoutError)
+
         def cleanup(passthrough):
             # Clean up the server which is hanging around not doing
             # anything.