From 4270e0a0da66a2cb3a8e904c5ea74f84b7f9d041 Mon Sep 17 00:00:00 2001
From: Eugenio Lacuesta
Date: Sat, 4 Apr 2020 21:51:02 -0300
Subject: [PATCH] Fix E731: do not assign a lambda expression

---
 pytest.ini                                 | 24 +++++++++++-----------
 scrapy/commands/fetch.py                   |  4 ++--
 scrapy/commands/parse.py                   |  4 ++--
 scrapy/core/downloader/webclient.py        |  9 ++++----
 scrapy/linkextractors/__init__.py          | 10 +++++++--
 scrapy/linkextractors/lxmlhtml.py          |  6 ++----
 tests/test_downloadermiddleware_cookies.py |  7 +++----
 tests/test_exporters.py                    | 11 +++++-----
 tests/test_pipeline_media.py               | 18 ++++++++++------
 tests/test_utils_python.py                 |  4 +++-
 tests/test_utils_signal.py                 |  4 +++-
 11 files changed, 57 insertions(+), 44 deletions(-)

diff --git a/pytest.ini b/pytest.ini
index 141a13a4f..4b655b8d5 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -35,9 +35,9 @@ flake8-ignore =
     scrapy/commands/check.py E501
     scrapy/commands/crawl.py E501
     scrapy/commands/edit.py E501
-    scrapy/commands/fetch.py E401 E501 E128 E731
+    scrapy/commands/fetch.py E401 E501 E128
     scrapy/commands/genspider.py E128 E501 E502
-    scrapy/commands/parse.py E128 E501 E731
+    scrapy/commands/parse.py E128 E501
     scrapy/commands/runspider.py E501
     scrapy/commands/settings.py E128
     scrapy/commands/shell.py E128 E501 E502
@@ -50,12 +50,12 @@ flake8-ignore =
     scrapy/core/engine.py E501 E128 E127 E502
     scrapy/core/scheduler.py E501
     scrapy/core/scraper.py E501 E128 W504
-    scrapy/core/spidermw.py E501 E731 E126
+    scrapy/core/spidermw.py E501 E126
     scrapy/core/downloader/__init__.py E501
     scrapy/core/downloader/contextfactory.py E501 E128 E126
     scrapy/core/downloader/middleware.py E501 E502
     scrapy/core/downloader/tls.py E501 E241
-    scrapy/core/downloader/webclient.py E731 E501 E128 E126
+    scrapy/core/downloader/webclient.py E501 E128 E126
     scrapy/core/downloader/handlers/__init__.py E501
     scrapy/core/downloader/handlers/ftp.py E501 E128 E127
     scrapy/core/downloader/handlers/http10.py E501
@@ -90,8 +90,8 @@ flake8-ignore =
     scrapy/http/response/__init__.py E501 E128
     scrapy/http/response/text.py E501 E128 E124
     # scrapy/linkextractors
-    scrapy/linkextractors/__init__.py E731 E501 E402 W504
-    scrapy/linkextractors/lxmlhtml.py E501 E731
+    scrapy/linkextractors/__init__.py E501 E402 W504
+    scrapy/linkextractors/lxmlhtml.py E501
     # scrapy/loader
     scrapy/loader/__init__.py E501 E128
     scrapy/loader/processors.py E501
@@ -184,7 +184,7 @@ flake8-ignore =
     tests/test_downloader_handlers.py E124 E127 E128 E265 E501 E126 E123
     tests/test_downloadermiddleware.py E501
     tests/test_downloadermiddleware_ajaxcrawlable.py E501
-    tests/test_downloadermiddleware_cookies.py E731 E741 E501 E128 E265 E126
+    tests/test_downloadermiddleware_cookies.py E741 E501 E128 E265 E126
     tests/test_downloadermiddleware_decompression.py E127
     tests/test_downloadermiddleware_defaultheaders.py E501
     tests/test_downloadermiddleware_downloadtimeout.py E501
@@ -197,7 +197,7 @@ flake8-ignore =
     tests/test_downloadermiddleware_stats.py E501
     tests/test_dupefilters.py E501 E741 E128 E124
     tests/test_engine.py E401 E501 E128
-    tests/test_exporters.py E501 E731 E128 E124
+    tests/test_exporters.py E501 E128 E124
     tests/test_extension_telnet.py F841
     tests/test_feedexport.py E501 F841 E241
     tests/test_http_cookies.py E501
@@ -207,14 +207,14 @@ flake8-ignore =
     tests/test_item.py E128 F841
     tests/test_link.py E501
     tests/test_linkextractors.py E501 E128 E124
-    tests/test_loader.py E501 E731 E741 E128 E117 E241
+    tests/test_loader.py E501 E741 E128 E117 E241
     tests/test_logformatter.py E128 E501 E122
     tests/test_mail.py E128 E501
     tests/test_middleware.py E501 E128
     tests/test_pipeline_crawl.py E501 E128 E126
     tests/test_pipeline_files.py E501
     tests/test_pipeline_images.py F841 E501
-    tests/test_pipeline_media.py E501 E741 E731 E128 E502
+    tests/test_pipeline_media.py E501 E741 E128 E502
     tests/test_proxy_connect.py E501 E741
     tests/test_request_cb_kwargs.py E501
     tests/test_responsetypes.py E501
@@ -237,11 +237,11 @@ flake8-ignore =
     tests/test_utils_http.py E501 E128 W504
     tests/test_utils_iterators.py E501 E128 E129 E241
     tests/test_utils_log.py E741
-    tests/test_utils_python.py E501 E731
+    tests/test_utils_python.py E501
     tests/test_utils_reqser.py E501 E128
     tests/test_utils_request.py E501 E128
     tests/test_utils_response.py E501
-    tests/test_utils_signal.py E741 F841 E731
+    tests/test_utils_signal.py E741 F841
     tests/test_utils_sitemap.py E128 E501 E124
     tests/test_utils_url.py E501 E127 E125 E501 E241 E126 E123
     tests/test_webclient.py E501 E128 E122 E402 E241 E123 E126
diff --git a/scrapy/commands/fetch.py b/scrapy/commands/fetch.py
index 0e149941d..506d1f1b7 100644
--- a/scrapy/commands/fetch.py
+++ b/scrapy/commands/fetch.py
@@ -49,8 +49,8 @@ class Command(ScrapyCommand):
     def run(self, args, opts):
         if len(args) != 1 or not is_url(args[0]):
             raise UsageError()
-        cb = lambda x: self._print_response(x, opts)
-        request = Request(args[0], callback=cb, dont_filter=True)
+        request = Request(args[0], callback=self._print_response,
+                          cb_kwargs={"opts": opts}, dont_filter=True)
         # by default, let the framework handle redirects,
         # i.e. command handles all codes expect 3xx
         if not opts.no_redirect:
diff --git a/scrapy/commands/parse.py b/scrapy/commands/parse.py
index 3ef8ddcb3..d5abe5930 100644
--- a/scrapy/commands/parse.py
+++ b/scrapy/commands/parse.py
@@ -147,8 +147,8 @@ class Command(ScrapyCommand):
             logger.error('Unable to find spider for: %(url)s', {'url': url})
 
         # Request requires callback argument as callable or None, not string
-        request = Request(url, None)
-        _start_requests = lambda s: [self.prepare_request(s, request, opts)]
+        def _start_requests(spider):
+            yield self.prepare_request(spider, Request(url, None), opts)
         self.spidercls.start_requests = _start_requests
 
     def start_parsing(self, url, opts):
diff --git a/scrapy/core/downloader/webclient.py b/scrapy/core/downloader/webclient.py
index a71dc5fb3..a90a77b2b 100644
--- a/scrapy/core/downloader/webclient.py
+++ b/scrapy/core/downloader/webclient.py
@@ -14,13 +14,12 @@ from scrapy.responsetypes import responsetypes
 def _parsed_url_args(parsed):
     # Assume parsed is urlparse-d from Request.url,
     # which was passed via safe_url_string and is ascii-only.
-    b = lambda s: to_bytes(s, encoding='ascii')
     path = urlunparse(('', '', parsed.path or '/', parsed.params, parsed.query, ''))
-    path = b(path)
-    host = b(parsed.hostname)
+    path = to_bytes(path, encoding="ascii")
+    host = to_bytes(parsed.hostname, encoding="ascii")
     port = parsed.port
-    scheme = b(parsed.scheme)
-    netloc = b(parsed.netloc)
+    scheme = to_bytes(parsed.scheme, encoding="ascii")
+    netloc = to_bytes(parsed.netloc, encoding="ascii")
     if port is None:
         port = 443 if scheme == b'https' else 80
     return scheme, netloc, host, port, path
diff --git a/scrapy/linkextractors/__init__.py b/scrapy/linkextractors/__init__.py
index 6afe867b5..d0b5066b6 100644
--- a/scrapy/linkextractors/__init__.py
+++ b/scrapy/linkextractors/__init__.py
@@ -45,8 +45,14 @@ IGNORED_EXTENSIONS = [
 
 _re_type = type(re.compile("", 0))
 
-_matches = lambda url, regexs: any(r.search(url) for r in regexs)
-_is_valid_url = lambda url: url.split('://', 1)[0] in {'http', 'https', 'file', 'ftp'}
+
+
+def _matches(url, regexs):
+    return any(r.search(url) for r in regexs)
+
+
+def _is_valid_url(url):
+    return url.split('://', 1)[0] in {'http', 'https', 'file', 'ftp'}
 
 
 class FilteringLinkExtractor:
diff --git a/scrapy/linkextractors/lxmlhtml.py b/scrapy/linkextractors/lxmlhtml.py
index fbac1dc59..ceb37c5f1 100644
--- a/scrapy/linkextractors/lxmlhtml.py
+++ b/scrapy/linkextractors/lxmlhtml.py
@@ -98,11 +98,9 @@ class LxmlLinkExtractor(FilteringLinkExtractor):
                  unique=True, process_value=None, deny_extensions=None, restrict_css=(),
                  strip=True, restrict_text=None):
         tags, attrs = set(arg_to_iter(tags)), set(arg_to_iter(attrs))
-        tag_func = lambda x: x in tags
-        attr_func = lambda x: x in attrs
         lx = LxmlParserLinkExtractor(
-            tag=tag_func,
-            attr=attr_func,
+            tag=lambda x: x in tags,
+            attr=lambda x: x in attrs,
             unique=unique,
             process=process_value,
             strip=strip,
diff --git a/tests/test_downloadermiddleware_cookies.py b/tests/test_downloadermiddleware_cookies.py
index 051f66680..a8182e2ef 100644
--- a/tests/test_downloadermiddleware_cookies.py
+++ b/tests/test_downloadermiddleware_cookies.py
@@ -13,10 +13,9 @@ from scrapy.downloadermiddlewares.cookies import CookiesMiddleware
 class CookiesMiddlewareTest(TestCase):
 
     def assertCookieValEqual(self, first, second, msg=None):
-        cookievaleq = lambda cv: re.split(r';\s*', cv.decode('latin1'))
-        return self.assertEqual(
-            sorted(cookievaleq(first)),
-            sorted(cookievaleq(second)), msg)
+        def split_cookies(cookies):
+            return sorted(re.split(r";\s*", cookies.decode("latin1")))
+        return self.assertEqual(split_cookies(first), split_cookies(second), msg=msg)
 
     def setUp(self):
         self.spider = Spider('foo')
diff --git a/tests/test_exporters.py b/tests/test_exporters.py
index 6e2507508..160912847 100644
--- a/tests/test_exporters.py
+++ b/tests/test_exporters.py
@@ -215,11 +215,12 @@ class CsvItemExporterTest(BaseItemExporterTest):
         return CsvItemExporter(self.output, **kwargs)
 
     def assertCsvEqual(self, first, second, msg=None):
-        first = to_unicode(first)
-        second = to_unicode(second)
-        csvsplit = lambda csv: [sorted(re.split(r'(,|\s+)', line))
-                                for line in csv.splitlines(True)]
-        return self.assertEqual(csvsplit(first), csvsplit(second), msg)
+        def split_csv(csv):
+            return [
+                sorted(re.split(r"(,|\s+)", line))
+                for line in to_unicode(csv).splitlines(True)
+            ]
+        return self.assertEqual(split_csv(first), split_csv(second), msg=msg)
 
     def _check_output(self):
         self.assertCsvEqual(to_unicode(self.output.getvalue()), u'age,name\r\n22,John\xa3\r\n')
diff --git a/tests/test_pipeline_media.py b/tests/test_pipeline_media.py
index d369e147d..f84f47816 100644
--- a/tests/test_pipeline_media.py
+++ b/tests/test_pipeline_media.py
@@ -199,12 +199,19 @@ class MediaPipelineTestCase(BaseMediaPipelineTestCase):
 
     pipeline_class = MockedMediaPipeline
 
+    def _callback(self, result):
+        self.pipe._mockcalled.append('request_callback')
+        return result
+
+    def _errback(self, result):
+        self.pipe._mockcalled.append('request_errback')
+        return result
+
     @inlineCallbacks
     def test_result_succeed(self):
-        cb = lambda _: self.pipe._mockcalled.append('request_callback') or _
-        eb = lambda _: self.pipe._mockcalled.append('request_errback') or _
         rsp = Response('http://url1')
-        req = Request('http://url1', meta=dict(response=rsp), callback=cb, errback=eb)
+        req = Request('http://url1', meta=dict(response=rsp),
+                      callback=self._callback, errback=self._errback)
         item = dict(requests=req)
         new_item = yield self.pipe.process_item(item, self.spider)
         self.assertEqual(new_item['results'], [(True, rsp)])
@@ -215,10 +222,9 @@ class MediaPipelineTestCase(BaseMediaPipelineTestCase):
     @inlineCallbacks
     def test_result_failure(self):
         self.pipe.LOG_FAILED_RESULTS = False
-        cb = lambda _: self.pipe._mockcalled.append('request_callback') or _
-        eb = lambda _: self.pipe._mockcalled.append('request_errback') or _
         fail = Failure(Exception())
-        req = Request('http://url1', meta=dict(response=fail), callback=cb, errback=eb)
+        req = Request('http://url1', meta=dict(response=fail),
+                      callback=self._callback, errback=self._errback)
         item = dict(requests=req)
         new_item = yield self.pipe.process_item(item, self.spider)
         self.assertEqual(new_item['results'], [(False, fail)])
diff --git a/tests/test_utils_python.py b/tests/test_utils_python.py
index 8cb8df15b..65e6ba876 100644
--- a/tests/test_utils_python.py
+++ b/tests/test_utils_python.py
@@ -145,7 +145,9 @@ class UtilsPythonTestCase(unittest.TestCase):
 
         get_z = operator.itemgetter('z')
         get_meta = operator.attrgetter('meta')
-        compare_z = lambda obj: get_z(get_meta(obj))
+
+        def compare_z(obj):
+            return get_z(get_meta(obj))
         self.assertTrue(equal_attributes(a, b, [compare_z, 'x']))
 
         # fail z equality
diff --git a/tests/test_utils_signal.py b/tests/test_utils_signal.py
index 9f6da09ed..bb211dc60 100644
--- a/tests/test_utils_signal.py
+++ b/tests/test_utils_signal.py
@@ -90,8 +90,10 @@ class SendCatchLogDeferredAsyncioTest(SendCatchLogDeferredTest):
 class SendCatchLogTest2(unittest.TestCase):
 
     def test_error_logged_if_deferred_not_supported(self):
+        def test_handler():
+            return defer.Deferred()
+
         test_signal = object()
-        test_handler = lambda: defer.Deferred()
         dispatcher.connect(test_handler, test_signal)
         with LogCapture() as l:
             send_catch_log(test_signal)