mirror of https://github.com/scrapy/scrapy.git

Add Perflint rules to ruff.

Andrey Rakhmatullin 2024-12-12 21:03:34 +05:00
parent c2832ed131
commit 1e4c81e9dc
8 changed files with 24 additions and 38 deletions

View File

@@ -240,6 +240,8 @@ extend-select = [
     "ISC",
     # flake8-logging
     "LOG",
+    # Perflint
+    "PERF",
     # pygrep-hooks
     "PGH",
     # flake8-pie
@@ -314,6 +316,8 @@ ignore = [
     "D402",
     # First word of the first line should be properly capitalized
     "D403",
+    # `try`-`except` within a loop incurs performance overhead
+    "PERF203",
     # Use of `assert` detected; needed for mypy
     "S101",
     # FTP-related functions are being called; https://github.com/scrapy/scrapy/issues/4180
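Note: Scrapy opts out of PERF203 here, presumably because try/except inside a loop is often intentional. A minimal, hypothetical sketch of the pattern the rule flags (none of these names come from the codebase):

# Hypothetical example; only illustrates what PERF203 warns about.
values = ["1", "2", "oops", "4"]

parsed = []
for value in values:  # PERF203: try/except set up on every iteration
    try:
        parsed.append(int(value))
    except ValueError:
        parsed.append(0)  # fall back instead of aborting the loop

print(parsed)  # [1, 2, 0, 4]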

View File

@@ -7,10 +7,7 @@ _T = TypeVar("_T")


 async def collect_asyncgen(result: AsyncIterable[_T]) -> list[_T]:
-    results = []
-    async for x in result:
-        results.append(x)
-    return results
+    return [x async for x in result]


 async def as_async_generator(
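This rewrite is what PERF401 (manual list comprehension) suggests: an async comprehension (PEP 530) in place of append() inside an async for loop. A self-contained sketch, with a hypothetical generator standing in for any AsyncIterable:

import asyncio

async def numbers():
    # Hypothetical async generator, for illustration only.
    for i in range(3):
        yield i

async def main():
    # Same result as the removed loop, built in one expression.
    results = [x async for x in numbers()]
    assert results == [0, 1, 2]

asyncio.run(main())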

View File

@@ -235,8 +235,7 @@ def get_func_args(func: Callable[..., Any], stripself: bool = False) -> list[str
                 continue
             args.append(name)
     else:
-        for name in sig.parameters.keys():
-            args.append(name)
+        args = list(sig.parameters)

     if stripself and args and args[0] == "self":
         args = args[1:]
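This works because inspect.Signature.parameters is an ordered mapping keyed by parameter name: iterating it yields the names directly, so list() replaces both the .keys() call and the accumulator loop. A small check (the function name is illustrative only):

import inspect

def example(self, url, timeout=10):
    pass

# Iterating a mapping yields its keys, in declaration order here.
assert list(inspect.signature(example).parameters) == ["self", "url", "timeout"]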

View File

@@ -1757,13 +1757,13 @@ class FeedPostProcessedExportsTest(FeedExportTestBase):
             crawler = get_crawler(spider_cls, settings)
             yield crawler.crawl()
-            for file_path, feed_options in FEEDS.items():
+            for file_path in FEEDS:
                 content[str(file_path)] = (
                     Path(file_path).read_bytes() if Path(file_path).exists() else None
                 )
         finally:
-            for file_path in FEEDS.keys():
+            for file_path in FEEDS:
                 if not Path(file_path).exists():
                     continue
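Both loops fall under PERF102 (incorrect dict iterator): iterating a dict yields its keys, so .keys() is redundant, and .items() built (key, value) pairs whose value half (feed_options) was never read. In miniature, with a stand-in dict rather than the test fixture:

FEEDS = {"items.json": {"format": "json"}, "items.xml": {"format": "xml"}}

for file_path in FEEDS:  # same keys as FEEDS.keys() or FEEDS.items(), no extra call
    print(file_path)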

View File

@@ -289,10 +289,7 @@ class ProcessSpiderOutputNonIterableMiddleware:

 class ProcessSpiderOutputCoroutineMiddleware:
     async def process_spider_output(self, response, result, spider):
-        results = []
-        for r in result:
-            results.append(r)
-        return results
+        return result


 class ProcessSpiderOutputInvalidResult(BaseAsyncSpiderMiddlewareTestCase):
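The removed loop copied result element by element, which PERF402 (manual list copy) flags; this test middleware never needed the copy, so the iterable is returned as-is. Where a real copy is wanted, the suggested spelling is list(result). A hypothetical side-by-side:

def copy_with_loop(result):
    # PERF402: element-by-element copy via append().
    results = []
    for r in result:
        results.append(r)
    return results

def copy_with_list(result):
    # Equivalent copy in one call.
    return list(result)

assert copy_with_loop(range(3)) == copy_with_list(range(3)) == [0, 1, 2]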

View File

@@ -8,9 +8,7 @@ class AsyncgenUtilsTest(unittest.TestCase):
     @deferred_f_from_coro_f
     async def test_as_async_generator(self):
         ag = as_async_generator(range(42))
-        results = []
-        async for i in ag:
-            results.append(i)
+        results = [i async for i in ag]
         self.assertEqual(results, list(range(42)))

     @deferred_f_from_coro_f

View File

@@ -26,15 +26,14 @@ class XmliterBaseTestCase:
         """
         response = XmlResponse(url="http://example.com", body=body)
-        attrs = []
-        for x in self.xmliter(response, "product"):
-            attrs.append(
-                (
-                    x.attrib["id"],
-                    x.xpath("name/text()").getall(),
-                    x.xpath("./type/text()").getall(),
-                )
-            )
+        attrs = [
+            (
+                x.attrib["id"],
+                x.xpath("name/text()").getall(),
+                x.xpath("./type/text()").getall(),
+            )
+            for x in self.xmliter(response, "product")
+        ]
         self.assertEqual(
             attrs, [("001", ["Name 1"], ["Type 1"]), ("002", ["Name 2"], ["Type 2"])]
@@ -99,15 +98,14 @@ class XmliterBaseTestCase:
             # Unicode body needs encoding information
             XmlResponse(url="http://example.com", body=body, encoding="utf-8"),
         ):
-            attrs = []
-            for x in self.xmliter(r, "þingflokkur"):
-                attrs.append(
-                    (
-                        x.attrib["id"],
-                        x.xpath("./skammstafanir/stuttskammstöfun/text()").getall(),
-                        x.xpath("./tímabil/fyrstaþing/text()").getall(),
-                    )
-                )
+            attrs = [
+                (
+                    x.attrib["id"],
+                    x.xpath("./skammstafanir/stuttskammstöfun/text()").getall(),
+                    x.xpath("./tímabil/fyrstaþing/text()").getall(),
+                )
+                for x in self.xmliter(r, "þingflokkur")
+            ]
             self.assertEqual(
                 attrs,
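Both hunks above are the same PERF401 rewrite applied to a multi-line element: the tuple moves into the comprehension body and the loop becomes the trailing for clause. A synchronous miniature with made-up data:

# Hypothetical stand-in for the parsed XML nodes.
products = [{"id": "001", "name": "Name 1"}, {"id": "002", "name": "Name 2"}]

attrs = [
    (p["id"], p["name"])  # one tuple per element, as in the tests
    for p in products
]
assert attrs == [("001", "Name 1"), ("002", "Name 2")]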

View File

@@ -58,13 +58,6 @@ class MutableAsyncChainTest(unittest.TestCase):
         for i in range(5, 7):
             yield i

-    @staticmethod
-    async def collect_asyncgen_exc(asyncgen):
-        results = []
-        async for x in asyncgen:
-            results.append(x)
-        return results
-
     @deferred_f_from_coro_f
     async def test_mutableasyncchain(self):
         m = MutableAsyncChain(self.g1(), as_async_generator(range(3, 7)))