1
0
mirror of https://github.com/scrapy/scrapy.git synced 2025-02-06 11:00:46 +00:00

flake8-comprehensions

This commit is contained in:
Andrey Rakhmatullin 2024-06-19 23:36:36 +05:00
parent a617e04d2e
commit 1c70d3e605
8 changed files with 22 additions and 23 deletions

View File

@ -9,6 +9,7 @@ repos:
hooks:
- id: flake8
additional_dependencies:
+ - flake8-comprehensions
- flake8-debugger
- flake8-string-format
- repo: https://github.com/psf/black.git

View File

@ -104,7 +104,7 @@ class ItemFilter:
for item_class in feed_options.get("item_classes") or ()
)
else:
- self.item_classes = tuple()
+ self.item_classes = ()
def accepts(self, item: Any) -> bool:
"""

View File

@ -200,7 +200,7 @@ def get_permissions_dict(
path_obj = Path(path)
- renamings = renamings or tuple()
+ renamings = renamings or ()
permissions_dict = {
".": get_permissions(path_obj),
}

View File

@ -1356,7 +1356,7 @@ class FeedExportTest(FeedExportTestBase):
@defer.inlineCallbacks
def test_export_encoding(self):
- items = [dict({"foo": "Test\xd6"})]
+ items = [{"foo": "Test\xd6"}]
formats = {
"json": b'[{"foo": "Test\\u00d6"}]',
@ -1401,7 +1401,7 @@ class FeedExportTest(FeedExportTestBase):
@defer.inlineCallbacks
def test_export_multiple_configs(self):
- items = [dict({"foo": "FOO", "bar": "BAR"})]
+ items = [{"foo": "FOO", "bar": "BAR"}]
formats = {
"json": b'[\n{"bar": "BAR"}\n]',
@ -2513,8 +2513,8 @@ class BatchDeliveriesTest(FeedExportTestBase):
@defer.inlineCallbacks
def test_export_multiple_configs(self):
items = [
- dict({"foo": "FOO", "bar": "BAR"}),
- dict({"foo": "FOO1", "bar": "BAR1"}),
+ {"foo": "FOO", "bar": "BAR"},
+ {"foo": "FOO1", "bar": "BAR1"},
]
formats = {
@ -2574,7 +2574,7 @@ class BatchDeliveriesTest(FeedExportTestBase):
@defer.inlineCallbacks
def test_batch_item_count_feeds_setting(self):
- items = [dict({"foo": "FOO"}), dict({"foo": "FOO1"})]
+ items = [{"foo": "FOO"}, {"foo": "FOO1"}]
formats = {
"json": [
b'[{"foo": "FOO"}]',

View File

@ -156,7 +156,7 @@ class InitializationTestMixin:
self.assertEqual(il.get_output_value("name"), ["foo"])
loaded_item = il.load_item()
self.assertIsInstance(loaded_item, self.item_class)
- self.assertEqual(ItemAdapter(loaded_item).asdict(), dict({"name": ["foo"]}))
+ self.assertEqual(ItemAdapter(loaded_item).asdict(), {"name": ["foo"]})
def test_get_output_value_list(self):
"""Getting output value must not remove value from item"""
@ -165,9 +165,7 @@ class InitializationTestMixin:
self.assertEqual(il.get_output_value("name"), ["foo", "bar"])
loaded_item = il.load_item()
self.assertIsInstance(loaded_item, self.item_class)
- self.assertEqual(
-     ItemAdapter(loaded_item).asdict(), dict({"name": ["foo", "bar"]})
- )
+ self.assertEqual(ItemAdapter(loaded_item).asdict(), {"name": ["foo", "bar"]})
def test_values_single(self):
"""Values from initial item must be added to loader._values"""

View File

@ -526,7 +526,7 @@ class InitializationFromDictTest(unittest.TestCase):
self.assertEqual(il.get_output_value("name"), ["foo"])
loaded_item = il.load_item()
self.assertIsInstance(loaded_item, self.item_class)
- self.assertEqual(loaded_item, dict({"name": ["foo"]}))
+ self.assertEqual(loaded_item, {"name": ["foo"]})
def test_get_output_value_list(self):
"""Getting output value must not remove value from item"""
@ -535,7 +535,7 @@ class InitializationFromDictTest(unittest.TestCase):
self.assertEqual(il.get_output_value("name"), ["foo", "bar"])
loaded_item = il.load_item()
self.assertIsInstance(loaded_item, self.item_class)
- self.assertEqual(loaded_item, dict({"name": ["foo", "bar"]}))
+ self.assertEqual(loaded_item, {"name": ["foo", "bar"]})
def test_values_single(self):
"""Values from initial item must be added to loader._values"""

View File

@ -284,7 +284,7 @@ class DownloaderAwareSchedulerTestMixin:
downloader.decrement(slot)
self.assertTrue(
- _is_scheduling_fair(list(s for u, s in _URLS_WITH_SLOTS), dequeued_slots)
+ _is_scheduling_fair([s for u, s in _URLS_WITH_SLOTS], dequeued_slots)
)
self.assertEqual(sum(len(s.active) for s in downloader.slots.values()), 0)

View File

@ -244,7 +244,7 @@ class CrawlSpiderTest(SpiderTest):
spider = _CrawlSpider()
output = list(spider._requests_to_follow(response))
self.assertEqual(len(output), 3)
- self.assertTrue(all(map(lambda r: isinstance(r, Request), output)))
+ self.assertTrue(all(isinstance(r, Request) for r in output))
self.assertEqual(
[r.url for r in output],
[
@ -270,7 +270,7 @@ class CrawlSpiderTest(SpiderTest):
spider = _CrawlSpider()
output = list(spider._requests_to_follow(response))
self.assertEqual(len(output), 3)
- self.assertTrue(all(map(lambda r: isinstance(r, Request), output)))
+ self.assertTrue(all(isinstance(r, Request) for r in output))
self.assertEqual(
[r.url for r in output],
[
@ -299,7 +299,7 @@ class CrawlSpiderTest(SpiderTest):
spider = _CrawlSpider()
output = list(spider._requests_to_follow(response))
self.assertEqual(len(output), 2)
- self.assertTrue(all(map(lambda r: isinstance(r, Request), output)))
+ self.assertTrue(all(isinstance(r, Request) for r in output))
self.assertEqual(
[r.url for r in output],
[
@ -324,7 +324,7 @@ class CrawlSpiderTest(SpiderTest):
spider = _CrawlSpider()
output = list(spider._requests_to_follow(response))
self.assertEqual(len(output), 3)
- self.assertTrue(all(map(lambda r: isinstance(r, Request), output)))
+ self.assertTrue(all(isinstance(r, Request) for r in output))
self.assertEqual(
[r.url for r in output],
[
@ -352,7 +352,7 @@ class CrawlSpiderTest(SpiderTest):
spider = _CrawlSpider()
output = list(spider._requests_to_follow(response))
self.assertEqual(len(output), 3)
- self.assertTrue(all(map(lambda r: isinstance(r, Request), output)))
+ self.assertTrue(all(isinstance(r, Request) for r in output))
self.assertEqual(
[r.url for r in output],
[
@ -383,7 +383,7 @@ class CrawlSpiderTest(SpiderTest):
spider = _CrawlSpider()
output = list(spider._requests_to_follow(response))
self.assertEqual(len(output), 3)
- self.assertTrue(all(map(lambda r: isinstance(r, Request), output)))
+ self.assertTrue(all(isinstance(r, Request) for r in output))
self.assertEqual(
[r.url for r in output],
[
@ -413,7 +413,7 @@ class CrawlSpiderTest(SpiderTest):
spider = _CrawlSpider()
output = list(spider._requests_to_follow(response))
self.assertEqual(len(output), 3)
- self.assertTrue(all(map(lambda r: isinstance(r, Request), output)))
+ self.assertTrue(all(isinstance(r, Request) for r in output))
self.assertEqual(
[r.url for r in output],
[
@ -445,7 +445,7 @@ class CrawlSpiderTest(SpiderTest):
spider = _CrawlSpider()
output = list(spider._requests_to_follow(response))
self.assertEqual(len(output), 3)
- self.assertTrue(all(map(lambda r: isinstance(r, Request), output)))
+ self.assertTrue(all(isinstance(r, Request) for r in output))
self.assertEqual(
[r.url for r in output],
[
@ -637,7 +637,7 @@ Sitemap: /sitemap-relative-url.xml
class FilteredSitemapSpider(self.spider_class):
def sitemap_filter(self, entries):
for entry in entries:
- alternate_links = entry.get("alternate", tuple())
+ alternate_links = entry.get("alternate", ())
for link in alternate_links:
if "/deutsch/" in link:
entry["loc"] = link