Mirror of https://github.com/scrapy/scrapy.git (synced 2025-02-06 11:00:46 +00:00)

Add flake8-pie rules to ruff.

commit 93644f2c30 (parent e7595837a6)
@@ -242,6 +242,8 @@ extend-select = [
     "LOG",
     # pygrep-hooks
     "PGH",
+    # flake8-pie
+    "PIE",
     # flake8-quotes
     "Q",
     # flake8-return
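For context, the flake8-pie ("PIE") rules enabled above are what drive every hunk below. A minimal illustrative sketch of the three patterns this commit touches, using made-up names rather than Scrapy code: PIE790 (a redundant pass after a docstring), PIE807 (a lambda that merely re-implements a builtin constructor), and PIE808 (an explicit 0 start passed to range):

# Hypothetical examples of code flake8-pie flags, next to the form ruff's autofix produces.

class Widget:
    """A docstring alone is already a valid class (or function) body."""

    pass  # PIE790: this placeholder is redundant and gets removed


make_default = lambda: []  # PIE807: the lambda just re-implements list
make_default = list        # calling list() returns a new empty list, same as the lambda

for i in range(0, 3):  # PIE808: 0 is already the default start...
    print(i)
for i in range(3):     # ...so range(3) is equivalent
    print(i)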
@@ -61,7 +61,6 @@ class Command(ScrapyCommand):
         """You can use this function to update the Scrapy objects that will be
         available in the shell
         """
-        pass
 
     def run(self, args: list[str], opts: Namespace) -> None:
         url = args[0] if args else None
@@ -81,7 +81,6 @@ class BaseScheduler(metaclass=BaseSchedulerMeta):
         :param spider: the spider object for the current crawl
         :type spider: :class:`~scrapy.spiders.Spider`
         """
-        pass
 
     def close(self, reason: str) -> Deferred[None] | None:
         """
@@ -91,7 +90,6 @@ class BaseScheduler(metaclass=BaseSchedulerMeta):
         :param reason: a string which describes the reason why the spider was closed
         :type reason: :class:`str`
         """
-        pass
 
     @abstractmethod
     def has_pending_requests(self) -> bool:
@@ -50,7 +50,6 @@ class BaseDupeFilter:
 
     def log(self, request: Request, spider: Spider) -> None:
         """Log that a request has been filtered"""
-        pass
 
 
 class RFPDupeFilter(BaseDupeFilter):
@@ -13,8 +13,6 @@ from typing import Any
 class NotConfigured(Exception):
     """Indicates a missing configuration situation"""
 
-    pass
-
 
 class _InvalidOutput(TypeError):
     """
@@ -22,8 +20,6 @@ class _InvalidOutput(TypeError):
     Internal and undocumented, it should not be raised or caught by user code.
     """
 
-    pass
-
 
 # HTTP and crawling
 
@@ -35,8 +31,6 @@ class IgnoreRequest(Exception):
 class DontCloseSpider(Exception):
     """Request the spider not to be closed yet"""
 
-    pass
-
 
 class CloseSpider(Exception):
     """Raise this from callbacks to request the spider to be closed"""
@@ -64,14 +58,10 @@ class StopDownload(Exception):
 class DropItem(Exception):
     """Drop item from the item pipeline"""
 
-    pass
-
 
 class NotSupported(Exception):
     """Indicates a feature or method is not supported"""
 
-    pass
-
 
 # Commands
 
@@ -89,10 +79,6 @@ class ScrapyDeprecationWarning(Warning):
     DeprecationWarning is silenced on Python 2.7+
     """
 
-    pass
-
 
 class ContractFail(AssertionError):
     """Error raised in case of a failing contract"""
 
-    pass
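All of the pass removals in this hunk rely on the same fact: a docstring is itself an expression statement, so a class or method whose body is only a docstring is complete without a placeholder (which is exactly what PIE790 flags). A small self-contained check, using a hypothetical class name rather than one from Scrapy:

# A docstring-only body is syntactically valid; no pass (or ...) is needed.
class ExampleNotConfigured(Exception):
    """Indicates a missing configuration situation"""


err = ExampleNotConfigured("settings missing")  # instantiating works as usual
assert ExampleNotConfigured.__doc__.startswith("Indicates")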
@@ -52,7 +52,6 @@ class RobotParser(metaclass=ABCMeta):
         :param robotstxt_body: content of a robots.txt_ file.
         :type robotstxt_body: bytes
         """
-        pass
 
     @abstractmethod
     def allowed(self, url: str | bytes, user_agent: str | bytes) -> bool:
@@ -64,7 +63,6 @@ class RobotParser(metaclass=ABCMeta):
         :param user_agent: User agent
         :type user_agent: str or bytes
         """
-        pass
 
 
 class PythonRobotParser(RobotParser):
@@ -393,8 +393,8 @@ class DuplicateStartRequestsSpider(MockServerSpider):
     dupe_factor = 3
 
     def start_requests(self):
-        for i in range(0, self.distinct_urls):
-            for j in range(0, self.dupe_factor):
+        for i in range(self.distinct_urls):
+            for j in range(self.dupe_factor):
                 url = self.mockserver.url(f"/echo?headers=1&body=test{i}")
                 yield Request(url, dont_filter=self.dont_filter)
 
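The loop rewrite above is PIE808: range() starts at 0 by default, so the explicit start argument adds nothing. A one-line check:

# Both spellings produce the same sequence.
assert list(range(0, 3)) == list(range(3)) == [0, 1, 2]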
@@ -178,27 +178,23 @@ class TestSpider(Spider):
         """method with no url
         @returns items 1 1
         """
-        pass
 
     def custom_form(self, response):
         """
         @url http://scrapy.org
         @custom_form
         """
-        pass
 
     def invalid_regex(self, response):
         """method with invalid regex
         @ Scrapy is awsome
         """
-        pass
 
     def invalid_regex_with_valid_contract(self, response):
         """method with invalid regex
         @ scrapy is awsome
         @url http://scrapy.org
         """
-        pass
 
     def returns_request_meta(self, response):
         """method which returns request
@@ -235,7 +231,6 @@ class CustomContractSuccessSpider(Spider):
     """
     @custom_success_contract
     """
-    pass
 
 
 class CustomContractFailSpider(Spider):
@@ -245,7 +240,6 @@ class CustomContractFailSpider(Spider):
     """
     @custom_fail_contract
     """
-    pass
 
 
 class InheritsTestSpider(TestSpider):
@@ -265,7 +265,7 @@ class MaxRetryTimesTest(unittest.TestCase):
         spider = spider or self.spider
         middleware = middleware or self.mw
 
-        for i in range(0, max_retry_times):
+        for i in range(max_retry_times):
             req = middleware.process_exception(req, exception, spider)
             assert isinstance(req, Request)
 
@@ -13,7 +13,7 @@ class TelnetExtensionTest(unittest.TestCase):
         console = TelnetConsole(crawler)
 
         # This function has some side effects we don't need for this test
-        console._get_telnet_vars = lambda: {}
+        console._get_telnet_vars = dict
 
         console.start_listening()
         protocol = console.protocol()
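The telnet test change is PIE807: the builtin dict is already a zero-argument callable that returns a new empty mapping, so the lambda is redundant. A quick illustration:

# dict and (lambda: {}) are interchangeable as zero-argument factories...
assert dict() == (lambda: {})() == {}
# ...and each call still returns a distinct object.
assert dict() is not dict()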
@@ -311,11 +311,11 @@ class FilesPipelineTestCaseFieldsDataClass(
 class FilesPipelineTestAttrsItem:
     name = attr.ib(default="")
     # default fields
-    file_urls: list[str] = attr.ib(default=lambda: [])
-    files: list[dict[str, str]] = attr.ib(default=lambda: [])
+    file_urls: list[str] = attr.ib(default=list)
+    files: list[dict[str, str]] = attr.ib(default=list)
     # overridden fields
-    custom_file_urls: list[str] = attr.ib(default=lambda: [])
-    custom_files: list[dict[str, str]] = attr.ib(default=lambda: [])
+    custom_file_urls: list[str] = attr.ib(default=list)
+    custom_files: list[dict[str, str]] = attr.ib(default=list)
 
 
 class FilesPipelineTestCaseFieldsAttrsItem(
@@ -295,11 +295,11 @@ class ImagesPipelineTestCaseFieldsDataClass(
 class ImagesPipelineTestAttrsItem:
     name = attr.ib(default="")
     # default fields
-    image_urls: list[str] = attr.ib(default=lambda: [])
-    images: list[dict[str, str]] = attr.ib(default=lambda: [])
+    image_urls: list[str] = attr.ib(default=list)
+    images: list[dict[str, str]] = attr.ib(default=list)
     # overridden fields
-    custom_image_urls: list[str] = attr.ib(default=lambda: [])
-    custom_images: list[dict[str, str]] = attr.ib(default=lambda: [])
+    custom_image_urls: list[str] = attr.ib(default=list)
+    custom_images: list[dict[str, str]] = attr.ib(default=list)
 
 
 class ImagesPipelineTestCaseFieldsAttrsItem(
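The attrs-based test items in the last two hunks get the same PIE807 rewrite, with lambda: [] replaced by the builtin list. As a hedged aside on general attrs behavior (not something this commit changes): attr.ib(default=...) stores a plain callable as the literal default value, while a fresh list per instance is spelled with factory= or attr.Factory, as in this illustrative snippet with a made-up item class:

import attr


@attr.s
class ExampleItem:  # hypothetical, not part of the Scrapy test suite
    # factory=list calls list() for each new instance, so every item gets its own list
    urls = attr.ib(factory=list)


assert ExampleItem().urls == []
assert ExampleItem().urls is not ExampleItem().urls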