Mirror of https://github.com/scrapy/scrapy.git, synced 2025-02-06 11:00:46 +00:00
Add flake8-raise rules to ruff.
commit e7595837a6
parent 897e124a27
@@ -246,6 +246,8 @@ extend-select = [
     "Q",
     # flake8-return
     "RET",
+    # flake8-raise
+    "RSE",
     # flake8-bandit
     "S",
     # flake8-slots
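For context: "RSE" enables ruff's flake8-raise rules, and the only one that drives the changes below is RSE102, which flags unnecessary parentheses on a raised exception. A minimal sketch of the pattern, assuming the config above:

    # RSE102: the parentheses are redundant because a bare `raise` on an
    # exception class instantiates it with no arguments anyway.
    raise NotImplementedError()  # flagged by RSE102

    raise NotImplementedError    # preferred form; identical behavior at runtime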
@@ -22,7 +22,7 @@ class Command(BaseRunSpiderCommand):
 
     def run(self, args: list[str], opts: argparse.Namespace) -> None:
         if len(args) < 1:
-            raise UsageError()
+            raise UsageError
         if len(args) > 1:
             raise UsageError(
                 "running 'scrapy crawl' with more than one spider is not supported"
@@ -28,7 +28,7 @@ class Command(ScrapyCommand):
 
     def run(self, args: list[str], opts: argparse.Namespace) -> None:
         if len(args) != 1:
-            raise UsageError()
+            raise UsageError
 
         editor = self.settings["EDITOR"]
         assert self.crawler_process
@@ -68,7 +68,7 @@ class Command(ScrapyCommand):
 
     def run(self, args: list[str], opts: Namespace) -> None:
         if len(args) != 1 or not is_url(args[0]):
-            raise UsageError()
+            raise UsageError
         request = Request(
             args[0],
             callback=self._print_response,
@@ -101,7 +101,7 @@ class Command(ScrapyCommand):
             print(template_file.read_text(encoding="utf-8"))
             return
         if len(args) != 2:
-            raise UsageError()
+            raise UsageError
 
         name, url = args[0:2]
         url = verify_url_scheme(url)
@@ -399,7 +399,7 @@ class Command(BaseRunSpiderCommand):
     def run(self, args: list[str], opts: argparse.Namespace) -> None:
         # parse arguments
         if not len(args) == 1 or not is_url(args[0]):
-            raise UsageError()
+            raise UsageError
         url = args[0]
 
         # prepare spidercls
@@ -43,7 +43,7 @@ class Command(BaseRunSpiderCommand):
 
     def run(self, args: list[str], opts: argparse.Namespace) -> None:
         if len(args) != 1:
-            raise UsageError()
+            raise UsageError
         filename = Path(args[0])
         if not filename.exists():
             raise UsageError(f"File not found: {filename}\n")
@@ -92,7 +92,7 @@ class Command(ScrapyCommand):
 
     def run(self, args: list[str], opts: argparse.Namespace) -> None:
         if len(args) not in (1, 2):
-            raise UsageError()
+            raise UsageError
 
         project_name = args[0]
 
@@ -98,7 +98,7 @@ class BaseScheduler(metaclass=BaseSchedulerMeta):
         """
         ``True`` if the scheduler has enqueued requests, ``False`` otherwise
         """
-        raise NotImplementedError()
+        raise NotImplementedError
 
     @abstractmethod
     def enqueue_request(self, request: Request) -> bool:
@@ -112,7 +112,7 @@ class BaseScheduler(metaclass=BaseSchedulerMeta):
         For reference, the default Scrapy scheduler returns ``False`` when the
         request is rejected by the dupefilter.
         """
-        raise NotImplementedError()
+        raise NotImplementedError
 
     @abstractmethod
     def next_request(self) -> Request | None:
@@ -124,7 +124,7 @@ class BaseScheduler(metaclass=BaseSchedulerMeta):
         to the downloader in the current reactor cycle. The engine will continue
         calling ``next_request`` until ``has_pending_requests`` is ``False``.
         """
-        raise NotImplementedError()
+        raise NotImplementedError
 
 
 class Scheduler(BaseScheduler):
@@ -293,12 +293,12 @@ class MediaPipeline(ABC):
         self, request: Request, info: SpiderInfo, *, item: Any = None
     ) -> Deferred[FileInfo | None]:
         """Check request before starting download"""
-        raise NotImplementedError()
+        raise NotImplementedError
 
     @abstractmethod
     def get_media_requests(self, item: Any, info: SpiderInfo) -> list[Request]:
         """Returns the media requests to download"""
-        raise NotImplementedError()
+        raise NotImplementedError
 
     @abstractmethod
     def media_downloaded(
@@ -310,14 +310,14 @@ class MediaPipeline(ABC):
         item: Any = None,
     ) -> FileInfo:
         """Handler for success downloads"""
-        raise NotImplementedError()
+        raise NotImplementedError
 
     @abstractmethod
     def media_failed(
         self, failure: Failure, request: Request, info: SpiderInfo
     ) -> NoReturn:
         """Handler for failed downloads"""
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def item_completed(
         self, results: list[FileInfoOrError], item: Any, info: SpiderInfo
@@ -345,4 +345,4 @@ class MediaPipeline(ABC):
         item: Any = None,
     ) -> str:
         """Returns the path where downloaded media should be stored"""
-        raise NotImplementedError()
+        raise NotImplementedError
@@ -76,7 +76,7 @@ class HostResolution:
         self.name: str = name
 
     def cancel(self) -> None:
-        raise NotImplementedError()
+        raise NotImplementedError
 
 
 @provider(IResolutionReceiver)
@@ -51,7 +51,7 @@ class ReferrerPolicy:
     name: str
 
     def referrer(self, response_url: str, request_url: str) -> str | None:
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def stripped_referrer(self, url: str) -> str | None:
         if urlparse(url).scheme not in self.NOREFERRER_SCHEMES:
@@ -64,7 +64,7 @@ class AddonManagerTest(unittest.TestCase):
     def test_notconfigured(self):
         class NotConfiguredAddon:
             def update_settings(self, settings):
-                raise NotConfigured()
+                raise NotConfigured
 
         settings_dict = {
             "ADDONS": {NotConfiguredAddon: 0},
@@ -894,7 +894,7 @@ class S3TestCase(unittest.TestCase):
         except Exception as e:
             self.assertIsInstance(e, (TypeError, NotConfigured))
         else:
-            raise AssertionError()
+            raise AssertionError
 
     def test_request_signing1(self):
         # gets an object from the johnsmith bucket.
@@ -178,7 +178,7 @@ class ProcessExceptionInvalidOutput(ManagerTestCase):
 
         class InvalidProcessExceptionMiddleware:
             def process_request(self, request, spider):
-                raise Exception()
+                raise Exception
 
             def process_exception(self, request, exception, spider):
                 return 1
@@ -59,7 +59,7 @@ class HttpCompressionTest(TestCase):
 
     def _getresponse(self, coding):
         if coding not in FORMAT:
-            raise ValueError()
+            raise ValueError
 
         samplefile, contentencoding = FORMAT[coding]
 
@@ -78,7 +78,7 @@ class ProcessSpiderExceptionInvalidOutput(SpiderMiddlewareTestCase):
 
         class RaiseExceptionProcessSpiderOutputMiddleware:
             def process_spider_output(self, response, result, spider):
-                raise Exception()
+                raise Exception
 
         self.mwman._add_middleware(InvalidProcessSpiderOutputExceptionMiddleware())
         self.mwman._add_middleware(RaiseExceptionProcessSpiderOutputMiddleware())
@@ -43,7 +43,7 @@ class RecoverySpider(Spider):
         yield {"test": 1}
         self.logger.info("DONT_FAIL: %s", response.meta.get("dont_fail"))
         if not response.meta.get("dont_fail"):
-            raise TabError()
+            raise TabError
 
 
 class RecoveryAsyncGenSpider(RecoverySpider):
@@ -59,7 +59,7 @@ class RecoveryAsyncGenSpider(RecoverySpider):
 class FailProcessSpiderInputMiddleware:
     def process_spider_input(self, response, spider):
         spider.logger.info("Middleware: will raise IndexError")
-        raise IndexError()
+        raise IndexError
 
 
 class ProcessSpiderInputSpiderWithoutErrback(Spider):
@@ -109,14 +109,14 @@ class GeneratorCallbackSpider(Spider):
     def parse(self, response):
         yield {"test": 1}
         yield {"test": 2}
-        raise ImportError()
+        raise ImportError
 
 
 class AsyncGeneratorCallbackSpider(GeneratorCallbackSpider):
     async def parse(self, response):
         yield {"test": 1}
         yield {"test": 2}
-        raise ImportError()
+        raise ImportError
 
 
 # ================================================================================
@@ -176,7 +176,7 @@ class GeneratorFailMiddleware:
         for r in result:
             r["processed"].append(f"{self.__class__.__name__}.process_spider_output")
             yield r
-        raise LookupError()
+        raise LookupError
 
     def process_spider_exception(self, response, exception, spider):
         method = f"{self.__class__.__name__}.process_spider_exception"
@@ -246,7 +246,7 @@ class NotGeneratorFailMiddleware:
         for r in result:
             r["processed"].append(f"{self.__class__.__name__}.process_spider_output")
             out.append(r)
-        raise ReferenceError()
+        raise ReferenceError
         return out
 
     def process_spider_exception(self, response, exception, spider):
@@ -41,7 +41,7 @@ class BaseQueueTestCase(unittest.TestCase):
 
 class RequestQueueTestMixin:
     def queue(self):
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def test_one_element_with_peek(self):
        if not hasattr(queuelib.queue.FifoMemoryQueue, "peek"):
@@ -10,7 +10,7 @@ from scrapy.utils.misc import (
 
 
 def _indentation_error(*args, **kwargs):
-    raise IndentationError()
+    raise IndentationError
 
 
 def top_level_return_something():
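To reproduce or auto-apply this kind of change locally, a sketch assuming ruff is installed (the exact invocation is not part of this commit):

    ruff check --select RSE .        # report flake8-raise violations
    ruff check --select RSE --fix .  # apply the autofix shown in this diff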