Mirror of https://github.com/scrapy/scrapy.git, synced 2025-02-06 10:24:24 +00:00
Add pylint rules to ruff, refresh the ignore list of pylint itself (#6584)
commit 7dd92e6e43 (parent 57a5460529)
@@ -122,6 +122,9 @@ profile = "black"
 [tool.pylint.MASTER]
 persistent = "no"
 jobs = 1  # >1 hides results
+extension-pkg-allow-list=[
+    "lxml",
+]
 
 [tool.pylint."MESSAGES CONTROL"]
 disable = [
@@ -129,11 +132,7 @@ disable = [
     "arguments-differ",
     "arguments-renamed",
     "attribute-defined-outside-init",
-    "bad-classmethod-argument",
-    "bare-except",
-    "broad-except",
-    "broad-exception-raised",
-    "c-extension-no-member",
+    "broad-exception-caught",
     "consider-using-with",
     "cyclic-import",
     "dangerous-default-value",
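Note: why the new extension-pkg-allow-list entry lets `c-extension-no-member` come off the disable list: pylint refuses to import compiled C extensions for inspection unless they are allow-listed, so valid attribute access on lxml objects used to produce no-member false positives. A minimal illustration (mine, not part of the commit; assumes lxml is installed):

    from lxml import etree

    # Without `extension-pkg-allow-list=["lxml"]`, pylint cannot introspect
    # the compiled lxml.etree module, so these perfectly valid attribute
    # accesses could be reported as (c-extension-)no-member false positives.
    root = etree.fromstring("<root><child/></root>")
    print(root.tag)              # "root"
    print(etree.tostring(root))  # b'<root><child/></root>'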
@@ -141,9 +140,6 @@ disable = [
     "duplicate-code",  # https://github.com/PyCQA/pylint/issues/214
     "eval-used",
     "fixme",
-    "function-redefined",
-    "global-statement",
-    "implicit-str-concat",
     "import-error",
     "import-outside-toplevel",
     "inherit-non-class",
@@ -155,7 +151,6 @@ disable = [
     "logging-format-interpolation",
     "logging-fstring-interpolation",
     "logging-not-lazy",
-    "lost-exception",
     "missing-docstring",
     "no-member",
     "no-method-argument",
@@ -169,13 +164,11 @@ disable = [
     "raise-missing-from",
     "redefined-builtin",
     "redefined-outer-name",
-    "reimported",
-    "signature-differs",
     "too-few-public-methods",
     "too-many-ancestors",
     "too-many-arguments",
     "too-many-branches",
     "too-many-format-args",
     "too-many-function-args",
     "too-many-instance-attributes",
     "too-many-lines",
@@ -184,14 +177,11 @@ disable = [
     "too-many-return-statements",
     "unbalanced-tuple-unpacking",
     "unnecessary-dunder-call",
     "unnecessary-pass",
-    "unreachable",
     "unused-argument",
     "unused-import",
     "unused-variable",
     "used-before-assignment",
-    "useless-return",
-    "wildcard-import",
     "wrong-import-position",
 ]
 
@@ -246,6 +236,8 @@ extend-select = [
     "PGH",
     # flake8-pie
     "PIE",
+    # pylint
+    "PL",
     # flake8-pyi
     "PYI",
     # flake8-quotes
@@ -320,6 +312,18 @@ ignore = [
     "D403",
     # `try`-`except` within a loop incurs performance overhead
     "PERF203",
+    # Too many return statements
+    "PLR0911",
+    # Too many branches
+    "PLR0912",
+    # Too many arguments in function definition
+    "PLR0913",
+    # Too many statements
+    "PLR0915",
+    # Magic value used in comparison
+    "PLR2004",
+    # `for` loop variable overwritten by assignment target
+    "PLW2901",
     # Use of `assert` detected; needed for mypy
     "S101",
     # FTP-related functions are being called; https://github.com/scrapy/scrapy/issues/4180
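Note: for context on two of the new project-wide ruff ignores, here is the sort of idiomatic code they would otherwise flag (my illustration, not from the repo):

    def normalize(lines):
        out = []
        for line in lines:
            # PLW2901: rebinding the loop variable trips ruff's
            # redefined-loop-name rule, but is common and harmless here.
            line = line.strip().lower()
            # PLR2004: 80 is a "magic value used in comparison".
            if len(line) > 80:
                line = line[:80]
            out.append(line)
        return out

    print(normalize(["  Hello World  "]))  # ['hello world']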
@@ -31,7 +31,7 @@ version_info = tuple(int(v) if v.isdigit() else v for v in __version__.split(".
 
 def __getattr__(name: str):
     if name == "twisted_version":
-        import warnings
+        import warnings  # pylint: disable=reimported
 
         from twisted import version as _txv
 
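Note: the touched function is a module-level `__getattr__` (PEP 562) that lazily emits a deprecation warning; its nested `import warnings` duplicates a top-level import, hence the scoped reimported pragma instead of the old global disable. A standalone sketch of the pattern, with hypothetical names:

    # mymod.py -- hypothetical module showing the deprecation pattern.
    NEW_CONSTANT = 42

    def __getattr__(name: str):
        # Only called for attributes not found through normal lookup (PEP 562).
        if name == "OLD_CONSTANT":
            import warnings

            warnings.warn(
                "OLD_CONSTANT is deprecated, use NEW_CONSTANT",
                DeprecationWarning,
                stacklevel=2,
            )
            return NEW_CONSTANT
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")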
@@ -74,7 +74,7 @@ def _get_commands_from_entry_points(
         if inspect.isclass(obj):
             cmds[entry_point.name] = obj()
         else:
-            raise Exception(f"Invalid entry point {entry_point.name}")
+            raise ValueError(f"Invalid entry point {entry_point.name}")
     return cmds
 
 
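Note: this and the remaining `raise Exception(...)` hunks are the same mechanical fix. With pylint's broad-exception-raised (W0719) no longer blanket-disabled above, each site now raises the narrowest builtin that fits: ValueError for bad input here, RuntimeError for bad state elsewhere in the commit. Roughly (my example):

    def parse_port(value: str) -> int:
        port = int(value)  # int() already raises ValueError for garbage
        if not 0 < port < 65536:
            # A concrete type lets callers catch precisely; a bare
            # Exception would force them to catch everything.
            raise ValueError(f"port out of range: {port}")
        return port

    print(parse_port("8080"))  # 8080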
@@ -92,11 +92,10 @@ class BaseItemExporter:
             field_iter = (
                 (x, y) for x, y in self.fields_to_export.items() if x in item
             )
+        elif include_empty:
+            field_iter = self.fields_to_export
         else:
-            if include_empty:
-                field_iter = self.fields_to_export
-            else:
-                field_iter = (x for x in self.fields_to_export if x in item)
+            field_iter = (x for x in self.fields_to_export if x in item)
 
         for field_name in field_iter:
             if isinstance(field_name, str):
@@ -398,7 +398,7 @@ def maybeDeferred_coro(
     """Copy of defer.maybeDeferred that also converts coroutines to Deferreds."""
     try:
         result = f(*args, **kw)
-    except:  # noqa: E722,B001
+    except:  # noqa: E722  # pylint: disable=bare-except
         return defer.fail(failure.Failure(captureVars=Deferred.debug))
 
     if isinstance(result, Deferred):
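Note: the bare `except:` here is deliberate. Like Twisted's own `maybeDeferred`, the helper must convert anything the callable raises, including BaseException subclasses, into a failed Deferred, so the fix is a scoped pragma rather than a broader handler. A rough stdlib-only analogue (mine):

    import sys

    def capture(f, *args, **kw):
        """Return (True, result) or (False, exception); nothing escapes."""
        try:
            return True, f(*args, **kw)
        except:  # noqa: E722  # pylint: disable=bare-except
            # Bare on purpose: even SystemExit or KeyboardInterrupt must be
            # captured and handed to the caller, mirroring maybeDeferred_coro.
            return False, sys.exc_info()[1]

    print(capture(int, "17"))    # (True, 17)
    print(capture(int, "oops"))  # (False, ValueError(...))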
@@ -60,7 +60,7 @@ def create_deprecated_class(
         deprecated_class: type | None = None
         warned_on_subclass: bool = False
 
-        def __new__(
+        def __new__(  # pylint: disable=bad-classmethod-argument
             metacls, name: str, bases: tuple[type, ...], clsdict_: dict[str, Any]
         ) -> type:
             cls = super().__new__(metacls, name, bases, clsdict_)
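Note: bad-classmethod-argument fires because the first parameter of this metaclass `__new__` is spelled `metacls` rather than a name pylint accepts by default, so the suppression is now scoped to the single definition. A minimal metaclass sketch (hypothetical) showing the shape:

    class Meta(type):
        def __new__(  # pylint: disable=bad-classmethod-argument
            metacls, name, bases, namespace
        ):
            namespace.setdefault("created_by", "Meta")
            return super().__new__(metacls, name, bases, namespace)

    class Example(metaclass=Meta):
        pass

    print(Example.created_by)  # "Meta"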
@@ -130,7 +130,7 @@ _scrapy_root_handler: logging.Handler | None = None
 
 
 def install_scrapy_root_handler(settings: Settings) -> None:
-    global _scrapy_root_handler
+    global _scrapy_root_handler  # noqa: PLW0603  # pylint: disable=global-statement
 
     if (
         _scrapy_root_handler is not None
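Note: the pattern in this hunk recurs across the commit. Instead of disabling global-statement project-wide, the one legitimate use keeps a scoped comment, and since ruff now checks the pylint rules too, the line needs both the ruff code (PLW0603) and the pylint pragma. A minimal sketch with hypothetical names:

    _cache = None

    def reset_cache() -> None:
        # One deliberate module-level rebind; suppress both linters on this
        # line instead of disabling the rule for the whole project.
        global _cache  # noqa: PLW0603  # pylint: disable=global-statement
        _cache = {}

    reset_cache()
    print(_cache)  # {}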
@@ -149,12 +149,11 @@ def verify_installed_reactor(reactor_path: str) -> None:
 
     reactor_class = load_object(reactor_path)
     if not reactor.__class__ == reactor_class:
-        msg = (
+        raise RuntimeError(
             "The installed reactor "
             f"({reactor.__module__}.{reactor.__class__.__name__}) does not "
             f"match the requested one ({reactor_path})"
         )
-        raise Exception(msg)
 
 
 def verify_installed_asyncio_event_loop(loop_path: str) -> None:
@@ -168,7 +167,7 @@ def verify_installed_asyncio_event_loop(loop_path: str) -> None:
         f".{reactor._asyncioEventloop.__class__.__qualname__}"
     )
     specified = f"{loop_class.__module__}.{loop_class.__qualname__}"
-    raise Exception(
+    raise RuntimeError(
         "Scrapy found an asyncio Twisted reactor already "
         f"installed, and its event loop class ({installed}) does "
         "not match the one specified in the ASYNCIO_EVENT_LOOP "
@@ -52,10 +52,6 @@ def iter_spider_classes(module: ModuleType) -> Iterable[type[Spider]]:
     """Return an iterator over all spider classes defined in the given module
     that can be instantiated (i.e. which have name)
     """
-    # this needs to be imported here until get rid of the spider manager
-    # singleton in scrapy.spider.spiders
-    from scrapy.spiders import Spider
-
     for obj in vars(module).values():
         if (
             inspect.isclass(obj)
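Note: the deleted local import existed to dodge a circular import through the old spider-manager singleton; the signature's `type[Spider]` annotation already requires a top-level `Spider` import, so the body can rely on it too. For reference, the filtering amounts to (simplified, with a hypothetical stand-in class):

    import inspect
    from types import ModuleType

    class Spider:  # stand-in for scrapy.spiders.Spider
        name = None

    def iter_named_subclasses(module: ModuleType, base: type):
        # Concrete subclasses defined in `module` that set a non-empty
        # `name`, i.e. classes that can actually be instantiated as spiders.
        for obj in vars(module).values():
            if (
                inspect.isclass(obj)
                and issubclass(obj, base)
                and obj.__module__ == module.__name__
                and getattr(obj, "name", None)
            ):
                yield obj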
@@ -14,7 +14,7 @@ from urllib.parse import ParseResult, urldefrag, urlparse, urlunparse
 
 # scrapy.utils.url was moved to w3lib.url and import * ensures this
 # move doesn't break old code
-from w3lib.url import *  # pylint: disable=unused-wildcard-import
+from w3lib.url import *  # pylint: disable=unused-wildcard-import,wildcard-import
 from w3lib.url import _safe_chars, _unquotepath  # noqa: F401
 
 from scrapy.utils.python import to_unicode
@@ -50,7 +50,9 @@ def url_has_any_extension(url: UrlT, extensions: Iterable[str]) -> bool:
     return any(lowercase_path.endswith(ext) for ext in extensions)
 
 
-def parse_url(url: UrlT, encoding: str | None = None) -> ParseResult:
+def parse_url(  # pylint: disable=function-redefined
+    url: UrlT, encoding: str | None = None
+) -> ParseResult:
     """Return urlparsed url from the given argument (which could be an already
     parsed url)
     """
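Note: the function-redefined pragma is needed because the star import above already brings in a `parse_url` name from w3lib that this module then intentionally shadows. A self-contained demonstration of the message (mine):

    from urllib.parse import ParseResult, urlparse

    def parse_url(url: str) -> ParseResult:
        return urlparse(url)

    # Intentional shadowing, as after `from w3lib.url import *`: pylint flags
    # the second def as function-redefined unless suppressed on that line.
    def parse_url(url) -> ParseResult:  # pylint: disable=function-redefined
        if isinstance(url, ParseResult):
            return url
        return urlparse(url.strip())

    print(parse_url("  https://example.org/path  "))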
@@ -8,7 +8,7 @@ class TestSpiderPipeline:
 
 class TestSpiderExceptionPipeline:
     def open_spider(self, spider):
-        raise Exception("exception")
+        raise RuntimeError("exception")
 
     def process_item(self, item, spider):
         return item
@@ -349,11 +349,6 @@ class HttpTestCase(unittest.TestCase):
 
         request = Request(self.getURL("host"), headers={"Host": host})
         return self.download_request(request, Spider("foo")).addCallback(_test)
 
-        d = self.download_request(request, Spider("foo"))
-        d.addCallback(lambda r: r.body)
-        d.addCallback(self.assertEqual, b"localhost")
-        return d
-
     def test_content_length_zero_bodyless_post_request_headers(self):
         """Tests if "Content-Length: 0" is sent for bodyless POST requests.
@@ -178,7 +178,7 @@ class ProcessExceptionInvalidOutput(ManagerTestCase):
 
 class InvalidProcessExceptionMiddleware:
     def process_request(self, request, spider):
-        raise Exception
+        raise RuntimeError
 
     def process_exception(self, request, exception, spider):
         return 1
@@ -192,4 +192,3 @@ class TestPeriodicLog(unittest.TestCase):
             {"PERIODIC_LOG_STATS": {"include": ["downloader/"], "exclude": ["bytes"]}},
             lambda k, v: "downloader/" in k and "bytes" not in k,
         )
-        #
@@ -727,7 +727,7 @@ class ExceptionJsonItemExporter(JsonItemExporter):
     """JsonItemExporter that throws an exception every time export_item is called."""
 
     def export_item(self, _):
-        raise Exception("foo")
+        raise RuntimeError("foo")
 
 
 class FeedExportTest(FeedExportTestBase):
@@ -330,10 +330,10 @@ class BasicItemLoaderTest(unittest.TestCase):
         il.add_value("name", ["mar", "ta"])
         self.assertEqual(il.get_output_value("name"), "Mar Ta")
 
-        class TakeFirstItemLoader(TestItemLoader):
+        class TakeFirstItemLoader2(TestItemLoader):
             name_out = Join("<br>")
 
-        il = TakeFirstItemLoader()
+        il = TakeFirstItemLoader2()
         il.add_value("name", ["mar", "ta"])
         self.assertEqual(il.get_output_value("name"), "Mar<br>Ta")
@@ -78,7 +78,7 @@ class ProcessSpiderExceptionInvalidOutput(SpiderMiddlewareTestCase):
 
         class RaiseExceptionProcessSpiderOutputMiddleware:
             def process_spider_output(self, response, result, spider):
-                raise Exception
+                raise RuntimeError
 
         self.mwman._add_middleware(InvalidProcessSpiderOutputExceptionMiddleware())
         self.mwman._add_middleware(RaiseExceptionProcessSpiderOutputMiddleware())
@@ -247,7 +247,6 @@ class NotGeneratorFailMiddleware:
             r["processed"].append(f"{self.__class__.__name__}.process_spider_output")
             out.append(r)
         raise ReferenceError
-        return out
 
     def process_spider_exception(self, response, exception, spider):
         method = f"{self.__class__.__name__}.process_spider_exception"
@@ -26,7 +26,7 @@ class AsyncioTest(TestCase):
         with warnings.catch_warnings(record=True) as w:
             install_reactor("twisted.internet.asyncioreactor.AsyncioSelectorReactor")
             self.assertEqual(len(w), 0)
-            from twisted.internet import reactor
+            from twisted.internet import reactor  # pylint: disable=reimported
 
             assert original_reactor == reactor
 
@@ -153,7 +153,7 @@ class AsyncDefTestsuiteTest(unittest.TestCase):
     @mark.xfail(reason="Checks that the test is actually executed", strict=True)
     @deferred_f_from_coro_f
     async def test_deferred_f_from_coro_f_xfail(self):
-        raise Exception("This is expected to be raised")
+        raise RuntimeError("This is expected to be raised")
 
 
 class AsyncCooperatorTest(unittest.TestCase):
@@ -32,7 +32,6 @@ def top_level_return_none():
     https://example.org
     """
     yield url
-    return
 
 
 def generator_that_returns_stuff():
@@ -103,11 +102,9 @@ https://example.org
     def test_generators_return_none(self):
        def f2():
            yield 1
-            return None
 
        def g2():
            yield 1
-            return
 
        def h2():
            yield 1
@@ -132,7 +129,6 @@ https://example.org
            https://example.org
            """
            yield url
-            return
 
        def l2():
            return
@@ -181,12 +177,10 @@ https://example.org
        @decorator
        def f3():
            yield 1
-            return None
 
        @decorator
        def g3():
            yield 1
-            return
 
        @decorator
        def h3():
@@ -215,7 +209,6 @@ https://example.org
            https://example.org
            """
            yield url
-            return
 
        @decorator
        def l3():
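Note: the deleted `return` / `return None` lines above are pylint's useless-return (R1711), which is presumably why that entry could come off the disable list: a bare return as the last statement is a no-op, and in a generator it does not change the StopIteration behaviour either. Quick check (mine):

    def with_return():
        yield 1
        return  # useless-return: the generator would end here anyway

    def without_return():
        yield 1

    print(list(with_return()) == list(without_return()))  # True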
@@ -26,7 +26,7 @@ class UtilsSpidersTestCase(unittest.TestCase):
         self.assertEqual(list(iterate_spider_output([r, i, o])), [r, i, o])
 
     def test_iter_spider_classes(self):
-        import tests.test_utils_spider  # pylint: disable=import-self
+        import tests.test_utils_spider  # noqa: PLW0406  # pylint: disable=import-self
 
         it = iter_spider_classes(tests.test_utils_spider)
         self.assertEqual(set(it), {MySpider1, MySpider2})
@@ -327,8 +327,6 @@ def create_guess_scheme_t(args):
 def create_skipped_scheme_t(args):
     def do_expected(self):
         raise unittest.SkipTest(args[2])
-        url = guess_scheme(args[0])
-        assert url.startswith(args[1])
 
     return do_expected
 