1
0
mirror of https://github.com/scrapy/scrapy.git synced 2025-02-06 11:00:46 +00:00

Enable PTH Ruff rules.

This commit is contained in:
Andrey Rakhmatullin 2025-01-01 22:03:42 +05:00
parent f44ca39fa2
commit 273620488c
8 changed files with 18 additions and 16 deletions

View File

@@ -240,6 +240,8 @@ extend-select = [
"PIE",
# pylint
"PL",
# flake8-use-pathlib
"PTH",
# flake8-pyi
"PYI",
# flake8-quotes

View File

@@ -154,7 +154,7 @@ class Command(ScrapyCommand):
spiders_dir = Path(spiders_module.__file__).parent.resolve()
else:
spiders_module = None
spiders_dir = Path(".")
spiders_dir = Path()
spider_file = f"{spiders_dir / module}.py"
shutil.copyfile(template_file, spider_file)
render_templatefile(spider_file, **tvars)

View File

@@ -1,6 +1,5 @@
from __future__ import annotations
import os
import re
import string
from importlib.util import find_spec
@@ -28,9 +27,9 @@ TEMPLATES_TO_RENDER: tuple[tuple[str, ...], ...] = (
IGNORE = ignore_patterns("*.pyc", "__pycache__", ".svn")
def _make_writable(path: str | os.PathLike) -> None:
current_permissions = os.stat(path).st_mode
os.chmod(path, current_permissions | OWNER_WRITE_PERMISSION)
def _make_writable(path: Path) -> None:
current_permissions = path.stat().st_mode
path.chmod(current_permissions | OWNER_WRITE_PERMISSION)
class Command(ScrapyCommand):

View File

@@ -32,6 +32,7 @@ from __future__ import annotations
import re
from io import BytesIO
from pathlib import Path
from typing import TYPE_CHECKING, Any, BinaryIO
from urllib.parse import unquote
@@ -56,9 +57,11 @@ if TYPE_CHECKING:
class ReceivedDataProtocol(Protocol):
def __init__(self, filename: str | None = None):
self.__filename: str | None = filename
self.body: BinaryIO = open(filename, "wb") if filename else BytesIO()
def __init__(self, filename: bytes | None = None):
self.__filename: bytes | None = filename
self.body: BinaryIO = (
Path(filename.decode()).open("wb") if filename else BytesIO()
)
self.size: int = 0
def dataReceived(self, data: bytes) -> None:
@@ -66,7 +69,7 @@ class ReceivedDataProtocol(Protocol):
self.size += len(data)
@property
def filename(self) -> str | None:
def filename(self) -> bytes | None:
return self.__filename
def close(self) -> None:
@@ -128,8 +131,8 @@ class FTPDownloadHandler:
) -> Response:
self.result = result
protocol.close()
headers = {"local filename": protocol.filename or "", "size": protocol.size}
body = to_bytes(protocol.filename or protocol.body.read())
headers = {"local filename": protocol.filename or b"", "size": protocol.size}
body = protocol.filename or protocol.body.read()
respcls = responsetypes.from_args(url=request.url, body=body)
# hints for Headers-related types may need to be fixed to not use AnyStr
return respcls(url=request.url, status=200, body=body, headers=headers) # type: ignore[arg-type]

View File

@@ -17,7 +17,7 @@ if TYPE_CHECKING:
class ProcessTest:
command: str | None = None
prefix = [sys.executable, "-m", "scrapy.cmdline"]
cwd = os.getcwd() # trial chdirs to temp dir
cwd = os.getcwd() # trial chdirs to temp dir # noqa: PTH109
def execute(
self,

View File

@@ -1,5 +1,4 @@
import logging
import os
import platform
import re
import signal
@@ -643,7 +642,6 @@ class CrawlerRunnerHasSpider(unittest.TestCase):
class ScriptRunnerMixin:
script_dir: Path
cwd = os.getcwd()
def get_script_args(self, script_name: str, *script_args: str) -> list[str]:
script_path = self.script_dir / script_name

View File

@@ -116,7 +116,7 @@ class FileTestCase(unittest.TestCase):
def tearDown(self):
os.close(self.fd)
os.remove(self.tmpname)
Path(self.tmpname).unlink()
def test_download(self):
def _test(response):

View File

@@ -12,7 +12,7 @@ from scrapy.utils.project import data_path, get_project_settings
@contextlib.contextmanager
def inside_a_project():
prev_dir = os.getcwd()
prev_dir = Path.cwd()
project_dir = tempfile.mkdtemp()
try: