1
0
mirror of https://github.com/scrapy/scrapy.git synced 2025-02-06 11:00:46 +00:00

Remove usage of deprecated mktemp (#5285)

This commit is contained in:
Jon 2024-02-26 10:53:06 -08:00 committed by GitHub
parent e208f82076
commit b0ef9a89a1
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
9 changed files with 26 additions and 34 deletions

View File

@@ -4,7 +4,6 @@ skips:
- B105
- B301
- B303
- B306
- B307
- B311
- B320

View File

@@ -6,13 +6,12 @@ import platform
import re
import subprocess
import sys
import tempfile
from contextlib import contextmanager
from itertools import chain
from pathlib import Path
from shutil import copytree, rmtree
from stat import S_IWRITE as ANYONE_WRITE_PERMISSION
from tempfile import mkdtemp
from tempfile import TemporaryFile, mkdtemp
from threading import Timer
from typing import Dict, Generator, Optional, Union
from unittest import skipIf
@@ -82,7 +81,7 @@ class ProjectTest(unittest.TestCase):
rmtree(self.temp_path)
def call(self, *new_args, **kwargs):
with tempfile.TemporaryFile() as out:
with TemporaryFile() as out:
args = (sys.executable, "-m", "scrapy.cmdline") + new_args
return subprocess.call(
args, stdout=out, stderr=out, cwd=self.cwd, env=self.env, **kwargs

View File

@@ -2,8 +2,8 @@ import contextlib
import os
import shutil
import sys
import tempfile
from pathlib import Path
from tempfile import mkdtemp, mkstemp
from typing import Optional, Type
from unittest import SkipTest, mock
@@ -107,13 +107,14 @@ class LoadTestCase(unittest.TestCase):
class FileTestCase(unittest.TestCase):
def setUp(self):
# add a special char to check that they are handled correctly
self.tmpname = Path(self.mktemp() + "^")
self.fd, self.tmpname = mkstemp(suffix="^")
Path(self.tmpname).write_text("0123456789", encoding="utf-8")
handler = build_from_crawler(FileDownloadHandler, get_crawler())
self.download_request = handler.download_request
def tearDown(self):
self.tmpname.unlink()
os.close(self.fd)
os.remove(self.tmpname)
def test_download(self):
def _test(response):
@@ -122,12 +123,12 @@ class FileTestCase(unittest.TestCase):
self.assertEqual(response.body, b"0123456789")
self.assertEqual(response.protocol, None)
request = Request(path_to_file_uri(str(self.tmpname)))
request = Request(path_to_file_uri(self.tmpname))
assert request.url.upper().endswith("%5E")
return self.download_request(request, Spider("foo")).addCallback(_test)
def test_non_existent(self):
request = Request(path_to_file_uri(self.mktemp()))
request = Request(path_to_file_uri(mkdtemp()))
d = self.download_request(request, Spider("foo"))
return self.assertFailure(d, OSError)
@@ -224,8 +225,7 @@ class HttpTestCase(unittest.TestCase):
certfile = "keys/localhost.crt"
def setUp(self):
self.tmpname = Path(self.mktemp())
self.tmpname.mkdir()
self.tmpname = Path(mkdtemp())
(self.tmpname / "file").write_bytes(b"0123456789")
r = static.File(str(self.tmpname))
r.putChild(b"redirect", util.Redirect(b"/file"))
@@ -651,8 +651,7 @@ class Https11CustomCiphers(unittest.TestCase):
certfile = "keys/localhost.crt"
def setUp(self):
self.tmpname = Path(self.mktemp())
self.tmpname.mkdir()
self.tmpname = Path(mkdtemp())
(self.tmpname / "file").write_bytes(b"0123456789")
r = static.File(str(self.tmpname))
self.site = server.Site(r, timeout=None)
@@ -1015,8 +1014,7 @@ class BaseFTPTestCase(unittest.TestCase):
from scrapy.core.downloader.handlers.ftp import FTPDownloadHandler
# setup dirs and test file
self.directory = Path(self.mktemp())
self.directory.mkdir()
self.directory = Path(mkdtemp())
userdir = self.directory / self.username
userdir.mkdir()
for filename, content in self.test_files:
@@ -1092,7 +1090,7 @@ class BaseFTPTestCase(unittest.TestCase):
return self._add_test_callbacks(d, _test)
def test_ftp_local_filename(self):
f, local_fname = tempfile.mkstemp()
f, local_fname = mkstemp()
fname_bytes = to_bytes(local_fname)
local_fname = Path(local_fname)
os.close(f)
@@ -1113,7 +1111,7 @@ class BaseFTPTestCase(unittest.TestCase):
return self._add_test_callbacks(d, _test)
def _test_response_class(self, filename, response_class):
f, local_fname = tempfile.mkstemp()
f, local_fname = mkstemp()
local_fname = Path(local_fname)
os.close(f)
meta = {}
@@ -1163,9 +1161,7 @@ class AnonymousFTPTestCase(BaseFTPTestCase):
from scrapy.core.downloader.handlers.ftp import FTPDownloadHandler
# setup dir and test file
self.directory = Path(self.mktemp())
self.directory.mkdir()
self.directory = Path(mkdtemp())
for filename, content in self.test_files:
(self.directory / filename).write_bytes(content)

View File

@@ -5,6 +5,7 @@ import shutil
import string
from ipaddress import IPv4Address
from pathlib import Path
from tempfile import mkdtemp
from typing import Dict
from unittest import mock, skipIf
from urllib.parse import urlencode
@@ -185,8 +186,7 @@ class Https2ClientProtocolTestCase(TestCase):
certificate_file = Path(__file__).parent / "keys" / "localhost.crt"
def _init_resource(self):
self.temp_directory = self.mktemp()
Path(self.temp_directory).mkdir()
self.temp_directory = mkdtemp()
r = File(self.temp_directory)
r.putChild(b"get-data-html-small", GetDataHtmlSmall())
r.putChild(b"get-data-html-large", GetDataHtmlLarge())

View File

@@ -1,5 +1,6 @@
import shutil
from pathlib import Path
from tempfile import mkdtemp
from typing import Optional, Set
from testfixtures import LogCapture
@@ -67,8 +68,7 @@ class FileDownloadCrawlTestCase(TestCase):
self.mockserver.__enter__()
# prepare a directory for storing files
self.tmpmediastore = Path(self.mktemp())
self.tmpmediastore.mkdir()
self.tmpmediastore = Path(mkdtemp())
self.settings = {
"REQUEST_FINGERPRINTER_IMPLEMENTATION": "2.7",
"ITEM_PIPELINES": {self.pipeline_class: 1},

View File

@@ -3,6 +3,7 @@ import sys
import tempfile
import warnings
from pathlib import Path
from tempfile import mkdtemp
from twisted.trial import unittest
from zope.interface.verify import verifyObject
@@ -139,8 +140,7 @@ class SpiderLoaderTest(unittest.TestCase):
class DuplicateSpiderNameLoaderTest(unittest.TestCase):
def setUp(self):
orig_spiders_dir = module_dir / "test_spiders"
self.tmpdir = Path(self.mktemp())
self.tmpdir.mkdir()
self.tmpdir = Path(mkdtemp())
self.spiders_dir = self.tmpdir / "test_spiders_xxx"
_copytree(orig_spiders_dir, self.spiders_dir)
sys.path.append(str(self.tmpdir))

View File

@@ -1,6 +1,6 @@
import shutil
from datetime import datetime, timezone
from pathlib import Path
from tempfile import mkdtemp
from twisted.trial import unittest
@@ -12,8 +12,7 @@ from scrapy.utils.test import get_crawler
class SpiderStateTest(unittest.TestCase):
def test_store_load(self):
jobdir = self.mktemp()
Path(jobdir).mkdir()
jobdir = mkdtemp()
try:
spider = Spider(name="default")
dt = datetime.now(tz=timezone.utc)

View File

@@ -25,7 +25,7 @@ class BaseQueueTestCase(unittest.TestCase):
def setUp(self):
self.tmpdir = tempfile.mkdtemp(prefix="scrapy-queue-tests-")
self.qpath = self.tempfilename()
self.qdir = self.mkdtemp()
self.qdir = tempfile.mkdtemp()
self.crawler = get_crawler(Spider)
def tearDown(self):

View File

@@ -4,6 +4,7 @@ Tests borrowed from the twisted.web.client tests.
"""
import shutil
from pathlib import Path
from tempfile import mkdtemp
import OpenSSL.SSL
from twisted.internet import defer, reactor
@@ -274,8 +275,7 @@ class WebClientTestCase(unittest.TestCase):
return reactor.listenTCP(0, site, interface="127.0.0.1")
def setUp(self):
self.tmpname = Path(self.mktemp())
self.tmpname.mkdir()
self.tmpname = Path(mkdtemp())
(self.tmpname / "file").write_bytes(b"0123456789")
r = static.File(str(self.tmpname))
r.putChild(b"redirect", util.Redirect(b"/file"))
@@ -440,8 +440,7 @@ class WebClientSSLTestCase(unittest.TestCase):
return f"https://127.0.0.1:{self.portno}/{path}"
def setUp(self):
self.tmpname = Path(self.mktemp())
self.tmpname.mkdir()
self.tmpname = Path(mkdtemp())
(self.tmpname / "file").write_bytes(b"0123456789")
r = static.File(str(self.tmpname))
r.putChild(b"payload", PayloadResource())