Remove build_from_settings().

parent bcef96570b
commit eda3a89b3f
@@ -2,12 +2,13 @@ from __future__ import annotations
 
 import logging
 import pprint
+import warnings
 from collections import defaultdict, deque
 from typing import TYPE_CHECKING, Any, TypeVar, cast
 
-from scrapy.exceptions import NotConfigured
+from scrapy.exceptions import NotConfigured, ScrapyDeprecationWarning
 from scrapy.utils.defer import process_chain, process_parallel
-from scrapy.utils.misc import build_from_crawler, build_from_settings, load_object
+from scrapy.utils.misc import build_from_crawler, load_object
 
 if TYPE_CHECKING:
     from collections.abc import Callable, Iterable
@@ -20,7 +21,7 @@ if TYPE_CHECKING:
 
     from scrapy import Spider
     from scrapy.crawler import Crawler
-    from scrapy.settings import Settings
+    from scrapy.settings import BaseSettings, Settings
 
 _P = ParamSpec("_P")
 
@@ -50,8 +51,27 @@ class MiddlewareManager:
     def _get_mwlist_from_settings(cls, settings: Settings) -> list[Any]:
         raise NotImplementedError
 
+    @staticmethod
+    def _build_from_settings(objcls: type[_T], settings: BaseSettings) -> _T:
+        if hasattr(objcls, "from_settings"):
+            instance = objcls.from_settings(settings)  # type: ignore[attr-defined]
+            method_name = "from_settings"
+        else:
+            instance = objcls()
+            method_name = "__new__"
+        if instance is None:
+            raise TypeError(f"{objcls.__qualname__}.{method_name} returned None")
+        return cast(_T, instance)
+
     @classmethod
     def from_settings(cls, settings: Settings, crawler: Crawler | None = None) -> Self:
+        if crawler is None:
+            warnings.warn(
+                "Calling MiddlewareManager.from_settings() without a Crawler instance is deprecated."
+                " As this method will be deprecated in the future, please switch to from_crawler().",
+                category=ScrapyDeprecationWarning,
+                stacklevel=2,
+            )
         mwlist = cls._get_mwlist_from_settings(settings)
         middlewares = []
         enabled = []
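A minimal sketch (not part of the commit) of what the new private helper does: _build_from_settings() prefers a from_settings() classmethod and otherwise falls back to the no-argument constructor, raising TypeError if either returns None. WithFromSettings and PlainInit are made-up classes:

from scrapy.middleware import MiddlewareManager
from scrapy.settings import Settings


class WithFromSettings:
    def __init__(self, timeout=None):
        self.timeout = timeout

    @classmethod
    def from_settings(cls, settings):
        # Read a value from the settings at construction time.
        return cls(timeout=settings.getint("DOWNLOAD_TIMEOUT"))


class PlainInit:
    pass


settings = Settings()  # populated with Scrapy's defaults
mw1 = MiddlewareManager._build_from_settings(WithFromSettings, settings)  # via from_settings()
mw2 = MiddlewareManager._build_from_settings(PlainInit, settings)  # via PlainInit()
assert mw1.timeout == 180  # Scrapy's default DOWNLOAD_TIMEOUT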
@@ -61,7 +81,7 @@ class MiddlewareManager:
                 if crawler is not None:
                     mw = build_from_crawler(mwcls, crawler)
                 else:
-                    mw = build_from_settings(mwcls, settings)
+                    mw = MiddlewareManager._build_from_settings(mwcls, settings)
                 middlewares.append(mw)
                 enabled.append(clspath)
             except NotConfigured as e:
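The warning path above can be exercised with a hypothetical minimal subclass; a hedged sketch, not from the commit:

import warnings

from scrapy.exceptions import ScrapyDeprecationWarning
from scrapy.middleware import MiddlewareManager
from scrapy.settings import Settings
from scrapy.utils.test import get_crawler


class EmptyMiddlewareManager(MiddlewareManager):
    @classmethod
    def _get_mwlist_from_settings(cls, settings):
        return []  # enable no middlewares


with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    EmptyMiddlewareManager.from_settings(Settings())  # crawler=None -> warns
assert any(issubclass(w.category, ScrapyDeprecationWarning) for w in caught)

EmptyMiddlewareManager.from_crawler(get_crawler())  # the supported path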
@@ -26,7 +26,6 @@ if TYPE_CHECKING:
 
     from scrapy import Spider
     from scrapy.crawler import Crawler
-    from scrapy.settings import BaseSettings
 
 
 _ITERABLE_SINGLE_VALUES = dict, Item, str, bytes
@@ -150,7 +149,7 @@ def create_instance(objcls, settings, crawler, *args, **kwargs):
     """
     warnings.warn(
         "The create_instance() function is deprecated. "
-        "Please use build_from_crawler() or build_from_settings() instead.",
+        "Please use build_from_crawler() instead.",
         category=ScrapyDeprecationWarning,
         stacklevel=2,
     )
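The narrowed message matches the removal: migrating code should construct components through build_from_crawler(). A sketch with a made-up DummyComponent class:

from scrapy.utils.misc import build_from_crawler
from scrapy.utils.test import get_crawler


class DummyComponent:
    def __init__(self, crawler=None):
        self.crawler = crawler

    @classmethod
    def from_crawler(cls, crawler):
        return cls(crawler=crawler)


crawler = get_crawler()
# Before: component = create_instance(DummyComponent, crawler.settings, crawler)
component = build_from_crawler(DummyComponent, crawler)
assert component.crawler is crawler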
@@ -176,7 +175,7 @@ def create_instance(objcls, settings, crawler, *args, **kwargs):
 def build_from_crawler(
     objcls: type[T], crawler: Crawler, /, *args: Any, **kwargs: Any
 ) -> T:
-    """Construct a class instance using its ``from_crawler`` constructor.
+    """Construct a class instance using its ``from_crawler`` or ``from_settings`` constructor.
 
     ``*args`` and ``**kwargs`` are forwarded to the constructor.
 
@@ -196,26 +195,6 @@ def build_from_crawler(
     return cast(T, instance)
 
 
-def build_from_settings(
-    objcls: type[T], settings: BaseSettings, /, *args: Any, **kwargs: Any
-) -> T:
-    """Construct a class instance using its ``from_settings`` constructor.
-
-    ``*args`` and ``**kwargs`` are forwarded to the constructor.
-
-    Raises ``TypeError`` if the resulting instance is ``None``.
-    """
-    if hasattr(objcls, "from_settings"):
-        instance = objcls.from_settings(settings, *args, **kwargs)  # type: ignore[attr-defined]
-        method_name = "from_settings"
-    else:
-        instance = objcls(*args, **kwargs)
-        method_name = "__new__"
-    if instance is None:
-        raise TypeError(f"{objcls.__qualname__}.{method_name} returned None")
-    return cast(T, instance)
-
-
 @contextmanager
 def set_environ(**kwargs: str) -> Iterator[None]:
     """Temporarily set environment variables inside the context manager and
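With build_from_settings() gone, callers are expected to hold a Crawler and go through build_from_crawler(), as the test changes below do. A hedged sketch of the replacement pattern (the cipher string is only illustrative):

from scrapy.core.downloader.contextfactory import ScrapyClientContextFactory
from scrapy.utils.misc import build_from_crawler
from scrapy.utils.test import get_crawler

crawler = get_crawler(settings_dict={"DOWNLOADER_CLIENT_TLS_CIPHERS": "DEFAULT"})
# Before: factory = build_from_settings(ScrapyClientContextFactory, settings)
factory = build_from_crawler(ScrapyClientContextFactory, crawler)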
@@ -2,7 +2,7 @@ from twisted.trial import unittest
 
 from scrapy.exceptions import NotConfigured
 from scrapy.middleware import MiddlewareManager
-from scrapy.settings import Settings
+from scrapy.utils.test import get_crawler
 
 
 class M1:
@@ -23,8 +23,6 @@ class M2:
     def close_spider(self, spider):
         pass
 
-    pass
-
 
 class M3:
     def process(self, response, request, spider):
@@ -83,7 +81,7 @@ class MiddlewareManagerTest(unittest.TestCase):
         self.assertEqual(mwman.middlewares, (m1, m2, m3))
 
     def test_enabled_from_settings(self):
-        settings = Settings()
-        mwman = TestMiddlewareManager.from_settings(settings)
+        crawler = get_crawler()
+        mwman = TestMiddlewareManager.from_crawler(crawler)
         classes = [x.__class__ for x in mwman.middlewares]
         self.assertEqual(classes, [M1, M3])
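The rewritten test builds the manager from a Crawler; get_crawler() returns one whose settings are Scrapy's defaults overlaid with any settings_dict entries:

from scrapy.utils.test import get_crawler

crawler = get_crawler(settings_dict={"LOG_LEVEL": "DEBUG"})
assert crawler.settings["LOG_LEVEL"] == "DEBUG"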
@@ -10,7 +10,6 @@ from scrapy.item import Field, Item
 from scrapy.utils.misc import (
     arg_to_iter,
     build_from_crawler,
-    build_from_settings,
     create_instance,
     load_object,
     rel_has_nofollow,
@@ -197,39 +196,6 @@ class UtilsMiscTestCase(unittest.TestCase):
         with self.assertRaises(TypeError):
             build_from_crawler(m, crawler, *args, **kwargs)
 
-    def test_build_from_settings(self):
-        settings = mock.MagicMock()
-        args = (True, 100.0)
-        kwargs = {"key": "val"}
-
-        def _test_with_settings(mock, settings):
-            build_from_settings(mock, settings, *args, **kwargs)
-            if hasattr(mock, "from_settings"):
-                mock.from_settings.assert_called_once_with(settings, *args, **kwargs)
-                self.assertEqual(mock.call_count, 0)
-            else:
-                mock.assert_called_once_with(*args, **kwargs)
-
-        # Check usage of correct constructor using three mocks:
-        # 1. with no alternative constructors
-        # 2. with from_settings() constructor
-        # 3. with from_settings() and from_crawler() constructor
-        spec_sets = (
-            ["__qualname__"],
-            ["__qualname__", "from_settings"],
-            ["__qualname__", "from_settings", "from_crawler"],
-        )
-        for specs in spec_sets:
-            m = mock.MagicMock(spec_set=specs)
-            _test_with_settings(m, settings)
-            m.reset_mock()
-
-        # Check adoption of crawler settings
-        m = mock.MagicMock(spec_set=["__qualname__", "from_settings"])
-        m.from_settings.return_value = None
-        with self.assertRaises(TypeError):
-            build_from_settings(m, settings, *args, **kwargs)
-
     def test_set_environ(self):
         assert os.environ.get("some_test_environ") is None
         with set_environ(some_test_environ="test_value"):
@@ -9,25 +9,18 @@ from tempfile import mkdtemp
 import OpenSSL.SSL
 from twisted.internet import defer, reactor
+from twisted.internet.defer import inlineCallbacks
+from twisted.internet.testing import StringTransport
+from twisted.protocols.policies import WrappingFactory
 from twisted.trial import unittest
 from twisted.web import resource, server, static, util
 
-try:
-    from twisted.internet.testing import StringTransport
-except ImportError:
-    # deprecated in Twisted 19.7.0
-    # (remove once we bump our requirement past that version)
-    from twisted.test.proto_helpers import StringTransport
-
-from twisted.internet.defer import inlineCallbacks
-from twisted.protocols.policies import WrappingFactory
-
 from scrapy.core.downloader import webclient as client
 from scrapy.core.downloader.contextfactory import ScrapyClientContextFactory
 from scrapy.http import Headers, Request
 from scrapy.settings import Settings
-from scrapy.utils.misc import build_from_settings
+from scrapy.utils.misc import build_from_crawler
 from scrapy.utils.python import to_bytes, to_unicode
 from scrapy.utils.test import get_crawler
 from tests.mockserver import (
     BrokenDownloadResource,
     ErrorResource,
@@ -469,22 +462,22 @@ class WebClientCustomCiphersSSLTestCase(WebClientSSLTestCase):
 
     def testPayload(self):
         s = "0123456789" * 10
-        settings = Settings({"DOWNLOADER_CLIENT_TLS_CIPHERS": self.custom_ciphers})
-        client_context_factory = build_from_settings(
-            ScrapyClientContextFactory, settings
+        crawler = get_crawler(
+            settings_dict={"DOWNLOADER_CLIENT_TLS_CIPHERS": self.custom_ciphers}
         )
+        client_context_factory = build_from_crawler(ScrapyClientContextFactory, crawler)
         return getPage(
             self.getURL("payload"), body=s, contextFactory=client_context_factory
         ).addCallback(self.assertEqual, to_bytes(s))
 
     def testPayloadDisabledCipher(self):
         s = "0123456789" * 10
-        settings = Settings(
-            {"DOWNLOADER_CLIENT_TLS_CIPHERS": "ECDHE-RSA-AES256-GCM-SHA384"}
-        )
-        client_context_factory = build_from_settings(
-            ScrapyClientContextFactory, settings
+        crawler = get_crawler(
+            settings_dict={
+                "DOWNLOADER_CLIENT_TLS_CIPHERS": "ECDHE-RSA-AES256-GCM-SHA384"
+            }
         )
+        client_context_factory = build_from_crawler(ScrapyClientContextFactory, crawler)
         d = getPage(
             self.getURL("payload"), body=s, contextFactory=client_context_factory
         )
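Per the updated build_from_crawler() docstring earlier in this diff, a component that only defines from_settings() should still be buildable from a Crawler; a sketch under that assumption, with a made-up SettingsOnly class:

from scrapy.utils.misc import build_from_crawler
from scrapy.utils.test import get_crawler


class SettingsOnly:
    @classmethod
    def from_settings(cls, settings):
        return cls()


obj = build_from_crawler(SettingsOnly, get_crawler())  # falls back to from_settings()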