
Removed some deprecated functions and functionalities (#6116)

Chenwei Niu 2023-10-30 19:59:11 +11:00 committed by GitHub
parent 1d81585612
commit 1f797d0fdb
4 changed files with 1 addition and 87 deletions

scrapy/utils/log.py

@@ -2,7 +2,6 @@ from __future__ import annotations
 
 import logging
 import sys
-import warnings
 from logging.config import dictConfig
 from types import TracebackType
 from typing import TYPE_CHECKING, Any, List, Optional, Tuple, Type, Union, cast
@@ -11,7 +10,6 @@ from twisted.python import log as twisted_log
 from twisted.python.failure import Failure
 
 import scrapy
-from scrapy.exceptions import ScrapyDeprecationWarning
 from scrapy.settings import Settings
 from scrapy.utils.versions import scrapy_components_versions
 
@@ -232,18 +230,9 @@ def logformatter_adapter(logkws: dict) -> Tuple[int, str, dict]:
     and adapts it into a tuple of positional arguments for logger.log calls,
     handling backward compatibility as well.
     """
-    if not {"level", "msg", "args"} <= set(logkws):
-        warnings.warn("Missing keys in LogFormatter method", ScrapyDeprecationWarning)
-
-    if "format" in logkws:
-        warnings.warn(
-            "`format` key in LogFormatter methods has been "
-            "deprecated, use `msg` instead",
-            ScrapyDeprecationWarning,
-        )
     level = logkws.get("level", logging.INFO)
-    message = logkws.get("format", logkws.get("msg"))
+    message = logkws.get("msg") or ""
     # NOTE: This also handles 'args' being an empty dict, that case doesn't
     # play well in logger.log calls
     args = logkws if not logkws.get("args") else logkws["args"]
 
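Note for downstream LogFormatter subclasses: with the `format` fallback gone, the dicts returned by LogFormatter methods must carry `level`, `msg` and `args` outright. A minimal sketch of a conforming formatter, following the custom-formatter pattern from the Scrapy docs (the class name and the demoted log level are illustrative):

import logging

from scrapy import logformatter


class PoliteLogFormatter(logformatter.LogFormatter):
    def dropped(self, item, exception, response, spider):
        # Return the three keys logformatter_adapter now expects; a
        # legacy "format" key would simply be ignored after this change.
        return {
            "level": logging.INFO,
            "msg": "Dropped: %(exception)s",
            "args": {"exception": exception},
        }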

scrapy/utils/response.py

@@ -14,7 +14,6 @@ from w3lib import html
 
 import scrapy
 from scrapy.http.response import Response
-from scrapy.utils.decorators import deprecated
 from scrapy.utils.python import to_bytes, to_unicode
 
 _baseurl_cache: "WeakKeyDictionary[Response, str]" = WeakKeyDictionary()
@@ -55,25 +54,6 @@ def response_status_message(status: Union[bytes, float, int, str]) -> str:
     return f"{status_int} {to_unicode(message)}"
 
 
-@deprecated
-def response_httprepr(response: Response) -> bytes:
-    """Return raw HTTP representation (as bytes) of the given response. This
-    is provided only for reference, since it's not the exact stream of bytes
-    that was received (that's not exposed by Twisted).
-    """
-    values = [
-        b"HTTP/1.1 ",
-        to_bytes(str(response.status)),
-        b" ",
-        to_bytes(http.RESPONSES.get(response.status, b"")),
-        b"\r\n",
-    ]
-    if response.headers:
-        values.extend([response.headers.to_string(), b"\r\n"])
-    values.extend([b"\r\n", response.body])
-    return b"".join(values)
-
-
 def open_in_browser(
     response: Union[
         "scrapy.http.response.html.HtmlResponse",

tests/test_downloadermiddleware_stats.py

@@ -1,12 +1,8 @@
-import warnings
-from itertools import product
 from unittest import TestCase
 
 from scrapy.downloadermiddlewares.stats import DownloaderStats
-from scrapy.exceptions import ScrapyDeprecationWarning
 from scrapy.http import Request, Response
 from scrapy.spiders import Spider
-from scrapy.utils.response import response_httprepr
 from scrapy.utils.test import get_crawler
 
@@ -40,25 +36,6 @@ class TestDownloaderStats(TestCase):
         self.mw.process_response(self.req, self.res, self.spider)
         self.assertStatsEqual("downloader/response_count", 1)
 
-    def test_response_len(self):
-        body = (b"", b"not_empty")  # empty/notempty body
-        headers = (
-            {},
-            {"lang": "en"},
-            {"lang": "en", "User-Agent": "scrapy"},
-        )  # 0 headers, 1h and 2h
-        test_responses = [  # form test responses with all combinations of body/headers
-            Response(url="scrapytest.org", status=200, body=r[0], headers=r[1])
-            for r in product(body, headers)
-        ]
-        for test_response in test_responses:
-            self.crawler.stats.set_value("downloader/response_bytes", 0)
-            self.mw.process_response(self.req, test_response, self.spider)
-            with warnings.catch_warnings():
-                warnings.simplefilter("ignore", ScrapyDeprecationWarning)
-                resp_size = len(response_httprepr(test_response))
-            self.assertStatsEqual("downloader/response_bytes", resp_size)
-
     def test_process_exception(self):
         self.mw.process_exception(self.req, MyException(), self.spider)
         self.assertStatsEqual("downloader/exception_count", 1)

tests/test_utils_response.py

@@ -1,16 +1,13 @@
 import unittest
-import warnings
 from pathlib import Path
 from urllib.parse import urlparse
 
-from scrapy.exceptions import ScrapyDeprecationWarning
 from scrapy.http import HtmlResponse, Response, TextResponse
 from scrapy.utils.python import to_bytes
 from scrapy.utils.response import (
     get_base_url,
     get_meta_refresh,
     open_in_browser,
-    response_httprepr,
     response_status_message,
 )
@@ -20,35 +17,6 @@ __doctests__ = ["scrapy.utils.response"]
 class ResponseUtilsTest(unittest.TestCase):
     dummy_response = TextResponse(url="http://example.org/", body=b"dummy_response")
 
-    def test_response_httprepr(self):
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore", ScrapyDeprecationWarning)
-
-            r1 = Response("http://www.example.com")
-            self.assertEqual(response_httprepr(r1), b"HTTP/1.1 200 OK\r\n\r\n")
-
-            r1 = Response(
-                "http://www.example.com",
-                status=404,
-                headers={"Content-type": "text/html"},
-                body=b"Some body",
-            )
-            self.assertEqual(
-                response_httprepr(r1),
-                b"HTTP/1.1 404 Not Found\r\nContent-Type: text/html\r\n\r\nSome body",
-            )
-
-            r1 = Response(
-                "http://www.example.com",
-                status=6666,
-                headers={"Content-type": "text/html"},
-                body=b"Some body",
-            )
-            self.assertEqual(
-                response_httprepr(r1),
-                b"HTTP/1.1 6666 \r\nContent-Type: text/html\r\n\r\nSome body",
-            )
-
     def test_open_in_browser(self):
         url = "http:///www.example.com/some/page.html"
         body = b"<html> <head> <title>test page</title> </head> <body>test body</body> </html>"