mirror of https://github.com/scrapy/scrapy.git

Import changes

commit 889b471852
parent f85bf77da3

@@ -1,11 +1,11 @@
 """Download handlers for http and https schemes"""
 
+import ipaddress
 import logging
 import re
 import warnings
 from contextlib import suppress
 from io import BytesIO
-from ipaddress import ip_address
 from time import time
 from urllib.parse import urldefrag
 
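The import switch does not change behaviour at runtime: both spellings reach the same stdlib function, only the name used at the call sites changes. A minimal stdlib-only sketch of the two forms (no Scrapy code involved):

    import ipaddress                     # new style: module import
    from ipaddress import ip_address     # old style: direct name import

    # Both calls invoke the same function and return equal address objects.
    assert ipaddress.ip_address("127.0.0.1") == ip_address("127.0.0.1")
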
@@ -468,7 +468,7 @@ class _ResponseReader(protocol.Protocol):
         self._certificate = ssl.Certificate(self.transport._producer.getPeerCertificate())
 
         if self._ip_address is None:
-            self._ip_address = ip_address(self.transport._producer.getPeer().host)
+            self._ip_address = ipaddress.ip_address(self.transport._producer.getPeer().host)
 
     def dataReceived(self, bodyBytes):
         # This maybe called several times after cancel was called with buffered data.
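For context on the updated call site: ipaddress.ip_address() parses the textual peer host reported by the transport and returns an IPv4Address or IPv6Address object, raising ValueError for anything that is not an IP literal. A small stdlib-only illustration:

    import ipaddress

    for host in ("127.0.0.1", "::1"):
        addr = ipaddress.ip_address(host)
        # Prints: IPv4Address 127.0.0.1, then IPv6Address ::1
        print(type(addr).__name__, addr)

    # A hostname is not an IP literal, so it is rejected:
    try:
        ipaddress.ip_address("localhost")
    except ValueError as exc:
        print("rejected:", exc)
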
@@ -441,13 +441,15 @@ with multiples lines
         self.assertEqual(cert.getIssuer().commonName, b"localhost")
 
     @defer.inlineCallbacks
-    def test_dns_server_ip_address(self):
+    def test_dns_server_ip_address_none(self):
         crawler = self.runner.create_crawler(SingleRequestSpider)
         url = self.mockserver.url('/status?n=200')
         yield crawler.crawl(seed=url, mockserver=self.mockserver)
         ip_address = crawler.spider.meta['responses'][0].ip_address
         self.assertIsNone(ip_address)
 
+    @defer.inlineCallbacks
+    def test_dns_server_ip_address(self):
+        crawler = self.runner.create_crawler(SingleRequestSpider)
+        url = self.mockserver.url('/echo?body=test')
+        expected_netloc, _ = urlparse(url).netloc.split(':')
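The rendered diff is truncated at this point, so the rest of the new test is not visible. As a hedged sketch only (not the commit's actual code), the body presumably goes on to crawl the URL and compare the response's ip_address with the address that expected_netloc resolves to. The helper below is hypothetical, and its use of socket.gethostbyname is an assumption about how that resolution could be done:

    import ipaddress
    import socket
    from urllib.parse import urlparse

    def expected_ip_for(url):
        # Hypothetical helper: resolve the host part of the mock-server URL;
        # the truncated test presumably compares the response's ip_address
        # against a value obtained roughly like this.
        expected_netloc, _ = urlparse(url).netloc.split(':')
        return ipaddress.ip_address(socket.gethostbyname(expected_netloc))

    # For a mock server bound to localhost this yields 127.0.0.1.
    print(expected_ip_for("http://localhost:8998/echo?body=test"))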