Mirror of https://github.com/scrapy/scrapy.git (synced 2025-02-22 04:12:52 +00:00)
renamed UserTimeoutError and ServerTimeoutError #583
parent 4cf6a3b4b3
commit ebf76867bd
@@ -18,9 +18,9 @@ About HTTP errors to consider:
   indicate server overload, which would be something we want to retry
 """
 
-from twisted.internet.defer import TimeoutError as UserTimeoutError
-from twisted.internet.error import TimeoutError as ServerTimeoutError, \
-        DNSLookupError, ConnectionRefusedError, ConnectionDone, ConnectError, \
+from twisted.internet import defer
+from twisted.internet.error import TimeoutError, DNSLookupError, \
+        ConnectionRefusedError, ConnectionDone, ConnectError, \
         ConnectionLost, TCPTimedOutError
 
 from scrapy import log
@@ -33,7 +33,7 @@ class RetryMiddleware(object):
 
     # IOError is raised by the HttpCompression middleware when trying to
     # decompress an empty response
-    EXCEPTIONS_TO_RETRY = (ServerTimeoutError, UserTimeoutError, DNSLookupError,
+    EXCEPTIONS_TO_RETRY = (defer.TimeoutError, TimeoutError, DNSLookupError,
                            ConnectionRefusedError, ConnectionDone, ConnectError,
                            ConnectionLost, TCPTimedOutError, ResponseFailed,
                            IOError)
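
The tuple in the hunk above is what drives the middleware's exception handling: a request that fails with one of these connection-level errors is re-scheduled instead of being dropped. Below is a minimal sketch of that pattern, a simplified stand-in rather than the actual RetryMiddleware implementation (the SimpleRetryMiddleware name, the max_retry_times default and the _retry helper are illustrative, and ResponseFailed is left out because its import depends on the optional http11 handler):

from twisted.internet import defer
from twisted.internet.error import (TimeoutError, DNSLookupError,
        ConnectionRefusedError, ConnectionDone, ConnectError,
        ConnectionLost, TCPTimedOutError)


class SimpleRetryMiddleware(object):
    # Same idea as the tuple in the diff, minus the http11-only ResponseFailed.
    EXCEPTIONS_TO_RETRY = (defer.TimeoutError, TimeoutError, DNSLookupError,
                           ConnectionRefusedError, ConnectionDone, ConnectError,
                           ConnectionLost, TCPTimedOutError, IOError)

    def __init__(self, max_retry_times=2):
        self.max_retry_times = max_retry_times

    def process_exception(self, request, exception, spider):
        # Retry only connection-level failures, and only when the request has
        # not opted out through the dont_retry meta key.
        if isinstance(exception, self.EXCEPTIONS_TO_RETRY) \
                and not request.meta.get('dont_retry', False):
            return self._retry(request, exception, spider)

    def _retry(self, request, reason, spider):
        retries = request.meta.get('retry_times', 0) + 1
        if retries <= self.max_retry_times:
            retryreq = request.copy()
            retryreq.meta['retry_times'] = retries
            retryreq.dont_filter = True
            return retryreq
        # Falling through returns None, which gives up on the request.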
@@ -1,7 +1,8 @@
 import unittest
-from twisted.internet.error import TimeoutError as ServerTimeoutError, \
-        DNSLookupError, ConnectionRefusedError, ConnectionDone, ConnectError, \
-        ConnectionLost
+from twisted.internet import defer
+from twisted.internet.error import TimeoutError, DNSLookupError, \
+        ConnectionRefusedError, ConnectionDone, ConnectError, \
+        ConnectionLost, TCPTimedOutError
 
 from scrapy import optional_features
 from scrapy.contrib.downloadermiddleware.retry import RetryMiddleware
@@ -41,7 +42,6 @@ class RetryTest(unittest.TestCase):
 
     def test_dont_retry_exc(self):
         req = Request('http://www.scrapytest.org/503', meta={'dont_retry': True})
         rsp = Response('http://www.scrapytest.org/503', body='', status=503)
-
         r = self.mw.process_exception(req, DNSLookupError(), self.spider)
         assert r is None
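
The hunk above keeps the existing behaviour check: even a retryable exception must not trigger a retry when the request carries dont_retry in its meta. Expressed against the simplified middleware sketched earlier (with a hypothetical FakeRequest standing in for scrapy.http.Request), the same expectation reads roughly as:

from twisted.internet.error import DNSLookupError

class FakeRequest(object):
    # Minimal stand-in for scrapy.http.Request: just meta, dont_filter, copy().
    def __init__(self, meta=None):
        self.meta = dict(meta or {})
        self.dont_filter = False

    def copy(self):
        return FakeRequest(self.meta)

mw = SimpleRetryMiddleware()
req = FakeRequest(meta={'dont_retry': True})
# DNSLookupError is in EXCEPTIONS_TO_RETRY, but dont_retry suppresses the retry.
assert mw.process_exception(req, DNSLookupError(), spider=None) is None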
@@ -64,9 +64,9 @@ class RetryTest(unittest.TestCase):
         assert self.mw.process_response(req, rsp, self.spider) is rsp
 
     def test_twistederrors(self):
-        exceptions = [ServerTimeoutError, DNSLookupError,
-                      ConnectionRefusedError, ConnectionDone, ConnectError,
-                      ConnectionLost]
+        exceptions = [defer.TimeoutError, TCPTimedOutError, TimeoutError,
+                      DNSLookupError, ConnectionRefusedError, ConnectionDone,
+                      ConnectError, ConnectionLost]
         if 'http11' in optional_features:
             exceptions.append(ResponseFailed)
 
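
What makes the module-qualified form possible is that Twisted ships two different TimeoutError classes: twisted.internet.defer.TimeoutError (the removed UserTimeoutError alias, raised when a user-specified timeout expires) and twisted.internet.error.TimeoutError (the removed ServerTimeoutError alias, a connection-level timeout). A small stand-alone snippet, independent of the Scrapy code base, showing that the two stay distinct and can still be caught together:

from twisted.internet import defer
from twisted.internet.error import TimeoutError

# Two unrelated classes that merely share a name; qualifying one of them
# through the defer module avoids the import-time aliases removed above.
print(defer.TimeoutError)   # <class 'twisted.internet.defer.TimeoutError'>
print(TimeoutError)         # <class 'twisted.internet.error.TimeoutError'>

# Catching both in one except clause (or via an isinstance() check against a
# tuple such as EXCEPTIONS_TO_RETRY) works exactly as it did with the aliases.
try:
    raise defer.TimeoutError("simulated user-specified timeout")
except (defer.TimeoutError, TimeoutError) as exc:
    print("caught:", exc)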