Mirror of https://github.com/scrapy/scrapy.git
remove request_uploaded signal and move response_received and response_downloaded to downloader manager. closes #228
--HG-- extra : rebase_source : 4af0d2a01b34de8a21048bb7f4a66bfc484b3b8f
parent: 3c5ab10688
commit: 3414bf13ee
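For code that connected to the removed request_uploaded signal, the deprecated scrapy.core.signals module (see the hunk near the end of this diff) now aliases that name to response_downloaded, which after this change also carries the originating request. A minimal migration sketch, assuming the pydispatcher-based dispatcher.connect API Scrapy shipped at the time (scrapy.xlib.pydispatch); the handler name is illustrative:

# Migration sketch, not part of this commit.
# Old code connected like this (the signal object no longer exists):
#   dispatcher.connect(handler, signal=signals.request_uploaded)
from scrapy.xlib.pydispatch import dispatcher
from scrapy import signals

def handler(response, request, spider):
    # Same information as before: the request that was sent, plus its response.
    pass

dispatcher.connect(handler, signal=signals.response_downloaded)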
@@ -200,27 +200,11 @@ request_received
 :param spider: the spider which generated the request
 :type spider: :class:`~scrapy.spider.BaseSpider` object
 
-request_uploaded
-----------------
-
-.. signal:: request_uploaded
-.. function:: request_uploaded(request, spider)
-
-Sent right after the download has sent a :class:`~scrapy.http.Request`.
-
-This signal does not support returning deferreds from their handlers.
-
-:param request: the request uploaded/sent
-:type request: :class:`~scrapy.http.Request` object
-
-:param spider: the spider which generated the request
-:type spider: :class:`~scrapy.spider.BaseSpider` object
-
 response_received
 -----------------
 
 .. signal:: response_received
-.. function:: response_received(response, spider)
+.. function:: response_received(response, request, spider)
 
 Sent when the engine receives a new :class:`~scrapy.http.Response` from the
 downloader.
@@ -230,6 +214,9 @@ response_received
 :param response: the response received
 :type response: :class:`~scrapy.http.Response` object
 
+:param request: the request that generated the response
+:type request: :class:`~scrapy.http.Request` object
+
 :param spider: the spider for which the response is intended
 :type spider: :class:`~scrapy.spider.BaseSpider` object
 
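As an illustration of the documented (response, request, spider) signature, here is a minimal sketch of a response_received handler. The dispatcher.connect import path (scrapy.xlib.pydispatch) and the names used are assumptions for illustration, not part of this change:

# Sketch only: record the HTTP status the engine saw for each requested URL.
from scrapy.xlib.pydispatch import dispatcher
from scrapy import signals

status_by_url = {}

def on_response_received(response, request, spider):
    # The new request argument lets the handler key data by the requested URL.
    status_by_url[request.url] = response.status

dispatcher.connect(on_response_received, signal=signals.response_received)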
@@ -237,7 +224,7 @@ response_downloaded
 -------------------
 
 .. signal:: response_downloaded
-.. function:: response_downloaded(response, spider)
+.. function:: response_downloaded(response, request, spider)
 
 Sent by the downloader right after a ``HTTPResponse`` is downloaded.
 
@@ -246,6 +233,9 @@ response_downloaded
 :param response: the response downloaded
 :type response: :class:`~scrapy.http.Response` object
 
+:param request: the request that generated the response
+:type request: :class:`~scrapy.http.Request` object
+
 :param spider: the spider for which the response is intended
 :type spider: :class:`~scrapy.spider.BaseSpider` object
 
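Similarly, a short sketch of a response_downloaded handler that uses the newly passed request; again the connection API and names are assumptions used only for illustration:

# Sketch only: relate the raw downloaded response to the request that produced it.
from scrapy.xlib.pydispatch import dispatcher
from scrapy import signals

def on_response_downloaded(response, request, spider):
    spider.log("downloaded %d bytes from %s (requested as %s)"
               % (len(response.body), response.url, request.url))

dispatcher.connect(on_response_downloaded, signal=signals.response_downloaded)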
@@ -2,9 +2,7 @@
 
 from twisted.internet import reactor
 
-from scrapy import signals
 from scrapy.exceptions import NotSupported
-from scrapy.utils.signal import send_catch_log
 from scrapy.utils.misc import load_object
 from scrapy.conf import settings
 from scrapy import optional_features
@@ -31,17 +29,8 @@ class HttpDownloadHandler(object):
         return factory.deferred
 
     def _create_factory(self, request, spider):
-        def _download_signals(response):
-            send_catch_log(signal=signals.request_uploaded, request=request, \
-                    spider=spider)
-            send_catch_log(signal=signals.response_downloaded, response=response, \
-                    spider=spider)
-            return response
-
         timeout = getattr(spider, "download_timeout", None) or self.download_timeout
-        factory = self.httpclientfactory(request, timeout)
-        factory.deferred.addCallbacks(_download_signals)
-        return factory
+        return self.httpclientfactory(request, timeout)
 
     def _connect(self, factory):
         host, port = factory.host, factory.port
@@ -11,6 +11,8 @@ from twisted.python.failure import Failure
 from scrapy.exceptions import IgnoreRequest
 from scrapy.conf import settings
 from scrapy.utils.defer import mustbe_deferred
+from scrapy.utils.signal import send_catch_log
+from scrapy import signals
 from scrapy import log
 from .middleware import DownloaderMiddlewareManager
 from .handlers import DownloadHandlers
@@ -87,10 +89,12 @@ class Downloader(object):
             raise IgnoreRequest('Cannot fetch on a closing spider')
 
         site.active.add(request)
-        def _deactivate(_):
+        def _deactivate(response):
+            send_catch_log(signal=signals.response_received, \
+                response=response, request=request, spider=spider)
             site.active.remove(request)
             self._close_if_idle(spider)
-            return _
+            return response
 
         dfd = self.middleware.download(self.enqueue, request, spider)
         return dfd.addBoth(_deactivate)
@@ -100,7 +104,13 @@ class Downloader(object):
         site = self.sites[spider]
         if site.closing:
             raise IgnoreRequest
-        deferred = defer.Deferred()
+
+        def _downloaded(response):
+            send_catch_log(signal=signals.response_downloaded, \
+                    response=response, request=request, spider=spider)
+            return response
+
+        deferred = defer.Deferred().addCallback(_downloaded)
         site.queue.append((request, deferred))
         self._process_queue(spider)
         return deferred
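The _downloaded and _deactivate callbacks above rely on a standard Twisted idiom: a callback performs its side effect (here, firing a signal) and returns its argument unchanged, so later callbacks in the chain still receive the response. A self-contained sketch of that idiom, plain Twisted rather than Scrapy code:

from twisted.internet import defer

events = []

def _notify(result):
    events.append(result)   # stand-in for send_catch_log(...)
    return result           # returning the result keeps the callback chain intact

d = defer.Deferred().addCallback(_notify)
d.addCallback(lambda result: events.append("seen by later callback: %s" % result))
d.callback("fake response")

assert events == ["fake response", "seen by later callback: fake response"]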
@@ -6,8 +6,6 @@ docs/topics/downloader-middleware.rst
 
 """
 
-from scrapy import signals
-from scrapy.utils.signal import send_catch_log
 from scrapy import log
 from scrapy.http import Request, Response
 from scrapy.exceptions import NotConfigured
@@ -77,11 +75,7 @@ class DownloaderMiddlewareManager(object):
                 'Middleware %s.process_response must return Response or Request, got %s' % \
                 (method.im_self.__class__.__name__, type(response))
             if isinstance(response, Request):
-                send_catch_log(signal=signals.response_received, \
-                    response=response, spider=spider)
                 return response
-            send_catch_log(signal=signals.response_received, \
-                response=response, spider=spider)
             return response
 
         def process_exception(_failure):
@@ -3,3 +3,5 @@ from scrapy.signals import *
 import warnings
 warnings.warn("scrapy.core.signals is deprecated and will be removed in Scrapy 0.11, use scrapy.signals instead", \
     DeprecationWarning, stacklevel=2)
+
+request_uploaded = response_downloaded
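What the compatibility alias above means for legacy code, as a small sketch (the assertion simply restates the alias):

from scrapy.core import signals as old_signals   # emits the DeprecationWarning above
from scrapy import signals

# request_uploaded no longer exists as its own signal object; through the alias,
# handlers connected to the old name now fire on downloaded responses.
assert old_signals.request_uploaded is signals.response_downloaded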
@@ -11,7 +11,6 @@ spider_opened = object()
 spider_idle = object()
 spider_closed = object()
 request_received = object()
-request_uploaded = object()
 response_received = object()
 response_downloaded = object()
 item_scraped = object()