Mirror of https://github.com/scrapy/scrapy.git (synced 2025-02-14 14:05:01 +00:00)

Merge pull request #4271 from wRAR/asyncio-signals

async def support for signal handlers that already supported Deferreds
Commit: a6ef065eb5
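Summary of the change: scrapy.utils.signal.send_catch_log_deferred() used to wrap each receiver call in twisted.internet.defer.maybeDeferred, which accepts plain return values, Deferreds and Failures but not coroutines. This commit adds maybeDeferred_coro() to scrapy.utils.defer and swaps it in, so signal handlers written as async def are converted to Deferreds (via deferred_from_coro) wherever Deferred-returning handlers already worked. A hedged sketch of what this enables (the extension class is invented for illustration; crawler.signals.connect and signals.item_scraped are real Scrapy APIs):

    from scrapy import signals

    class MyExtension:  # hypothetical extension, for illustration only
        @classmethod
        def from_crawler(cls, crawler):
            ext = cls()
            # Previously this handler had to be synchronous or return a
            # Deferred; after this change it may be a coroutine function.
            crawler.signals.connect(ext.item_scraped, signal=signals.item_scraped)
            return ext

        async def item_scraped(self, item):
            ...  # any awaitable work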
scrapy/utils/defer.py
@@ -153,3 +153,20 @@ def deferred_f_from_coro_f(coro_f):
     def f(*coro_args, **coro_kwargs):
         return deferred_from_coro(coro_f(*coro_args, **coro_kwargs))
     return f
+
+
+def maybeDeferred_coro(f, *args, **kw):
+    """ Copy of defer.maybeDeferred that also converts coroutines to Deferreds. """
+    try:
+        result = f(*args, **kw)
+    except:  # noqa: E722
+        return defer.fail(failure.Failure(captureVars=defer.Deferred.debug))
+
+    if isinstance(result, defer.Deferred):
+        return result
+    elif _isfuture(result) or inspect.isawaitable(result):
+        return deferred_from_coro(result)
+    elif isinstance(result, failure.Failure):
+        return defer.fail(result)
+    else:
+        return defer.succeed(result)
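For context, defer.maybeDeferred already handles the Deferred, Failure and plain-value branches; the new elif delegates awaitables to deferred_from_coro(), defined earlier in this module. A rough illustration of the resulting behavior (values invented):

    from twisted.internet import defer
    from scrapy.utils.defer import maybeDeferred_coro

    def plain():
        return 42

    async def coro():
        return await defer.succeed(42)

    def failing():
        raise ValueError('boom')

    maybeDeferred_coro(plain)    # Deferred fired with 42 (defer.succeed branch)
    maybeDeferred_coro(coro)     # coroutine wrapped via deferred_from_coro
    maybeDeferred_coro(failing)  # Deferred fired with a Failure wrapping ValueError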
scrapy/utils/signal.py
@@ -2,12 +2,14 @@
 
 import logging
 
-from twisted.internet.defer import maybeDeferred, DeferredList, Deferred
+from twisted.internet.defer import DeferredList, Deferred
 from twisted.python.failure import Failure
 
 from pydispatch.dispatcher import Any, Anonymous, liveReceivers, \
     getAllReceivers, disconnect
 from pydispatch.robustapply import robustApply
+
+from scrapy.utils.defer import maybeDeferred_coro
 from scrapy.utils.log import failure_to_exc_info
 
 logger = logging.getLogger(__name__)
@@ -61,7 +63,7 @@ def send_catch_log_deferred(signal=Any, sender=Anonymous, *arguments, **named):
     spider = named.get('spider', None)
     dfds = []
     for receiver in liveReceivers(getAllReceivers(sender, signal)):
-        d = maybeDeferred(robustApply, receiver, signal=signal, sender=sender,
+        d = maybeDeferred_coro(robustApply, receiver, signal=signal, sender=sender,
                           *arguments, **named)
         d.addErrback(logerror, receiver)
         d.addBoth(lambda result: (receiver, result))
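The functional change in send_catch_log_deferred() is the single call above: robustApply still invokes each receiver with only the arguments it accepts, and maybeDeferred_coro now normalizes a coroutine result into a Deferred like any other return value. A hedged usage sketch (the signal object and handler are invented; the list-of-(receiver, result)-pairs result shape follows from the addBoth call shown above):

    from pydispatch import dispatcher
    from scrapy.utils.signal import send_catch_log_deferred

    my_signal = object()  # Scrapy signals are plain sentinel objects

    async def handler(arg):
        return arg.upper()

    dispatcher.connect(handler, signal=my_signal)
    d = send_catch_log_deferred(my_signal, arg='test')
    d.addCallback(print)  # e.g. [(<handler>, 'TEST')]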
tests/test_signals.py (new file)
@@ -0,0 +1,44 @@
+from pytest import mark
+from twisted.internet import defer
+from twisted.trial import unittest
+
+from scrapy import signals, Request, Spider
+from scrapy.utils.test import get_crawler, get_from_asyncio_queue
+
+from tests.mockserver import MockServer
+
+
+class ItemSpider(Spider):
+    name = 'itemspider'
+
+    def start_requests(self):
+        for index in range(10):
+            yield Request(self.mockserver.url('/status?n=200&id=%d' % index),
+                          meta={'index': index})
+
+    def parse(self, response):
+        return {'index': response.meta['index']}
+
+
+class AsyncSignalTestCase(unittest.TestCase):
+    def setUp(self):
+        self.mockserver = MockServer()
+        self.mockserver.__enter__()
+        self.items = []
+
+    def tearDown(self):
+        self.mockserver.__exit__(None, None, None)
+
+    async def _on_item_scraped(self, item):
+        item = await get_from_asyncio_queue(item)
+        self.items.append(item)
+
+    @mark.only_asyncio()
+    @defer.inlineCallbacks
+    def test_simple_pipeline(self):
+        crawler = get_crawler(ItemSpider)
+        crawler.signals.connect(self._on_item_scraped, signals.item_scraped)
+        yield crawler.crawl(mockserver=self.mockserver)
+        self.assertEqual(len(self.items), 10)
+        for index in range(10):
+            self.assertIn({'index': index}, self.items)
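The round-trip through get_from_asyncio_queue() is what makes this an asyncio test rather than just a coroutine test: the item has to pass through an asyncio.Queue, which only works when the handler actually runs on the asyncio event loop (hence @mark.only_asyncio()). For reference, a minimal stand-in with the same shape as the helper in scrapy.utils.test (not the exact upstream code):

    import asyncio

    async def get_from_asyncio_queue(value):
        q = asyncio.Queue()
        getter = q.get()    # create the get() coroutine first
        await q.put(value)  # queue is unbounded, so this never blocks
        return await getter # resolves immediately with `value`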
tests/test_utils_signal.py
@@ -1,3 +1,6 @@
+import asyncio
+
+from pytest import mark
 from testfixtures import LogCapture
 from twisted.trial import unittest
 from twisted.python.failure import Failure
@@ -5,6 +8,7 @@ from twisted.internet import defer, reactor
 from pydispatch import dispatcher
 
 from scrapy.utils.signal import send_catch_log, send_catch_log_deferred
+from scrapy.utils.test import get_from_asyncio_queue
 
 
 class SendCatchLogTest(unittest.TestCase):
@@ -54,7 +58,7 @@ class SendCatchLogDeferredTest(SendCatchLogTest):
         return send_catch_log_deferred(signal, *a, **kw)
 
 
-class SendCatchLogDeferredTest2(SendCatchLogTest):
+class SendCatchLogDeferredTest2(SendCatchLogDeferredTest):
 
     def ok_handler(self, arg, handlers_called):
         handlers_called.add(self.ok_handler)
@@ -63,8 +67,24 @@ class SendCatchLogDeferredTest2(SendCatchLogTest):
         reactor.callLater(0, d.callback, "OK")
         return d
 
-    def _get_result(self, signal, *a, **kw):
-        return send_catch_log_deferred(signal, *a, **kw)
+
+class SendCatchLogDeferredAsyncDefTest(SendCatchLogDeferredTest):
+
+    async def ok_handler(self, arg, handlers_called):
+        handlers_called.add(self.ok_handler)
+        assert arg == 'test'
+        await defer.succeed(42)
+        return "OK"
+
+
+@mark.only_asyncio()
+class SendCatchLogDeferredAsyncioTest(SendCatchLogDeferredTest):
+
+    async def ok_handler(self, arg, handlers_called):
+        handlers_called.add(self.ok_handler)
+        assert arg == 'test'
+        await asyncio.sleep(0.2)
+        return await get_from_asyncio_queue("OK")
 
 
 class SendCatchLogTest2(unittest.TestCase):
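Note the split between the two new test classes: SendCatchLogDeferredAsyncDefTest only awaits a Twisted Deferred, so it runs under any reactor, while SendCatchLogDeferredAsyncioTest awaits asyncio.sleep() and the asyncio queue helper, so it is gated behind @mark.only_asyncio(). In a Scrapy project the corresponding switch is the TWISTED_REACTOR setting introduced alongside this asyncio work, e.g. in settings.py:

    # Select the asyncio-backed Twisted reactor so that async def signal
    # handlers may await asyncio code; handlers that only await Deferreds
    # work under any reactor.
    TWISTED_REACTOR = 'twisted.internet.asyncioreactor.AsyncioSelectorReactor'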