Mirror of https://github.com/scrapy/scrapy.git, synced 2025-02-19 13:58:19 +00:00
fix typo
commit 4a91a5427d
parent 3769f75386
@@ -13,7 +13,7 @@ class SignalCatcherSpider(Spider):
         super(SignalCatcherSpider, self).__init__(*args, **kwargs)
         crawler.signals.connect(self.on_response_download,
                                 signal=request_left_downloader)
-        self.catched_times = 0
+        self.caught_times = 0
         self.start_urls = [url]
 
     @classmethod
@@ -22,7 +22,7 @@ class SignalCatcherSpider(Spider):
         return spider
 
     def on_response_download(self, request, spider):
-        self.catched_times = self.catched_times + 1
+        self.caught_times = self.caught_times + 1
 
 
 class TestCatching(TestCase):
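Taken together, the two hunks above describe the pattern the rest of the diff depends on: the spider connects a handler to the request_left_downloader signal when it is constructed and counts how many times that signal fires. Below is a minimal sketch of that pattern assembled from the diff context, using the corrected attribute name; the imports, the spider name, and the from_crawler signature are not shown in the diff and are assumed from standard Scrapy usage.

from scrapy.signals import request_left_downloader
from scrapy.spiders import Spider


class SignalCatcherSpider(Spider):
    # The spider name is an assumption; the diff does not show it.
    name = 'signal_catcher'

    def __init__(self, crawler, url, *args, **kwargs):
        super(SignalCatcherSpider, self).__init__(*args, **kwargs)
        # Count every firing of request_left_downloader, whether the request
        # succeeded, timed out, was dropped, or never connected.
        crawler.signals.connect(self.on_response_download,
                                signal=request_left_downloader)
        self.caught_times = 0
        self.start_urls = [url]

    @classmethod
    def from_crawler(cls, crawler, *args, **kwargs):
        # Pass the crawler through so __init__ can reach its signal manager.
        spider = cls(crawler, *args, **kwargs)
        return spider

    def on_response_download(self, request, spider):
        self.caught_times = self.caught_times + 1

The tests in the next hunk assert that caught_times is exactly 1 for a successful response, a download timeout, a dropped connection, and a DNS failure.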
@@ -38,23 +38,23 @@ class TestCatching(TestCase):
     def test_success(self):
         crawler = get_crawler(SignalCatcherSpider)
         yield crawler.crawl(self.mockserver.url("/status?n=200"))
-        self.assertEqual(crawler.spider.catched_times, 1)
+        self.assertEqual(crawler.spider.caught_times, 1)
 
     @defer.inlineCallbacks
     def test_timeout(self):
         crawler = get_crawler(SignalCatcherSpider,
                               {'DOWNLOAD_TIMEOUT': 0.1})
         yield crawler.crawl(self.mockserver.url("/delay?n=0.2"))
-        self.assertEqual(crawler.spider.catched_times, 1)
+        self.assertEqual(crawler.spider.caught_times, 1)
 
     @defer.inlineCallbacks
     def test_disconnect(self):
         crawler = get_crawler(SignalCatcherSpider)
         yield crawler.crawl(self.mockserver.url("/drop"))
-        self.assertEqual(crawler.spider.catched_times, 1)
+        self.assertEqual(crawler.spider.caught_times, 1)
 
     @defer.inlineCallbacks
     def test_noconnect(self):
         crawler = get_crawler(SignalCatcherSpider)
         yield crawler.crawl('http://thereisdefinetelynosuchdomain.com')
-        self.assertEqual(crawler.spider.catched_times, 1)
+        self.assertEqual(crawler.spider.caught_times, 1)
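Because the misspelled attribute is both assigned in the spider and asserted on in four tests, the rename has to cover every occurrence in one commit or the tests break. A throwaway check along these lines (purely illustrative; the tests/ path and the old and new names are the only inputs it assumes) would flag any occurrence that was missed:

from pathlib import Path

OLD, NEW = "catched_times", "caught_times"

# Report any line under tests/ that still uses the misspelled name.
for path in Path("tests").rglob("*.py"):
    lines = path.read_text(encoding="utf-8").splitlines()
    for lineno, line in enumerate(lines, start=1):
        if OLD in line:
            print(f"{path}:{lineno}: found {OLD!r}; expected {NEW!r}")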
|