1
0
mirror of https://github.com/scrapy/scrapy.git synced 2025-02-24 01:04:05 +00:00

renamed CLOSESPIDER_ITEMPASSED setting to CLOSESPIDER_ITEMCOUNT, to follow the refactoring done in r2630

This commit is contained in:
Pablo Hoffman 2011-06-13 16:58:51 -03:00
parent 5dea6be513
commit 841e9913db
3 changed files with 11 additions and 10 deletions

View File

@ -280,7 +280,7 @@ The conditions for closing a spider can be configured through the following
settings:
* :setting:`CLOSESPIDER_TIMEOUT`
* :setting:`CLOSESPIDER_ITEMPASSED`
* :setting:`CLOSESPIDER_ITEMCOUNT`
* :setting:`CLOSESPIDER_PAGECOUNT`
* :setting:`CLOSESPIDER_ERRORCOUNT`
@ -296,16 +296,16 @@ more than that number of seconds, it will be automatically closed with the
reason ``closespider_timeout``. If zero (or not set), spiders won't be closed by
timeout.
.. setting:: CLOSESPIDER_ITEMPASSED
.. setting:: CLOSESPIDER_ITEMCOUNT
CLOSESPIDER_ITEMPASSED
""""""""""""""""""""""
CLOSESPIDER_ITEMCOUNT
"""""""""""""""""""""
Default: ``0``
An integer which specifies a number of items. If the spider scrapes more than
that amount of items and those items are passed by the item pipeline, the
spider will be closed with the reason ``closespider_itempassed``. If zero (or
spider will be closed with the reason ``closespider_itemcount``. If zero (or
not set), spiders won't be closed by number of passed items.
.. setting:: CLOSESPIDER_PAGECOUNT

View File

@ -18,7 +18,8 @@ class CloseSpider(object):
def __init__(self):
    """Read the spider-auto-close thresholds from the project settings.

    Each threshold of 0 (the settings default) disables that close
    condition entirely.
    """
    self.timeout = settings.getint('CLOSESPIDER_TIMEOUT')
    # CLOSESPIDER_ITEMCOUNT is the current setting name; fall back to the
    # old CLOSESPIDER_ITEMPASSED name so pre-rename projects keep working.
    self.itemcount = settings.getint('CLOSESPIDER_ITEMCOUNT') or \
        settings.getint('CLOSESPIDER_ITEMPASSED')  # XXX: legacy support
    self.pagecount = settings.getint('CLOSESPIDER_PAGECOUNT')
    self.errorcount = settings.getint('CLOSESPIDER_ERRORCOUNT')
@ -33,7 +34,7 @@ class CloseSpider(object):
dispatcher.connect(self.page_count, signal=signals.response_received)
if self.timeout:
dispatcher.connect(self.spider_opened, signal=signals.spider_opened)
if self.itempassed:
if self.itemcount:
dispatcher.connect(self.item_scraped, signal=signals.item_scraped)
dispatcher.connect(self.spider_closed, signal=signals.spider_closed)
@ -57,8 +58,8 @@ class CloseSpider(object):
def item_scraped(self, item, spider):
self.counts[spider] += 1
if self.counts[spider] == self.itempassed:
crawler.engine.close_spider(spider, 'closespider_itempassed')
if self.counts[spider] == self.itemcount:
crawler.engine.close_spider(spider, 'closespider_itemcount')
def spider_closed(self, spider):
self.counts.pop(spider, None)

View File

@ -21,7 +21,7 @@ BOT_VERSION = '1.0'
CLOSESPIDER_TIMEOUT = 0
CLOSESPIDER_PAGECOUNT = 0
CLOSESPIDER_ITEMPASSED = 0
CLOSESPIDER_ITEMCOUNT = 0
COMMANDS_MODULE = ''