From 841e9913dbbc35735d3a269402ed0996ecc19c30 Mon Sep 17 00:00:00 2001
From: Pablo Hoffman
Date: Mon, 13 Jun 2011 16:58:51 -0300
Subject: [PATCH] renamed CLOSESPIDER_ITEMPASSED setting to CLOSESPIDER_ITEMCOUNT, to follow the refactoring done in r2630

---
 docs/topics/extensions.rst          | 10 +++++-----
 scrapy/contrib/closespider.py       |  9 +++++----
 scrapy/settings/default_settings.py |  2 +-
 3 files changed, 11 insertions(+), 10 deletions(-)

diff --git a/docs/topics/extensions.rst b/docs/topics/extensions.rst
index 671edbb22..5848471a2 100644
--- a/docs/topics/extensions.rst
+++ b/docs/topics/extensions.rst
@@ -280,7 +280,7 @@ The conditions for closing a spider can be configured through the following
 settings:
 
 * :setting:`CLOSESPIDER_TIMEOUT`
-* :setting:`CLOSESPIDER_ITEMPASSED`
+* :setting:`CLOSESPIDER_ITEMCOUNT`
 * :setting:`CLOSESPIDER_PAGECOUNT`
 * :setting:`CLOSESPIDER_ERRORCOUNT`
 
@@ -296,16 +296,16 @@ more than that number of seconds, it will be automatically closed with the
 reason ``closespider_timeout``. If zero (or not set), spiders won't be closed
 by timeout.
 
-.. setting:: CLOSESPIDER_ITEMPASSED
+.. setting:: CLOSESPIDER_ITEMCOUNT
 
-CLOSESPIDER_ITEMPASSED
-""""""""""""""""""""""
+CLOSESPIDER_ITEMCOUNT
+"""""""""""""""""""""
 
 Default: ``0``
 
 An integer which specifies a number of items. If the spider scrapes more than
 that amount of items and those items are passed by the item pipeline, the
-spider will be closed with the reason ``closespider_itempassed``. If zero (or
+spider will be closed with the reason ``closespider_itemcount``. If zero (or
 not set), spiders won't be closed by the number of passed items.
 
 .. setting:: CLOSESPIDER_PAGECOUNT
diff --git a/scrapy/contrib/closespider.py b/scrapy/contrib/closespider.py
index a30605eee..71dd1c1ec 100644
--- a/scrapy/contrib/closespider.py
+++ b/scrapy/contrib/closespider.py
@@ -18,7 +18,8 @@ class CloseSpider(object):
 
     def __init__(self):
         self.timeout = settings.getint('CLOSESPIDER_TIMEOUT')
-        self.itempassed = settings.getint('CLOSESPIDER_ITEMPASSED')
+        self.itemcount = settings.getint('CLOSESPIDER_ITEMCOUNT') or \
+            settings.getint('CLOSESPIDER_ITEMPASSED')  # XXX: legacy support
         self.pagecount = settings.getint('CLOSESPIDER_PAGECOUNT')
         self.errorcount = settings.getint('CLOSESPIDER_ERRORCOUNT')
 
@@ -33,7 +34,7 @@ class CloseSpider(object):
             dispatcher.connect(self.page_count, signal=signals.response_received)
         if self.timeout:
             dispatcher.connect(self.spider_opened, signal=signals.spider_opened)
-        if self.itempassed:
+        if self.itemcount:
             dispatcher.connect(self.item_scraped, signal=signals.item_scraped)
         dispatcher.connect(self.spider_closed, signal=signals.spider_closed)
 
@@ -57,8 +58,8 @@ class CloseSpider(object):
 
     def item_scraped(self, item, spider):
         self.counts[spider] += 1
-        if self.counts[spider] == self.itempassed:
-            crawler.engine.close_spider(spider, 'closespider_itempassed')
+        if self.counts[spider] == self.itemcount:
+            crawler.engine.close_spider(spider, 'closespider_itemcount')
 
     def spider_closed(self, spider):
         self.counts.pop(spider, None)
diff --git a/scrapy/settings/default_settings.py b/scrapy/settings/default_settings.py
index 301d69017..0ac7e9216 100644
--- a/scrapy/settings/default_settings.py
+++ b/scrapy/settings/default_settings.py
@@ -21,7 +21,7 @@ BOT_VERSION = '1.0'
 
 CLOSESPIDER_TIMEOUT = 0
 CLOSESPIDER_PAGECOUNT = 0
-CLOSESPIDER_ITEMPASSED = 0
+CLOSESPIDER_ITEMCOUNT = 0
 
 COMMANDS_MODULE = ''
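
The __init__ hunk keeps projects that still set the old name working through a
short-circuit ``or``: CLOSESPIDER_ITEMCOUNT is read first, and the deprecated
CLOSESPIDER_ITEMPASSED is only consulted when the new setting is zero or unset.
A minimal self-contained sketch of that fallback idiom, where a plain dict and
a hypothetical getint() helper stand in for Scrapy's settings object:

    # Sketch of the legacy-fallback idiom from __init__ above.
    # A plain dict plus this getint() helper stand in for Scrapy's
    # settings object; like settings.getint(), missing keys yield 0.
    def getint(settings, name):
        return int(settings.get(name, 0))

    old_style = {'CLOSESPIDER_ITEMPASSED': 50}   # project on the old name
    new_style = {'CLOSESPIDER_ITEMCOUNT': 100}   # project on the new name

    for settings in (old_style, new_style):
        # New name wins when non-zero; otherwise fall back to the old one.
        itemcount = getint(settings, 'CLOSESPIDER_ITEMCOUNT') or \
                    getint(settings, 'CLOSESPIDER_ITEMPASSED')
        print(itemcount)  # prints 50, then 100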
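
After this patch, a project enables the condition under its new name in its
settings module. A sketch of such a module (a hypothetical settings.py with
illustrative values; only the CLOSESPIDER_* names come from the patch):

    # settings.py -- hypothetical project settings, illustrative values.
    # Close the spider once 100 items have passed the item pipeline;
    # the close reason will be 'closespider_itemcount'.
    CLOSESPIDER_ITEMCOUNT = 100

    # Also close the spider if it stays open for more than an hour;
    # the close reason will be 'closespider_timeout'.
    CLOSESPIDER_TIMEOUT = 3600

    # A condition left at 0 (the default) is disabled.
    CLOSESPIDER_PAGECOUNT = 0
    CLOSESPIDER_ERRORCOUNT = 0

Projects that still set CLOSESPIDER_ITEMPASSED keep working for now thanks to
the fallback in closespider.py, but CLOSESPIDER_ITEMCOUNT is the documented name.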