1
0
mirror of https://github.com/scrapy/scrapy.git synced 2025-02-26 12:43:48 +00:00

minor code rearrangement for consistency

This commit is contained in:
Pablo Hoffman 2011-07-26 18:49:01 -03:00
parent dd020e184f
commit cb9c937f50
2 changed files with 2 additions and 2 deletions

View File

@@ -57,7 +57,6 @@ class Downloader(object):
         self.slots = {}
         self.handlers = DownloadHandlers()
         self.middleware = DownloaderMiddlewareManager.from_settings(settings)
-        self.concurrent_spiders = settings.getint('CONCURRENT_SPIDERS')

     def fetch(self, request, spider):
         slot = self.slots[spider]

View File

@@ -59,6 +59,7 @@ class ExecutionEngine(object):
         self.scheduler = load_object(settings['SCHEDULER'])()
         self.downloader = Downloader()
         self.scraper = Scraper(self, self.settings)
+        self._concurrent_spiders = settings.getint('CONCURRENT_SPIDERS')
         self._spider_closed_callback = spider_closed_callback

     @defer.inlineCallbacks
@@ -164,7 +165,7 @@ class ExecutionEngine(object):
     def has_capacity(self):
         """Does the engine have capacity to handle more spiders"""
-        return len(self.downloader.slots) < self.downloader.concurrent_spiders
+        return len(self.slots) < self._concurrent_spiders

     def crawl(self, request, spider):
         assert spider in self.open_spiders, \