mirror of https://github.com/scrapy/scrapy.git synced 2025-02-25 12:43:41 +00:00

added from_crawler class method to SpiderManager

Pablo Hoffman 2011-08-16 11:16:35 -03:00
parent a3697421c0
commit ee40aa1223
3 changed files with 7 additions and 7 deletions

View File

@@ -1,6 +1,4 @@
 from scrapy.command import ScrapyCommand
-from scrapy.utils.misc import load_object
-from scrapy.conf import settings
 
 
 class Command(ScrapyCommand):
@@ -11,7 +9,5 @@ class Command(ScrapyCommand):
         return "List available spiders"
 
     def run(self, args, opts):
-        spman_cls = load_object(settings['SPIDER_MANAGER_CLASS'])
-        spiders = spman_cls.from_settings(settings)
-        for s in spiders.list():
+        for s in self.crawler.spiders.list():
             print s

View File

@@ -33,7 +33,7 @@ class Crawler(object):
         self.configured = True
         self.extensions = ExtensionManager.from_crawler(self)
         spman_cls = load_object(self.settings['SPIDER_MANAGER_CLASS'])
-        self.spiders = spman_cls.from_settings(self.settings)
+        self.spiders = spman_cls.from_crawler(self)
         self.engine = ExecutionEngine(self, self._spider_closed)
 
     def crawl(self, spider, requests=None):

View File

@@ -5,7 +5,7 @@ spiders
 
 from zope.interface import implements
 
-from scrapy import log, signals
+from scrapy import signals
 from scrapy.interfaces import ISpiderManager
 from scrapy.utils.misc import walk_modules
 from scrapy.utils.spider import iter_spider_classes
@@ -32,6 +32,10 @@ class SpiderManager(object):
     def from_settings(cls, settings):
         return cls(settings.getlist('SPIDER_MODULES'))
 
+    @classmethod
+    def from_crawler(cls, crawler):
+        return cls.from_settings(crawler.settings)
+
     def create(self, spider_name, **spider_kwargs):
         try:
             return self._spiders[spider_name](**spider_kwargs)
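
A minimal sketch of the construction pattern this commit introduces (the ExampleSpiderManager class below is illustrative, not part of the commit): a component keeps from_settings() for settings-only construction and gains from_crawler(), which simply delegates to from_settings(crawler.settings). Code that already holds a Crawler no longer needs to load SPIDER_MANAGER_CLASS and call from_settings() itself, as the list command previously did.

    class ExampleSpiderManager(object):
        """Illustrative stand-in following the SpiderManager pattern."""

        def __init__(self, spider_modules):
            self.spider_modules = spider_modules

        @classmethod
        def from_settings(cls, settings):
            # Settings-only construction, unchanged by this commit.
            return cls(settings.getlist('SPIDER_MODULES'))

        @classmethod
        def from_crawler(cls, crawler):
            # New hook: build from a Crawler by delegating to from_settings(),
            # so both construction paths stay equivalent.
            return cls.from_settings(crawler.settings)

    # With this in place the crawler builds its manager as
    #     self.spiders = spman_cls.from_crawler(self)
    # and commands reach it through self.crawler.spiders.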