Mirror of https://github.com/scrapy/scrapy.git (synced 2025-02-26 07:03:49 +00:00)
Add the scrapyd service, with support for:

* multiple projects
* uploading scrapy projects as Python eggs
* scheduling spiders using a JSON API

Documentation is added along with the code. Closes #218.

--HG--
rename : debian/scrapy-service.default => debian/scrapyd.default
rename : debian/scrapy-service.dirs => debian/scrapyd.dirs
rename : debian/scrapy-service.install => debian/scrapyd.install
rename : debian/scrapy-service.lintian-overrides => debian/scrapyd.lintian-overrides
rename : debian/scrapy-service.postinst => debian/scrapyd.postinst
rename : debian/scrapy-service.postrm => debian/scrapyd.postrm
rename : debian/scrapy-service.upstart => debian/scrapyd.upstart
rename : extras/scrapy.tac => extras/scrapyd.tac
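As a concrete illustration of the JSON scheduling API mentioned above, here is a minimal client sketch. It assumes a running scrapyd instance on the conventional localhost:6800 address and a schedule.json endpoint taking project and spider parameters; the project and spider names below are placeholders, not part of this commit.

    # Hypothetical client call: POST a scheduling request to scrapyd's JSON API.
    # Host, port, project, and spider are illustrative placeholders.
    from urllib.parse import urlencode
    from urllib.request import urlopen

    data = urlencode({'project': 'myproject', 'spider': 'myspider'}).encode()
    response = urlopen('http://localhost:6800/schedule.json', data)
    print(response.read())  # a JSON status message from the service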
24 lines · 656 B · Python
from zope.interface import implements

from .interfaces import ISpiderScheduler
from .utils import get_spider_queues


class SpiderScheduler(object):
    """Schedules spider runs by pushing them onto per-project queues."""

    implements(ISpiderScheduler)

    def __init__(self, config):
        # Directories holding uploaded project eggs and the queue databases.
        self.eggs_dir = config.get('eggs_dir', 'eggs')
        self.dbs_dir = config.get('dbs_dir', 'dbs')
        self.update_projects()

    def schedule(self, project, spider_name, **spider_args):
        # Enqueue a run of the named spider, forwarding any spider arguments.
        q = self.queues[project]
        q.add(spider_name, **spider_args)

    def list_projects(self):
        # The known projects are exactly those that have a spider queue.
        return self.queues.keys()

    def update_projects(self):
        # Rebuild the project -> queue mapping from the eggs on disk.
        self.queues = get_spider_queues(self.eggs_dir, self.dbs_dir)
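For illustration, a minimal sketch of driving SpiderScheduler directly. The DictConfig stand-in and the project/spider names are hypothetical; all the scheduler requires of its config argument is a get(key, default) method, as used in __init__ above.

    # Usage sketch; DictConfig and the names below are illustrative only.
    class DictConfig(object):
        """Stand-in for scrapyd's config object: exposes get(key, default)."""
        def __init__(self, values):
            self.values = values
        def get(self, key, default=None):
            return self.values.get(key, default)

    scheduler = SpiderScheduler(DictConfig({'eggs_dir': 'eggs', 'dbs_dir': 'dbs'}))
    print(scheduler.list_projects())             # projects with an uploaded egg
    scheduler.schedule('myproject', 'myspider')  # enqueue a run for that spider

Note that schedule() only enqueues the run; polling the queues and actually launching the spider processes is handled elsewhere in the service.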