Mirror of https://github.com/scrapy/scrapy.git, synced 2025-02-26 07:24:06 +00:00
* multiple projects
* uploading scrapy projects as Python eggs
* scheduling spiders using a JSON API

Documentation is added along with the code. Closes #218.

--HG--
rename : debian/scrapy-service.default => debian/scrapyd.default
rename : debian/scrapy-service.dirs => debian/scrapyd.dirs
rename : debian/scrapy-service.install => debian/scrapyd.install
rename : debian/scrapy-service.lintian-overrides => debian/scrapyd.lintian-overrides
rename : debian/scrapy-service.postinst => debian/scrapyd.postinst
rename : debian/scrapy-service.postrm => debian/scrapyd.postrm
rename : debian/scrapy-service.upstart => debian/scrapyd.upstart
rename : extras/scrapy.tac => extras/scrapyd.tac
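As a quick illustration of the scheduling feature, the sketch below submits a spider run over HTTP. It assumes the schedule.json endpoint and default port 6800 documented for later scrapyd releases; 'myproject' and 'myspider' are placeholder names, not anything defined in this commit.

# Sketch: schedule a spider run through scrapyd's JSON API.
# Assumes the schedule.json endpoint and port 6800 of later scrapyd
# releases; 'myproject' and 'myspider' are hypothetical names.
import json
import urllib.parse
import urllib.request

params = urllib.parse.urlencode({'project': 'myproject', 'spider': 'myspider'}).encode()
with urllib.request.urlopen('http://localhost:6800/schedule.json', data=params) as response:
    print(json.load(response))  # e.g. {"status": "ok", "jobid": "..."}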
25 lines
673 B
Python
import os

from zope.interface import implements

from .interfaces import IEnvironment


class Environment(object):
    """Builds the OS environment passed to each spider process."""

    implements(IEnvironment)

    def __init__(self, config):
        # Directories for the per-project SQLite databases and the
        # per-slot log files; both are configurable, with relative
        # defaults.
        self.dbs_dir = config.get('dbs_dir', 'dbs')
        self.logs_dir = config.get('logs_dir', 'logs')

    def get_environment(self, message, slot):
        # Copy the current environment and point Scrapy at the right
        # project, SQLite database and log file for this run slot.
        project = message['project']
        env = os.environ.copy()
        env['SCRAPY_PROJECT'] = project
        dbpath = os.path.join(self.dbs_dir, '%s.db' % project)
        env['SCRAPY_SQLITE_DB'] = dbpath
        logpath = os.path.join(self.logs_dir, 'slot%s.log' % slot)
        env['SCRAPY_LOG_FILE'] = logpath
        return env
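For illustration, a caller might use Environment.get_environment along these lines when spawning a crawl process. The config stand-in, the message dict, and the command line are assumptions for the sketch, not part of this file.

# Usage sketch (hypothetical caller, not part of this file).
import subprocess

class _DefaultsConfig(object):
    """Stand-in for scrapyd's config object: always returns the default."""
    def get(self, option, default=None):
        return default

env = Environment(_DefaultsConfig()).get_environment({'project': 'mybot'}, slot=0)
# env now carries SCRAPY_PROJECT, SCRAPY_SQLITE_DB and SCRAPY_LOG_FILE,
# so a Scrapy process started with it can pick up the per-project
# database path (dbs/mybot.db) and the per-slot log file (logs/slot0.log).
subprocess.Popen(['scrapy', 'crawl', 'somespider'], env=env)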