1
0
mirror of https://github.com/scrapy/scrapy.git synced 2025-02-21 07:32:44 +00:00

added CLUSTER_MASTER_CACHEFILE setting, changed SVN_WORKDIR to

CLUSTER_WORKER_SVNWORKDIR

--HG--
extra : convert_revision : svn%3Ab85faa78-f9eb-468e-a121-7cced6da292c%4047
This commit is contained in:
olveyra 2008-07-07 18:00:35 +00:00
parent e78f7a93bd
commit 863d94adec
2 changed files with 4 additions and 4 deletions

View File

@ -106,7 +106,7 @@ class ClusterMaster(object):
self.get_spider_groupsettings = lambda x: {}
#load pending domains
try:
self.pending = pickle.load( open("pending_cache_%s" % socket.gethostname(), "r") )
self.pending = pickle.load( open(settings["CLUSTER_MASTER_CACHEFILE"], "r") )
except IOError:
self.pending = []
@ -216,4 +216,4 @@ class ClusterMaster(object):
self.load_nodes()
scrapyengine.addtask(self.update_nodes, settings.getint('CLUSTER_MASTER_POLL_INTERVAL'))
def _engine_stopped(self):
pickle.dump( self.pending, open("pending_cache_%s" % socket.gethostname(), "w") )
pickle.dump( self.pending, open(settings["CLUSTER_MASTER_CACHEFILE"], "w") )

View File

@ -25,7 +25,7 @@ class ScrapyProcessProtocol(protocol.ProcessProtocol):
self.scrapy_settings.update({'LOGFILE': self.logfile, 'CLUSTER_WORKER_ENABLED': '0', 'WEBCONSOLE_ENABLED': '0'})
for k in self.scrapy_settings:
self.env["SCRAPY_%s" % k] = str(self.scrapy_settings[k])
self.env["PYTHONPATH"] = ":".join(sys.path)
self.env["PYTHONPATH"] = ":".join(sys.path)  # this is needed so the crawl process knows where to locate local_scrapy_settings.
def __str__(self):
return "<ScrapyProcess domain=%s, pid=%s, status=%s>" % (self.domain, self.pid, self.status)
@ -93,7 +93,7 @@ class ClusterWorker(pb.Root):
try:
import pysvn
c=pysvn.Client()
r = c.update(settings["SVN_WORKDIR"] or ".")
r = c.update(settings["CLUSTER_WORKER_SVNWORKDIR"] or ".")
log.msg("Updated to revision %s." %r[0].number )
except:
pass