diff --git a/scrapy/trunk/scrapy/contrib/pbcluster/master/manager.py b/scrapy/trunk/scrapy/contrib/pbcluster/master/manager.py
index b4e1e7fc7..c889facb0 100644
--- a/scrapy/trunk/scrapy/contrib/pbcluster/master/manager.py
+++ b/scrapy/trunk/scrapy/contrib/pbcluster/master/manager.py
@@ -106,7 +106,7 @@ class ClusterMaster(object):
         self.get_spider_groupsettings = lambda x: {}
         #load pending domains
         try:
-            self.pending = pickle.load( open("pending_cache_%s" % socket.gethostname(), "r") )
+            self.pending = pickle.load( open(settings["CLUSTER_MASTER_CACHEFILE"], "r") )
         except IOError:
             self.pending = []
 
@@ -216,4 +216,4 @@ class ClusterMaster(object):
         self.load_nodes()
         scrapyengine.addtask(self.update_nodes, settings.getint('CLUSTER_MASTER_POLL_INTERVAL'))
     def _engine_stopped(self):
-        pickle.dump( self.pending, open("pending_cache_%s" % socket.gethostname(), "w") )
\ No newline at end of file
+        pickle.dump( self.pending, open(settings["CLUSTER_MASTER_CACHEFILE"], "w") )
diff --git a/scrapy/trunk/scrapy/contrib/pbcluster/worker/manager.py b/scrapy/trunk/scrapy/contrib/pbcluster/worker/manager.py
index 837ce34e7..07c43f66b 100644
--- a/scrapy/trunk/scrapy/contrib/pbcluster/worker/manager.py
+++ b/scrapy/trunk/scrapy/contrib/pbcluster/worker/manager.py
@@ -25,7 +25,7 @@ class ScrapyProcessProtocol(protocol.ProcessProtocol):
         self.scrapy_settings.update({'LOGFILE': self.logfile, 'CLUSTER_WORKER_ENABLED': '0', 'WEBCONSOLE_ENABLED': '0'})
         for k in self.scrapy_settings:
             self.env["SCRAPY_%s" % k] = str(self.scrapy_settings[k])
-        self.env["PYTHONPATH"] = ":".join(sys.path)
+        self.env["PYTHONPATH"] = ":".join(sys.path) #this is needed so this crawl process knows where to locate local_scrapy_settings
 
     def __str__(self):
         return "<ScrapyProcess domain=%s, pid=%s, status=%s>" % (self.domain, self.pid, self.status)
@@ -93,7 +93,7 @@ class ClusterWorker(pb.Root):
         try:
             import pysvn
             c=pysvn.Client()
-            r = c.update(settings["SVN_WORKDIR"] or ".")
+            r = c.update(settings["CLUSTER_WORKER_SVNWORKDIR"] or ".")
             log.msg("Updated to revision %s." %r[0].number )
         except:
             pass
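
Note: this patch moves two hardcoded values into the settings object, so both new keys must resolve to something at runtime. A minimal sketch of defaults that would preserve the previous behavior follows; the key names come from the diff itself, but the default values and the idea of placing them in a default-settings module are assumptions, not part of this change.

    import socket

    # Assumed default for the new master setting: reuse the filename the old
    # code hardcoded ("pending_cache_<hostname>"), so existing deployments
    # keep reading/writing the same pending-domains cache file.
    CLUSTER_MASTER_CACHEFILE = "pending_cache_%s" % socket.gethostname()

    # Assumed default for the renamed worker setting (formerly SVN_WORKDIR).
    # The worker already falls back via `or "."`, so an empty default is safe;
    # "." matches that fallback explicitly.
    CLUSTER_WORKER_SVNWORKDIR = "."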