mirror of https://github.com/scrapy/scrapy.git synced 2025-02-24 13:03:47 +00:00

Log full cache file path instead of cache directory for the storages that cache to single files.

author jorenham 2017-03-03 15:15:59 +01:00
parent f96490df2c
commit 42b429dc37


@@ -220,12 +220,12 @@ class DbmCacheStorage(object):
         self.dbmodule = import_module(settings['HTTPCACHE_DBM_MODULE'])
         self.db = None
-        logger.debug("Using DBM cache storage in %(cachedir)s" % {'cachedir': self.cachedir})

     def open_spider(self, spider):
         dbpath = os.path.join(self.cachedir, '%s.db' % spider.name)
         self.db = self.dbmodule.open(dbpath, 'c')
+        logger.debug("Using DBM cache storage in %(cachepath)s" % {'cachepath': dbpath}, extra={'spider': spider})

     def close_spider(self, spider):
         self.db.close()
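For context, a minimal settings.py sketch of enabling the DBM-backed HTTP cache; the spider name and the resolved paths in the comments are illustrative assumptions, not taken from this diff:

# settings.py -- enable the DBM cache backend (sketch; the spider name "example"
# and the .scrapy/ paths in the comments below are assumptions for illustration).
HTTPCACHE_ENABLED = True
HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.DbmCacheStorage'
HTTPCACHE_DIR = 'httpcache'      # resolved under the project's .scrapy/ data dir
HTTPCACHE_DBM_MODULE = 'dbm'     # any module exposing a dbm-style open()

# Old debug line (directory only):  Using DBM cache storage in .scrapy/httpcache
# New debug line (per-spider file): Using DBM cache storage in .scrapy/httpcache/example.db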
@@ -352,12 +352,12 @@ class LeveldbCacheStorage(object):
         self.expiration_secs = settings.getint('HTTPCACHE_EXPIRATION_SECS')
         self.db = None
-        logger.debug("Using LevelDB cache storage in %(cachedir)s" % {'cachedir': self.cachedir})

     def open_spider(self, spider):
         dbpath = os.path.join(self.cachedir, '%s.leveldb' % spider.name)
         self.db = self._leveldb.LevelDB(dbpath)
+        logger.debug("Using LevelDB cache storage in %(cachepath)s" % {'cachepath': dbpath}, extra={'spider': spider})

     def close_spider(self, spider):
         # Do compactation each time to save space and also recreate files to
         # avoid them being removed in storages with timestamp-based autoremoval.
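The LevelDB storage gets the same treatment. A standalone sketch of how the logged path is built, and why the message has to move into open_spider (spider.name is only known once a spider is opened); the cache dir and spider name here are illustrative assumptions:

import os

# Illustrative values, not from the diff: a cache dir and a spider named "quotes".
cachedir = '.scrapy/httpcache'
spider_name = 'quotes'

# Same construction as LeveldbCacheStorage.open_spider: one database file per spider.
dbpath = os.path.join(cachedir, '%s.leveldb' % spider_name)
print(dbpath)  # -> .scrapy/httpcache/quotes.leveldb (the full path now logged)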