1
0
mirror of https://github.com/scrapy/scrapy.git synced 2025-02-23 11:44:06 +00:00

Merge pull request #775 from kmike/rename-sflo

rename "sflo" local variables to less cryptic "log_observer"
This commit is contained in:
Daniel Graña 2014-07-02 11:49:23 -03:00
commit e87dc37741
3 changed files with 13 additions and 13 deletions

View File

@@ -135,12 +135,12 @@ class CrawlerProcess(object):
name, crawler = self.crawlers.popitem()
self._active_crawler = crawler
sflo = log.start_from_crawler(crawler)
log_observer = log.start_from_crawler(crawler)
crawler.configure()
crawler.install()
crawler.signals.connect(crawler.uninstall, signals.engine_stopped)
if sflo:
crawler.signals.connect(sflo.stop, signals.engine_stopped)
if log_observer:
crawler.signals.connect(log_observer.stop, signals.engine_stopped)
crawler.signals.connect(self._check_done, signals.engine_stopped)
crawler.start()
return name, crawler

View File

@@ -114,12 +114,12 @@ def _get_log_level(level_name_or_id):
def start(logfile=None, loglevel='INFO', logstdout=True, logencoding='utf-8', crawler=None):
loglevel = _get_log_level(loglevel)
file = open(logfile, 'a') if logfile else sys.stderr
sflo = ScrapyFileLogObserver(file, loglevel, logencoding, crawler)
log_observer = ScrapyFileLogObserver(file, loglevel, logencoding, crawler)
_oldshowwarning = warnings.showwarning
log.startLoggingWithObserver(sflo.emit, setStdout=logstdout)
log.startLoggingWithObserver(log_observer.emit, setStdout=logstdout)
# restore warnings, wrongly silenced by Twisted
warnings.showwarning = _oldshowwarning
return sflo
return log_observer
def msg(message=None, _level=INFO, **kw):
kw['logLevel'] = kw.pop('level', _level)
@@ -140,9 +140,9 @@ def start_from_settings(settings, crawler=None):
settings['LOG_ENCODING'], crawler)
def scrapy_info(settings):
sflo = start_from_settings(settings)
if sflo:
msg("Scrapy %s started (bot: %s)" % (scrapy.__version__, \
log_observer = start_from_settings(settings)
if log_observer:
msg("Scrapy %s started (bot: %s)" % (scrapy.__version__,
settings['BOT_NAME']))
msg("Optional features available: %s" % ", ".join(scrapy.optional_features),
@@ -151,7 +151,7 @@ def scrapy_info(settings):
d = dict(overridden_settings(settings))
msg(format="Overridden settings: %(settings)r", settings=d, level=INFO)
sflo.stop()
log_observer.stop()
def start_from_crawler(crawler):
return start_from_settings(crawler.settings, crawler)

View File

@@ -22,12 +22,12 @@ class ScrapyFileLogObserverTest(unittest.TestCase):
def setUp(self):
self.f = StringIO()
self.sflo = log.ScrapyFileLogObserver(self.f, self.level, self.encoding)
self.sflo.start()
self.log_observer = log.ScrapyFileLogObserver(self.f, self.level, self.encoding)
self.log_observer.start()
def tearDown(self):
self.flushLoggedErrors()
self.sflo.stop()
self.log_observer.stop()
def logged(self):
return self.f.getvalue().strip()[25:]