mirror of https://github.com/scrapy/scrapy.git
Merge pull request #1284 from scrapy/crawler-cleanup
CrawlerProcess cleanup
commit 90aa5c075d
@@ -216,7 +216,7 @@ class CrawlerProcess(CrawlerRunner):
         signame = signal_names[signum]
         logger.info("Received %(signame)s, shutting down gracefully. Send again to force ",
                     {'signame': signame})
-        reactor.callFromThread(self.stop)
+        reactor.callFromThread(self._graceful_stop_reactor)
 
     def _signal_kill(self, signum, _):
         install_shutdown_handlers(signal.SIG_IGN)
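The hunk above routes the first Ctrl-C through _graceful_stop_reactor instead of a bare self.stop(), while _signal_kill (armed as the new handler when the first signal arrives) forces shutdown on a repeated signal. A minimal standalone sketch of that two-stage pattern, using only the stdlib signal module (the function names below are illustrative, not Scrapy APIs):

    import signal
    import sys

    def install_two_stage_handler(graceful_stop):
        # Illustrative two-stage shutdown: the first SIGINT requests a
        # graceful stop; a second SIGINT forces an immediate exit.
        def force(signum, frame):
            signal.signal(signal.SIGINT, signal.SIG_IGN)  # ignore further signals
            sys.exit(1)

        def first(signum, frame):
            signal.signal(signal.SIGINT, force)  # escalate on the next signal
            graceful_stop()

        signal.signal(signal.SIGINT, first)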
@@ -242,16 +242,30 @@ class CrawlerProcess(CrawlerRunner):
         # Don't start the reactor if the deferreds are already fired
         if d.called:
             return
-        d.addBoth(lambda _: self._stop_reactor())
+        d.addBoth(self._stop_reactor)
 
-        cache_size = self.settings.getint('DNSCACHE_SIZE') if self.settings.getbool('DNSCACHE_ENABLED') else 0
-        reactor.installResolver(CachingThreadedResolver(reactor, cache_size,
-                                                        self.settings.getfloat('DNS_TIMEOUT')))
+        reactor.installResolver(self._get_dns_resolver())
         tp = reactor.getThreadPool()
         tp.adjustPoolsize(maxthreads=self.settings.getint('REACTOR_THREADPOOL_MAXSIZE'))
         reactor.addSystemEventTrigger('before', 'shutdown', self.stop)
         reactor.run(installSignalHandlers=False)  # blocking call
 
+    def _get_dns_resolver(self):
+        if self.settings.getbool('DNSCACHE_ENABLED'):
+            cache_size = self.settings.getint('DNSCACHE_SIZE')
+        else:
+            cache_size = 0
+        return CachingThreadedResolver(
+            reactor=reactor,
+            cache_size=cache_size,
+            timeout=self.settings.getfloat('DNS_TIMEOUT')
+        )
+
+    def _graceful_stop_reactor(self):
+        d = self.stop()
+        d.addBoth(self._stop_reactor)
+        return d
+
+    def _stop_reactor(self, _=None):
+        try:
+            reactor.stop()
+        except RuntimeError:  # raised if already stopped or in shutdown stage
+            pass
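For context, a minimal sketch of driving CrawlerProcess from a script (the spider below is a made-up example, not part of this change): start() blocks in reactor.run(), and a Ctrl-C during the crawl now goes through _graceful_stop_reactor, which stops the running crawlers before stopping the reactor.

    import scrapy
    from scrapy.crawler import CrawlerProcess

    class ExampleSpider(scrapy.Spider):
        # Placeholder spider used only to exercise CrawlerProcess.
        name = 'example'
        start_urls = ['http://example.com']

        def parse(self, response):
            yield {'title': response.css('title::text').extract_first()}

    process = CrawlerProcess({'LOG_LEVEL': 'INFO'})
    process.crawl(ExampleSpider)
    process.start()  # blocking call, mirrors reactor.run() above;
                     # a first SIGINT triggers the graceful shutdown path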