Mirror of https://github.com/scrapy/scrapy.git
Merge pull request #4052 from elacuesta/dummy_stats_collector_fix_elapsed_time
Fix TypeError when using DummyStatsCollector
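
Background for the fix: spider_closed() used to read 'start_time' back out of the stats collector to compute the elapsed time. DummyStatsCollector overrides the storage methods as no-ops, so get_value('start_time') returns its default of None, and subtracting None from a datetime raises TypeError. A minimal sketch of the failure mode before this patch:

    from datetime import datetime

    from scrapy.statscollectors import DummyStatsCollector
    from scrapy.utils.test import get_crawler

    stats = DummyStatsCollector(get_crawler())
    stats.set_value('start_time', datetime.utcnow())  # no-op: nothing is stored
    start_time = stats.get_value('start_time')        # returns the default, None
    datetime.utcnow() - start_time                    # TypeError: unsupported operand type(s)

The patch makes CoreStats remember the start time itself instead of round-tripping it through the collector.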
scrapy/extensions/corestats.py
@@ -1,14 +1,16 @@
 """
 Extension for collecting core stats like items scraped and start/finish times
 """
-import datetime
+from datetime import datetime
 
 from scrapy import signals
 
+
 class CoreStats(object):
 
     def __init__(self, stats):
         self.stats = stats
+        self.start_time = None
 
     @classmethod
     def from_crawler(cls, crawler):
@@ -21,11 +23,12 @@ class CoreStats(object):
         return o
 
     def spider_opened(self, spider):
-        self.stats.set_value('start_time', datetime.datetime.utcnow(), spider=spider)
+        self.start_time = datetime.utcnow()
+        self.stats.set_value('start_time', self.start_time, spider=spider)
 
     def spider_closed(self, spider, reason):
-        finish_time = datetime.datetime.utcnow()
-        elapsed_time = finish_time - self.stats.get_value('start_time')
+        finish_time = datetime.utcnow()
+        elapsed_time = finish_time - self.start_time
         elapsed_time_seconds = elapsed_time.total_seconds()
         self.stats.set_value('elapsed_time_seconds', elapsed_time_seconds, spider=spider)
         self.stats.set_value('finish_time', finish_time, spider=spider)
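With this change, spider_opened() stores the start time on the extension itself in addition to publishing it to the collector, and spider_closed() computes the elapsed time from self.start_time, so the arithmetic no longer depends on what the configured stats collector hands back. DummyStatsCollector is what Scrapy uses when stats collection is turned off via the STATS_CLASS setting, e.g. in a project's settings.py:

    # Disable stats collection entirely; CoreStats must still not crash
    STATS_CLASS = 'scrapy.statscollectors.DummyStatsCollector'

The second hunk, below, adds regression tests covering both the default collector and the dummy one.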
@@ -1,10 +1,59 @@
+from datetime import datetime
 import unittest
 
+try:
+    from unittest import mock
+except ImportError:
+    import mock
+
+from scrapy.extensions.corestats import CoreStats
 from scrapy.spiders import Spider
 from scrapy.statscollectors import StatsCollector, DummyStatsCollector
 from scrapy.utils.test import get_crawler
 
 
+class CoreStatsExtensionTest(unittest.TestCase):
+
+    def setUp(self):
+        self.crawler = get_crawler(Spider)
+        self.spider = self.crawler._create_spider('foo')
+
+    @mock.patch('scrapy.extensions.corestats.datetime')
+    def test_core_stats_default_stats_collector(self, mock_datetime):
+        fixed_datetime = datetime(2019, 12, 1, 11, 38)
+        mock_datetime.utcnow = mock.Mock(return_value=fixed_datetime)
+        self.crawler.stats = StatsCollector(self.crawler)
+        ext = CoreStats.from_crawler(self.crawler)
+        ext.spider_opened(self.spider)
+        ext.item_scraped({}, self.spider)
+        ext.response_received(self.spider)
+        ext.item_dropped({}, self.spider, ZeroDivisionError())
+        ext.spider_closed(self.spider, 'finished')
+        self.assertEqual(
+            ext.stats._stats,
+            {
+                'start_time': fixed_datetime,
+                'finish_time': fixed_datetime,
+                'item_scraped_count': 1,
+                'response_received_count': 1,
+                'item_dropped_count': 1,
+                'item_dropped_reasons_count/ZeroDivisionError': 1,
+                'finish_reason': 'finished',
+                'elapsed_time_seconds': 0.0,
+            }
+        )
+
+    def test_core_stats_dummy_stats_collector(self):
+        self.crawler.stats = DummyStatsCollector(self.crawler)
+        ext = CoreStats.from_crawler(self.crawler)
+        ext.spider_opened(self.spider)
+        ext.item_scraped({}, self.spider)
+        ext.response_received(self.spider)
+        ext.item_dropped({}, self.spider, ZeroDivisionError())
+        ext.spider_closed(self.spider, 'finished')
+        self.assertEqual(ext.stats._stats, {})
+
+
 class StatsCollectorTest(unittest.TestCase):
 
     def setUp(self):
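A note on the mocking in test_core_stats_default_stats_collector: @mock.patch targets 'scrapy.extensions.corestats.datetime', i.e. the name as the module under test looks it up, and pins utcnow() to a fixed value; that is why the test can assert that start_time equals finish_time and that elapsed_time_seconds is exactly 0.0. A standalone sketch of the same patching pattern (stamp() is a hypothetical helper, and the '__main__' target assumes the snippet runs as a script):

    from datetime import datetime
    from unittest import mock

    def stamp():
        # resolves `datetime` through this module's globals at call time
        return datetime.utcnow()

    fixed = datetime(2019, 12, 1, 11, 38)
    with mock.patch('__main__.datetime') as mock_datetime:
        mock_datetime.utcnow = mock.Mock(return_value=fixed)
        assert stamp() == fixed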