1
0
mirror of https://github.com/scrapy/scrapy.git synced 2025-02-06 11:00:46 +00:00

Remove datetime.utcnow() usage (#6014)

This commit is contained in:
Laerte Pereira 2023-08-21 10:51:49 -03:00 committed by GitHub
parent 3318971512
commit df2163ce6a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 11 additions and 11 deletions

View File

@@ -1,7 +1,7 @@
"""
Extension for collecting core stats like items scraped and start/finish times
"""
from datetime import datetime
from datetime import datetime, timezone
from scrapy import signals
@@ -22,11 +22,11 @@ class CoreStats:
return o
def spider_opened(self, spider):
self.start_time = datetime.utcnow()
self.start_time = datetime.now(tz=timezone.utc)
self.stats.set_value("start_time", self.start_time, spider=spider)
def spider_closed(self, spider, reason):
finish_time = datetime.utcnow()
finish_time = datetime.now(tz=timezone.utc)
elapsed_time = finish_time - self.start_time
elapsed_time_seconds = elapsed_time.total_seconds()
self.stats.set_value(

View File

@@ -8,7 +8,7 @@ import logging
import re
import sys
import warnings
from datetime import datetime
from datetime import datetime, timezone
from pathlib import Path, PureWindowsPath
from tempfile import NamedTemporaryFile
from typing import IO, Any, Callable, Dict, List, Optional, Tuple, Union
@@ -676,7 +676,7 @@ class FeedExporter:
params = {}
for k in dir(spider):
params[k] = getattr(spider, k)
utc_now = datetime.utcnow()
utc_now = datetime.now(tz=timezone.utc)
params["time"] = utc_now.replace(microsecond=0).isoformat().replace(":", "-")
params["batch_time"] = utc_now.isoformat().replace(":", "-")
params["batch_id"] = slot.batch_id + 1 if slot is not None else 1

View File

@@ -1,4 +1,4 @@
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone
from pathlib import Path
from cryptography.hazmat.backends import default_backend
@@ -50,8 +50,8 @@ def generate_keys():
.issuer_name(issuer)
.public_key(key.public_key())
.serial_number(random_serial_number())
.not_valid_before(datetime.utcnow())
.not_valid_after(datetime.utcnow() + timedelta(days=10))
.not_valid_before(datetime.now(tz=timezone.utc))
.not_valid_after(datetime.now(tz=timezone.utc) + timedelta(days=10))
.add_extension(
SubjectAlternativeName([DNSName("localhost")]),
critical=False,

View File

@@ -1,5 +1,5 @@
import shutil
from datetime import datetime
from datetime import datetime, timezone
from pathlib import Path
from twisted.trial import unittest
@@ -16,7 +16,7 @@ class SpiderStateTest(unittest.TestCase):
Path(jobdir).mkdir()
try:
spider = Spider(name="default")
dt = datetime.now()
dt = datetime.now(tz=timezone.utc)
ss = SpiderState(jobdir)
ss.spider_opened(spider)

View File

@@ -16,7 +16,7 @@ class CoreStatsExtensionTest(unittest.TestCase):
@mock.patch("scrapy.extensions.corestats.datetime")
def test_core_stats_default_stats_collector(self, mock_datetime):
fixed_datetime = datetime(2019, 12, 1, 11, 38)
mock_datetime.utcnow = mock.Mock(return_value=fixed_datetime)
mock_datetime.now = mock.Mock(return_value=fixed_datetime)
self.crawler.stats = StatsCollector(self.crawler)
ext = CoreStats.from_crawler(self.crawler)
ext.spider_opened(self.spider)