1
0
mirror of https://github.com/scrapy/scrapy.git synced 2025-02-21 07:52:49 +00:00

Merge remote-tracking branch 'upstream/master' into remove-six-code

This commit is contained in:
Eugenio Lacuesta 2019-12-04 15:43:02 -03:00
commit 2c010152c3
No known key found for this signature in database
GPG Key ID: DA3EF2D0913E9810
6 changed files with 20 additions and 9 deletions

View File

@ -275,6 +275,7 @@ coverage_ignore_pyobjects = [
# -------------------------------------
intersphinx_mapping = {
'coverage': ('https://coverage.readthedocs.io/en/stable', None),
'pytest': ('https://docs.pytest.org/en/latest', None),
'python': ('https://docs.python.org/3', None),
'sphinx': ('https://www.sphinx-doc.org/en/master', None),

View File

@ -243,14 +243,13 @@ the Python 3.6 :doc:`tox <tox:index>` environment using all your CPU cores::
tox -e py36 -- scrapy tests -n auto
To see coverage report install `coverage`_ (``pip install coverage``) and run:
To see the coverage report, install :doc:`coverage <coverage:index>`
(``pip install coverage``) and run:
``coverage report``
See the output of ``coverage --help`` for more options, such as HTML or XML reports.
.. _coverage: https://pypi.python.org/pypi/coverage
Writing tests
-------------

View File

@ -477,6 +477,8 @@ ItemLoader objects
.. attribute:: item
The :class:`~scrapy.item.Item` object being parsed by this Item Loader.
This is mostly used as a property so when attempting to override this
value, you may want to check out :attr:`default_item_class` first.
.. attribute:: context

View File

@ -1,4 +1,5 @@
import logging
import pprint
import signal
import warnings
@ -45,7 +46,8 @@ class Crawler(object):
logging.root.addHandler(handler)
d = dict(overridden_settings(self.settings))
logger.info("Overridden settings: %(settings)r", {'settings': d})
logger.info("Overridden settings:\n%(settings)s",
{'settings': pprint.pformat(d)})
if get_scrapy_root_handler() is not None:
# scrapy root handler already installed: update it with new settings

View File

@ -1,4 +1,3 @@
import os
import logging
from collections import defaultdict

View File

@ -1,11 +1,19 @@
from email.utils import formatdate
from twisted.internet import defer
from twisted.internet.error import TimeoutError, DNSLookupError, \
ConnectionRefusedError, ConnectionDone, ConnectError, \
ConnectionLost, TCPTimedOutError
from twisted.internet.error import (
ConnectError,
ConnectionDone,
ConnectionLost,
ConnectionRefusedError,
DNSLookupError,
TCPTimedOutError,
TimeoutError,
)
from twisted.web.client import ResponseFailed
from scrapy import signals
from scrapy.exceptions import NotConfigured, IgnoreRequest
from scrapy.exceptions import IgnoreRequest, NotConfigured
from scrapy.utils.misc import load_object