1
0
mirror of https://github.com/scrapy/scrapy.git synced 2025-02-24 04:44:26 +00:00

Merge pull request #3326 from lopuhin/patiences-master

Python 3.7 support
This commit is contained in:
Mikhail Korobov 2018-07-11 17:36:55 +05:00 committed by GitHub
commit 8d5320dcd2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 37 additions and 13 deletions

View File

@@ -21,6 +21,10 @@ matrix:
env: TOXENV=py35
- python: 3.6
env: TOXENV=py36
- python: 3.7
env: TOXENV=py37
dist: xenial
sudo: true
- python: 3.6
env: TOXENV=docs
install:

View File

@@ -1,4 +1,4 @@
Twisted >= 17.9.0
Twisted>=17.9.0
lxml>=3.2.4
pyOpenSSL>=0.13.1
cssselect>=0.9

View File

@@ -6,13 +6,15 @@ See documentation in docs/topics/telnetconsole.rst
import pprint
import logging
import traceback
from twisted.internet import protocol
try:
from twisted.conch import manhole, telnet
from twisted.conch.insults import insults
TWISTED_CONCH_AVAILABLE = True
except ImportError:
except (ImportError, SyntaxError):
_TWISTED_CONCH_TRACEBACK = traceback.format_exc()
TWISTED_CONCH_AVAILABLE = False
from scrapy.exceptions import NotConfigured
@@ -40,7 +42,9 @@ class TelnetConsole(protocol.ServerFactory):
if not crawler.settings.getbool('TELNETCONSOLE_ENABLED'):
raise NotConfigured
if not TWISTED_CONCH_AVAILABLE:
raise NotConfigured
raise NotConfigured(
'TELNETCONSOLE_ENABLED setting is True but required twisted '
'modules failed to import:\n' + _TWISTED_CONCH_TRACEBACK)
self.crawler = crawler
self.noisy = False
self.portrange = [int(x) for x in crawler.settings.getlist('TELNETCONSOLE_PORT')]

View File

@@ -98,8 +98,9 @@ def csviter(obj, delimiter=None, headers=None, encoding=None, quotechar=None):
"""
encoding = obj.encoding if isinstance(obj, TextResponse) else encoding or 'utf-8'
def _getrow(csv_r):
return [to_unicode(field, encoding) for field in next(csv_r)]
def row_to_unicode(row_):
return [to_unicode(field, encoding) for field in row_]
# Python 3 csv reader input object needs to return strings
if six.PY3:
@@ -113,10 +114,14 @@ def csviter(obj, delimiter=None, headers=None, encoding=None, quotechar=None):
csv_r = csv.reader(lines, **kwargs)
if not headers:
headers = _getrow(csv_r)
try:
row = next(csv_r)
except StopIteration:
return
headers = row_to_unicode(row)
while True:
row = _getrow(csv_r)
for row in csv_r:
row = row_to_unicode(row)
if len(row) != len(headers):
logger.warning("ignoring row %(csvlnum)d (length: %(csvrow)d, "
"should be: %(csvheader)d)",

View File

@@ -1,6 +1,6 @@
pytest==2.9.2
pytest==3.6.3
pytest-twisted
pytest-cov==2.2.1
pytest-cov==2.5.1
testfixtures
jmespath
leveldb

View File

@@ -1,5 +1,4 @@
import logging
import os
import tempfile
import warnings
import unittest
@@ -14,8 +13,9 @@ from scrapy.spiderloader import SpiderLoader
from scrapy.utils.log import configure_logging, get_scrapy_root_handler
from scrapy.utils.spider import DefaultSpider
from scrapy.utils.misc import load_object
from scrapy.utils.test import get_crawler
from scrapy.extensions.throttle import AutoThrottle
from scrapy.extensions import telnet
class BaseCrawlerTest(unittest.TestCase):
@@ -100,6 +100,8 @@ class CrawlerLoggingTestCase(unittest.TestCase):
custom_settings = {
'LOG_LEVEL': 'INFO',
'LOG_FILE': log_file.name,
# disable telnet if not available to avoid an extra warning
'TELNETCONSOLE_ENABLED': telnet.TWISTED_CONCH_AVAILABLE,
}
configure_logging()

View File

@@ -10,6 +10,7 @@ from twisted.python.failure import Failure
from scrapy.utils.log import (failure_to_exc_info, TopLevelFormatter,
LogCounterHandler, StreamLogger)
from scrapy.utils.test import get_crawler
from scrapy.extensions import telnet
class FailureToExcInfoTest(unittest.TestCase):
@@ -65,10 +66,14 @@ class TopLevelFormatterTest(unittest.TestCase):
class LogCounterHandlerTest(unittest.TestCase):
def setUp(self):
settings = {'LOG_LEVEL': 'WARNING'}
if not telnet.TWISTED_CONCH_AVAILABLE:
# disable it to avoid the extra warning
settings['TELNETCONSOLE_ENABLED'] = False
self.logger = logging.getLogger('test')
self.logger.setLevel(logging.NOTSET)
self.logger.propagate = False
self.crawler = get_crawler(settings_dict={'LOG_LEVEL': 'WARNING'})
self.crawler = get_crawler(settings_dict=settings)
self.handler = LogCounterHandler(self.crawler)
self.logger.addHandler(self.handler)

View File

@@ -81,6 +81,10 @@ deps = {[testenv:py34]deps}
basepython = python3.6
deps = {[testenv:py34]deps}
[testenv:py37]
basepython = python3.7
deps = {[testenv:py34]deps}
[testenv:pypy3]
basepython = pypy3
deps = {[testenv:py34]deps}