diff --git a/.travis.yml b/.travis.yml
index 065f23805..4218d13bf 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -21,6 +21,10 @@ matrix:
       env: TOXENV=py35
     - python: 3.6
       env: TOXENV=py36
+    - python: 3.7
+      env: TOXENV=py37
+      dist: xenial
+      sudo: true
     - python: 3.6
       env: TOXENV=docs
 install:
diff --git a/requirements-py3.txt b/requirements-py3.txt
index d76d9412f..b38c4cc09 100644
--- a/requirements-py3.txt
+++ b/requirements-py3.txt
@@ -1,4 +1,4 @@
-Twisted >= 17.9.0
+Twisted>=17.9.0
 lxml>=3.2.4
 pyOpenSSL>=0.13.1
 cssselect>=0.9
diff --git a/scrapy/extensions/telnet.py b/scrapy/extensions/telnet.py
index e78afa1fc..3024ddfaa 100644
--- a/scrapy/extensions/telnet.py
+++ b/scrapy/extensions/telnet.py
@@ -6,13 +6,15 @@ See documentation in docs/topics/telnetconsole.rst
 
 import pprint
 import logging
+import traceback
 
 from twisted.internet import protocol
 try:
     from twisted.conch import manhole, telnet
     from twisted.conch.insults import insults
     TWISTED_CONCH_AVAILABLE = True
-except ImportError:
+except (ImportError, SyntaxError):
+    _TWISTED_CONCH_TRACEBACK = traceback.format_exc()
     TWISTED_CONCH_AVAILABLE = False
 
 from scrapy.exceptions import NotConfigured
@@ -40,7 +42,9 @@ class TelnetConsole(protocol.ServerFactory):
         if not crawler.settings.getbool('TELNETCONSOLE_ENABLED'):
             raise NotConfigured
         if not TWISTED_CONCH_AVAILABLE:
-            raise NotConfigured
+            raise NotConfigured(
+                'TELNETCONSOLE_ENABLED setting is True but required twisted '
+                'modules failed to import:\n' + _TWISTED_CONCH_TRACEBACK)
         self.crawler = crawler
         self.noisy = False
         self.portrange = [int(x) for x in crawler.settings.getlist('TELNETCONSOLE_PORT')]
diff --git a/scrapy/utils/iterators.py b/scrapy/utils/iterators.py
index 73857b410..a12e14005 100644
--- a/scrapy/utils/iterators.py
+++ b/scrapy/utils/iterators.py
@@ -98,8 +98,9 @@ def csviter(obj, delimiter=None, headers=None, encoding=None, quotechar=None):
     """
 
     encoding = obj.encoding if isinstance(obj, TextResponse) else encoding or 'utf-8'
-    def _getrow(csv_r):
-        return [to_unicode(field, encoding) for field in next(csv_r)]
+
+    def row_to_unicode(row_):
+        return [to_unicode(field, encoding) for field in row_]
 
     # Python 3 csv reader input object needs to return strings
     if six.PY3:
@@ -113,10 +114,14 @@ def csviter(obj, delimiter=None, headers=None, encoding=None, quotechar=None):
     csv_r = csv.reader(lines, **kwargs)
 
     if not headers:
-        headers = _getrow(csv_r)
+        try:
+            row = next(csv_r)
+        except StopIteration:
+            return
+        headers = row_to_unicode(row)
 
-    while True:
-        row = _getrow(csv_r)
+    for row in csv_r:
+        row = row_to_unicode(row)
         if len(row) != len(headers):
             logger.warning("ignoring row %(csvlnum)d (length: %(csvrow)d, "
                            "should be: %(csvheader)d)",
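Note (not part of the diff): the csviter() rewrite above is what makes the iterator work on Python 3.7. Under PEP 479, which is enforced for all code in 3.7, a StopIteration that escapes a generator body is re-raised as RuntimeError, so the old pattern of letting a bare next() call end the generator inside `while True` no longer terminates cleanly. A minimal sketch of the failure mode and the fix, using made-up helper names rather than Scrapy's code:

    def rows_old_style(lines):
        it = iter(lines)
        while True:
            # On Python < 3.7 the StopIteration raised by next() at end of
            # input silently ended this generator; on 3.7+ (PEP 479) it is
            # re-raised as RuntimeError instead.
            yield next(it)

    def rows_new_style(lines):
        it = iter(lines)
        try:
            header = next(it)        # guarded, like the headers lookup in csviter
        except StopIteration:
            return                   # empty input -> empty iterator, no error
        yield header
        for row in it:               # plain iteration, like `for row in csv_r`
            yield row

    print(list(rows_new_style(["a,b", "1,2"])))   # ['a,b', '1,2']
    # list(rows_old_style(["a,b"])) raises RuntimeError on Python 3.7+

csviter() now follows the second shape: the header row is fetched with a guarded next(), and the remaining rows come from ordinary iteration over the csv reader.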
diff --git a/tests/requirements-py3.txt b/tests/requirements-py3.txt
index 51a25f5e5..8d9ce5231 100644
--- a/tests/requirements-py3.txt
+++ b/tests/requirements-py3.txt
@@ -1,6 +1,6 @@
-pytest==2.9.2
+pytest==3.6.3
 pytest-twisted
-pytest-cov==2.2.1
+pytest-cov==2.5.1
 testfixtures
 jmespath
 leveldb
diff --git a/tests/test_crawler.py b/tests/test_crawler.py
index d3b80f460..6a8e11363 100644
--- a/tests/test_crawler.py
+++ b/tests/test_crawler.py
@@ -1,5 +1,4 @@
 import logging
-import os
 import tempfile
 import warnings
 import unittest
@@ -14,8 +13,9 @@ from scrapy.spiderloader import SpiderLoader
 from scrapy.utils.log import configure_logging, get_scrapy_root_handler
 from scrapy.utils.spider import DefaultSpider
 from scrapy.utils.misc import load_object
-from scrapy.utils.test import get_crawler
 from scrapy.extensions.throttle import AutoThrottle
+from scrapy.extensions import telnet
+
 
 
 class BaseCrawlerTest(unittest.TestCase):
@@ -100,6 +100,8 @@ class CrawlerLoggingTestCase(unittest.TestCase):
             custom_settings = {
                 'LOG_LEVEL': 'INFO',
                 'LOG_FILE': log_file.name,
+                # disable telnet if not available to avoid an extra warning
+                'TELNETCONSOLE_ENABLED': telnet.TWISTED_CONCH_AVAILABLE,
             }
 
         configure_logging()
diff --git a/tests/test_utils_log.py b/tests/test_utils_log.py
index 45527b03b..742e04803 100644
--- a/tests/test_utils_log.py
+++ b/tests/test_utils_log.py
@@ -10,6 +10,7 @@ from twisted.python.failure import Failure
 from scrapy.utils.log import (failure_to_exc_info, TopLevelFormatter,
                               LogCounterHandler, StreamLogger)
 from scrapy.utils.test import get_crawler
+from scrapy.extensions import telnet
 
 
 class FailureToExcInfoTest(unittest.TestCase):
@@ -65,10 +66,14 @@ class TopLevelFormatterTest(unittest.TestCase):
 class LogCounterHandlerTest(unittest.TestCase):
 
     def setUp(self):
+        settings = {'LOG_LEVEL': 'WARNING'}
+        if not telnet.TWISTED_CONCH_AVAILABLE:
+            # disable it to avoid the extra warning
+            settings['TELNETCONSOLE_ENABLED'] = False
         self.logger = logging.getLogger('test')
         self.logger.setLevel(logging.NOTSET)
         self.logger.propagate = False
-        self.crawler = get_crawler(settings_dict={'LOG_LEVEL': 'WARNING'})
+        self.crawler = get_crawler(settings_dict=settings)
         self.handler = LogCounterHandler(self.crawler)
         self.logger.addHandler(self.handler)
 
diff --git a/tox.ini b/tox.ini
index ee40983de..e5543fe2a 100644
--- a/tox.ini
+++ b/tox.ini
@@ -81,6 +81,10 @@ deps = {[testenv:py34]deps}
 basepython = python3.6
 deps = {[testenv:py34]deps}
 
+[testenv:py37]
+basepython = python3.7
+deps = {[testenv:py34]deps}
+
 [testenv:pypy3]
 basepython = pypy3
 deps = {[testenv:py34]deps}
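A note on the telnet.py import guard (not part of the diff): on Python 3.7 `async` became a reserved keyword, so importing a module that still uses it as an ordinary name, as older twisted.conch releases did, fails with SyntaxError rather than ImportError, which is why the except clause now catches both. The standalone sketch below reproduces that behaviour with a made-up module name (`legacy_mod`); the traceback is captured the same way `_TWISTED_CONCH_TRACEBACK` is captured in the extension:

    import sys
    import pathlib
    import tempfile
    import traceback

    # Uses "async" as an ordinary parameter name: legal before Python 3.7,
    # a SyntaxError at import time from 3.7 onwards.
    legacy_source = "def write(data, async=False):\n    pass\n"

    with tempfile.TemporaryDirectory() as tmp:
        pathlib.Path(tmp, "legacy_mod.py").write_text(legacy_source)
        sys.path.insert(0, tmp)
        try:
            import legacy_mod                 # hypothetical module, fails on 3.7+
        except (ImportError, SyntaxError):
            print(traceback.format_exc())     # same trick as _TWISTED_CONCH_TRACEBACK
        finally:
            sys.path.remove(tmp)

Storing the formatted traceback at import time means the NotConfigured message raised later can show the real cause instead of a bare failure.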