
Merge pull request #1267 from Curita/fix-1265

Fix #1265
Daniel Graña · 2015-06-01 20:31:46 -03:00 · commit d52cf8bb03

4 changed files with 107 additions and 9 deletions

scrapy/utils/conf.py

@@ -5,16 +5,30 @@ from operator import itemgetter
 
 import six
 from six.moves.configparser import SafeConfigParser
 
+from scrapy.utils.deprecate import update_classpath
+
 
-def build_component_list(base, custom):
+def build_component_list(base, custom, convert=update_classpath):
     """Compose a component list based on a custom and base dict of components
     (typically middlewares or extensions), unless custom is already a list, in
     which case it's returned.
     """
+
+    def _check_components(complist):
+        if len({convert(c) for c in complist}) != len(complist):
+            raise ValueError('Some paths in {!r} convert to the same object, '
+                             'please update your settings'.format(complist))
+
     if isinstance(custom, (list, tuple)):
-        return custom
-    compdict = base.copy()
-    compdict.update(custom)
+        _check_components(custom)
+        return type(custom)(convert(c) for c in custom)
+
+    def _map_keys(compdict):
+        _check_components(compdict)
+        return {convert(k): v for k, v in six.iteritems(compdict)}
+
+    compdict = _map_keys(base)
+    compdict.update(_map_keys(custom))
     items = (x for x in six.iteritems(compdict) if x[1] is not None)
     return [x[0] for x in sorted(items, key=itemgetter(1))]
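
With this change, every component key is passed through convert (by default update_classpath) before base and custom are merged, so the old and new spellings of a path collapse into a single entry, and a dict that contains both spellings at once is rejected. A minimal sketch of the resulting behavior; the ImagesPipeline paths are real Scrapy paths, used here purely for illustration:

    from scrapy.utils.conf import build_component_list

    # Deprecated and current spellings of the same pipeline collapse into one
    # entry, so the custom order (200) overrides the base order (100).
    base = {'scrapy.pipelines.images.ImagesPipeline': 100}
    custom = {'scrapy.contrib.pipeline.images.ImagesPipeline': 200}
    print(build_component_list(base, custom))
    # ['scrapy.pipelines.images.ImagesPipeline']  (plus a ScrapyDeprecationWarning)

    # Two spellings of the same component inside a single dict now raise:
    try:
        build_component_list({}, {
            'scrapy.contrib.pipeline.images.ImagesPipeline': 100,
            'scrapy.pipelines.images.ImagesPipeline': 200,
        })
    except ValueError as exc:
        print(exc)  # Some paths in {...} convert to the same object, ...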

scrapy/utils/deprecate.py

@@ -121,3 +121,37 @@ def _clspath(cls, forced=None):
     if forced is not None:
         return forced
     return '{}.{}'.format(cls.__module__, cls.__name__)
+
+
+DEPRECATION_RULES = [
+    ('scrapy.contrib_exp.downloadermiddleware.decompression.', 'scrapy.downloadermiddlewares.decompression.'),
+    ('scrapy.contrib_exp.iterators.', 'scrapy.utils.iterators.'),
+    ('scrapy.contrib.downloadermiddleware.', 'scrapy.downloadermiddlewares.'),
+    ('scrapy.contrib.exporter.', 'scrapy.exporters.'),
+    ('scrapy.contrib.linkextractors.', 'scrapy.linkextractors.'),
+    ('scrapy.contrib.loader.processor.', 'scrapy.loader.processors.'),
+    ('scrapy.contrib.loader.', 'scrapy.loader.'),
+    ('scrapy.contrib.pipeline.', 'scrapy.pipelines.'),
+    ('scrapy.contrib.spidermiddleware.', 'scrapy.spidermiddlewares.'),
+    ('scrapy.contrib.spiders.', 'scrapy.spiders.'),
+    ('scrapy.contrib.', 'scrapy.extensions.'),
+    ('scrapy.command.', 'scrapy.commands.'),
+    ('scrapy.dupefilter.', 'scrapy.dupefilters.'),
+    ('scrapy.linkextractor.', 'scrapy.linkextractors.'),
+    ('scrapy.spider.', 'scrapy.spiders.'),
+    ('scrapy.squeue.', 'scrapy.squeues.'),
+    ('scrapy.statscol.', 'scrapy.statscollectors.'),
+    ('scrapy.utils.decorator.', 'scrapy.utils.decorators.'),
+    ('scrapy.spidermanager.SpiderManager', 'scrapy.spiderloader.SpiderLoader'),
+]
+
+
+def update_classpath(path):
+    """Update a deprecated path from an object with its new location"""
+    for prefix, replacement in DEPRECATION_RULES:
+        if path.startswith(prefix):
+            new_path = path.replace(prefix, replacement, 1)
+            warnings.warn("`{}` class is deprecated, use `{}` instead".format(path, new_path),
+                          ScrapyDeprecationWarning)
+            return new_path
+    return path
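
Rule order in DEPRECATION_RULES does the heavy lifting: update_classpath rewrites only the first matching prefix, so the more specific 'scrapy.contrib.pipeline.' entry must sit above the catch-all 'scrapy.contrib.'. A short sketch of the expected mapping; the project-local path in the last call is hypothetical:

    import warnings
    from scrapy.utils.deprecate import update_classpath

    with warnings.catch_warnings(record=True):
        warnings.simplefilter('always')
        # The specific rule wins because it precedes the catch-all in the list.
        print(update_classpath('scrapy.contrib.pipeline.images.ImagesPipeline'))
        # scrapy.pipelines.images.ImagesPipeline
        print(update_classpath('scrapy.contrib.closespider.CloseSpider'))
        # scrapy.extensions.closespider.CloseSpider
        # Paths that match no rule pass through untouched, with no warning.
        print(update_classpath('myproject.middlewares.CustomMiddleware'))
        # myproject.middlewares.CustomMiddleware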

tests/test_utils_conf.py

@@ -2,16 +2,41 @@ import unittest
 
 from scrapy.utils.conf import build_component_list, arglist_to_dict
 
 
-class UtilsConfTestCase(unittest.TestCase):
-
-    def test_build_component_list(self):
+class BuildComponentListTest(unittest.TestCase):
+
+    def test_build_dict(self):
         base = {'one': 1, 'two': 2, 'three': 3, 'five': 5, 'six': None}
         custom = {'two': None, 'three': 8, 'four': 4}
-        self.assertEqual(build_component_list(base, custom),
+        self.assertEqual(build_component_list(base, custom, lambda x: x),
                          ['one', 'four', 'five', 'three'])
+
+    def test_return_list(self):
         custom = ['a', 'b', 'c']
-        self.assertEqual(build_component_list(base, custom), custom)
+        self.assertEqual(build_component_list(None, custom, lambda x: x),
+                         custom)
+
+    def test_map_dict(self):
+        custom = {'one': 1, 'two': 2, 'three': 3}
+        self.assertEqual(build_component_list({}, custom, lambda x: x.upper()),
+                         ['ONE', 'TWO', 'THREE'])
+
+    def test_map_list(self):
+        custom = ['a', 'b', 'c']
+        self.assertEqual(build_component_list(None, custom, lambda x: x.upper()),
+                         ['A', 'B', 'C'])
+
+    def test_duplicate_components_in_dict(self):
+        duplicate_dict = {'one': 1, 'two': 2, 'ONE': 4}
+        self.assertRaises(ValueError,
+                          build_component_list, {}, duplicate_dict, lambda x: x.lower())
+
+    def test_duplicate_components_in_list(self):
+        duplicate_list = ['a', 'b', 'a']
+        self.assertRaises(ValueError,
+                          build_component_list, None, duplicate_list, lambda x: x)
+
+
+class UtilsConfTestCase(unittest.TestCase):
+
     def test_arglist_to_dict(self):
         self.assertEqual(arglist_to_dict(['arg1=val1', 'arg2=val2']),

tests/test_utils_deprecate.py

@@ -3,7 +3,7 @@ from __future__ import absolute_import
 import inspect
 import unittest
 import warnings
 
-from scrapy.utils.deprecate import create_deprecated_class
+from scrapy.utils.deprecate import create_deprecated_class, update_classpath
 from tests import mock
@@ -248,3 +248,28 @@ class WarnWhenSubclassedTest(unittest.TestCase):
                 pass
 
         self.assertIn("Error detecting parent module", str(w[0].message))
+
+
+@mock.patch('scrapy.utils.deprecate.DEPRECATION_RULES',
+            [('scrapy.contrib.pipeline.', 'scrapy.pipelines.'),
+             ('scrapy.contrib.', 'scrapy.extensions.')])
+class UpdateClassPathTest(unittest.TestCase):
+
+    def test_old_path_gets_fixed(self):
+        with warnings.catch_warnings(record=True) as w:
+            output = update_classpath('scrapy.contrib.debug.Debug')
+        self.assertEqual(output, 'scrapy.extensions.debug.Debug')
+        self.assertEqual(len(w), 1)
+        self.assertIn("scrapy.contrib.debug.Debug", str(w[0].message))
+        self.assertIn("scrapy.extensions.debug.Debug", str(w[0].message))
+
+    def test_sorted_replacement(self):
+        with warnings.catch_warnings(record=True):
+            output = update_classpath('scrapy.contrib.pipeline.Pipeline')
+        self.assertEqual(output, 'scrapy.pipelines.Pipeline')
+
+    def test_unmatched_path_stays_the_same(self):
+        with warnings.catch_warnings(record=True) as w:
+            output = update_classpath('scrapy.unmatched.Path')
+        self.assertEqual(output, 'scrapy.unmatched.Path')
+        self.assertEqual(len(w), 0)
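
The @mock.patch decorator above swaps the module-level DEPRECATION_RULES list for a two-rule fixture on every test in the class, which keeps the assertions stable as real rules are added later. The same pattern works for any module constant; a standalone sketch, assuming Python 3's unittest.mock (the `tests.mock` import in the diff appears to be Scrapy's own py2/py3 shim for it):

    import unittest
    from unittest import mock

    import scrapy.utils.deprecate


    class PatchedRulesExample(unittest.TestCase):

        @mock.patch('scrapy.utils.deprecate.DEPRECATION_RULES', [])
        def test_no_rules_no_rewrite(self):
            # With the rule list emptied, even an old contrib path passes
            # through update_classpath unchanged.
            path = 'scrapy.contrib.debug.Debug'
            self.assertEqual(scrapy.utils.deprecate.update_classpath(path), path)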