Mirror of https://github.com/scrapy/scrapy.git (synced 2025-02-24).
Replaced __import__ with importlib.import_module.

Since Python 2.7, importlib.import_module() is the recommended way to import modules programmatically. From __import__'s documentation: "Import a module. Because this function is meant for use by the Python interpreter and not for general use, it is better to use importlib.import_module() to programmatically import a module."
This commit is contained in:
parent bd79b6e1d3
commit 343f997ed6
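For context (this note is not part of the commit): __import__('pkg.mod') returns the top-level package 'pkg', not the named submodule, which is why the old calls passed the non-empty fromlist [''] to force the submodule back. importlib.import_module returns the named module directly. A minimal sketch using a stdlib package:

import importlib

mod_a = __import__('email.utils')                # returns the top-level 'email' package
mod_b = __import__('email.utils', {}, {}, [''])  # old idiom: forces 'email.utils'
mod_c = importlib.import_module('email.utils')   # returns 'email.utils', no tricks

print(mod_a.__name__)  # email
print(mod_b.__name__)  # email.utils
print(mod_c.__name__)  # email.utils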
@@ -2,6 +2,8 @@ from __future__ import print_function
 import os
 import shutil
 import string
+
+from importlib import import_module
 from os.path import join, dirname, abspath, exists, splitext
 
 import scrapy
@@ -90,7 +92,7 @@ class Command(ScrapyCommand):
             'classname': '%sSpider' % ''.join([s.capitalize() \
                 for s in module.split('_')])
         }
-        spiders_module = __import__(self.settings['NEWSPIDER_MODULE'], {}, {}, [''])
+        spiders_module = import_module(self.settings['NEWSPIDER_MODULE'])
         spiders_dir = abspath(dirname(spiders_module.__file__))
         spider_file = "%s.py" % join(spiders_dir, module)
         shutil.copyfile(template_file, spider_file)
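A quick worked example (not from the commit) of the classname expression in the hunk above, which CamelCases an underscore-separated module name:

# Hypothetical spider module name, for illustration only.
module = 'example_site'
classname = '%sSpider' % ''.join([s.capitalize() for s in module.split('_')])
print(classname)  # ExampleSiteSpider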
@@ -1,5 +1,6 @@
 import sys
 import os
+from importlib import import_module
 
 from scrapy.utils.spider import iter_spider_classes
 from scrapy.command import ScrapyCommand
@@ -15,7 +16,7 @@ def _import_file(filepath):
     if dirname:
         sys.path = [dirname] + sys.path
     try:
-        module = __import__(fname, {}, {}, [''])
+        module = import_module(fname)
     finally:
         if dirname:
             sys.path.pop(0)
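The change works because import_module, like __import__, resolves names against sys.path. A self-contained sketch of the _import_file helper around this hunk; lines outside the diff context are reconstructed assumptions, not the exact originals:

import os
import sys
from importlib import import_module

def _import_file(filepath):
    # Split '/some/dir/myspider.py' into directory and bare module name.
    dirname, file = os.path.split(os.path.abspath(filepath))
    fname, fext = os.path.splitext(file)
    if fext != '.py':
        raise ValueError("Not a Python source file: %s" % filepath)
    if dirname:
        # Temporarily put the file's directory first on sys.path so the
        # module can be imported by its bare name.
        sys.path = [dirname] + sys.path
    try:
        module = import_module(fname)
    finally:
        if dirname:
            sys.path.pop(0)
    return module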
@@ -1,6 +1,7 @@
 from __future__ import print_function
 import os
 import cPickle as pickle
+from importlib import import_module
 from time import time
 from weakref import WeakKeyDictionary
 from email.utils import mktime_tz, parsedate_tz
@@ -164,7 +165,7 @@ class DbmCacheStorage(object):
     def __init__(self, settings):
         self.cachedir = data_path(settings['HTTPCACHE_DIR'], createdir=True)
         self.expiration_secs = settings.getint('HTTPCACHE_EXPIRATION_SECS')
-        self.dbmodule = __import__(settings['HTTPCACHE_DBM_MODULE'], {}, {}, [''])
+        self.dbmodule = import_module(settings['HTTPCACHE_DBM_MODULE'])
         self.db = None
 
     def open_spider(self, spider):
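Here HTTPCACHE_DBM_MODULE holds a dotted module name, so the DBM backend is swappable through settings. A minimal sketch of the idiom, with the stdlib 'dbm' module standing in for the configured value:

from importlib import import_module

# Import a DBM implementation chosen by name at runtime.
dbmodule = import_module('dbm')  # stand-in for settings['HTTPCACHE_DBM_MODULE']
db = dbmodule.open('/tmp/example_cache', 'c')  # 'c': create the file if missing
db['key'] = 'value'
db.close()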
@@ -3,7 +3,7 @@ from twisted.trial import unittest
 class ScrapyUtilsTest(unittest.TestCase):
     def test_required_openssl_version(self):
         try:
-            module = __import__('OpenSSL', {}, {}, [''])
+            module = __import__('OpenSSL')
         except ImportError as ex:
             raise unittest.SkipTest("OpenSSL is not available")
 
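Note that this test keeps plain __import__ instead of switching to import_module: for a top-level module name the two return the same object, so dropping the fromlist arguments is all that is needed. A one-line check, with a stdlib module standing in for OpenSSL:

import importlib

# Both calls hand back the same cached object from sys.modules.
assert __import__('json') is importlib.import_module('json')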
@@ -2,6 +2,8 @@
 
 import re
 import hashlib
+
+from importlib import import_module
 from pkgutil import iter_modules
 
 from w3lib.html import remove_entities
@@ -35,7 +37,7 @@ def load_object(path):
 
     module, name = path[:dot], path[dot+1:]
     try:
-        mod = __import__(module, {}, {}, [''])
+        mod = import_module(module)
     except ImportError as e:
         raise ImportError("Error loading object '%s': %s" % (path, e))
 
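For reference, a self-contained sketch of the load_object pattern around this hunk; the lines outside the diff context are reconstructed assumptions rather than the exact originals:

from importlib import import_module

def load_object(path):
    # Split 'package.module.ObjectName' at the last dot.
    dot = path.rindex('.')
    module, name = path[:dot], path[dot+1:]
    try:
        mod = import_module(module)
    except ImportError as e:
        raise ImportError("Error loading object '%s': %s" % (path, e))
    return getattr(mod, name)

print(load_object('os.path.join'))  # <function join at ...>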
@@ -55,7 +57,7 @@ def walk_modules(path, load=False):
     """
 
     mods = []
-    mod = __import__(path, {}, {}, [''])
+    mod = import_module(path)
     mods.append(mod)
     if hasattr(mod, '__path__'):
         for _, subpath, ispkg in iter_modules(mod.__path__):
@@ -63,7 +65,7 @@ def walk_modules(path, load=False):
             if ispkg:
                 mods += walk_modules(fullpath)
             else:
-                submod = __import__(fullpath, {}, {}, [''])
+                submod = import_module(fullpath)
                 mods.append(submod)
     return mods
 
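Putting the two walk_modules hunks together, the full traversal looks roughly like this; the fullpath construction and the parts outside the diff context are assumptions:

from importlib import import_module
from pkgutil import iter_modules

def walk_modules(path):
    # Import 'path' and, if it is a package, recursively import every
    # module found beneath it; return all imported modules as a list.
    mods = []
    mod = import_module(path)
    mods.append(mod)
    if hasattr(mod, '__path__'):  # only packages define __path__
        for _, subpath, ispkg in iter_modules(mod.__path__):
            fullpath = path + '.' + subpath
            if ispkg:
                mods += walk_modules(fullpath)
            else:
                submod = import_module(fullpath)
                mods.append(submod)
    return mods

print([m.__name__ for m in walk_modules('email.mime')])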
@@ -1,8 +1,10 @@
 import os
-from os.path import join, dirname, abspath, isabs, exists
 import cPickle as pickle
 import warnings
+
+from importlib import import_module
+from os.path import join, dirname, abspath, isabs, exists
 
 from scrapy.utils.conf import closest_scrapy_cfg, get_config, init_env
 from scrapy.settings import CrawlerSettings
 from scrapy.exceptions import NotConfigured
@@ -14,7 +16,7 @@ def inside_project():
     scrapy_module = os.environ.get('SCRAPY_SETTINGS_MODULE')
     if scrapy_module is not None:
         try:
-            __import__(scrapy_module)
+            import_module(scrapy_module)
         except ImportError as exc:
             warnings.warn("Cannot import scrapy settings module %s: %s" % (scrapy_module, exc))
         else:
@@ -53,7 +55,7 @@ def get_project_settings():
         init_env(project)
     settings_module_path = os.environ.get(ENVVAR)
     if settings_module_path:
-        settings_module = __import__(settings_module_path, {}, {}, [''])
+        settings_module = import_module(settings_module_path)
     else:
         settings_module = None
     settings = CrawlerSettings(settings_module)
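The same by-name import drives settings discovery: the dotted path in the SCRAPY_SETTINGS_MODULE environment variable (set up by init_env) is imported at runtime. A minimal sketch of the idiom, with a stdlib module standing in for a real project settings module:

import os
from importlib import import_module

# 'json.tool' is a stand-in for something like 'myproject.settings'.
os.environ['SCRAPY_SETTINGS_MODULE'] = 'json.tool'

settings_module_path = os.environ.get('SCRAPY_SETTINGS_MODULE')
settings_module = import_module(settings_module_path) if settings_module_path else None
print(settings_module)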