mirror of
https://github.com/scrapy/scrapy.git
synced 2025-02-24 13:44:20 +00:00
Raise error when settings module is missing.
Previously, it failed silently if an ImportError was caught when trying to import the scrapy settings module. This happened not only when the scrapy settings module itself was missing, but also when that module in turn tried to import a missing module, which made the whole approach a bad idea. A side effect of this change (not required, but done for simplification) is that we no longer support the default "scrapy_settings" name for the scrapy settings module, but as far as I know this was never used.
This commit is contained in:
parent
ce80e5c792
commit
dd13dfe82b
@@ -51,10 +51,10 @@ def get_project_settings():
     if ENVVAR not in os.environ:
         project = os.environ.get('SCRAPY_PROJECT', 'default')
         init_env(project)
-    settings_module_path = os.environ.get(ENVVAR, 'scrapy_settings')
-    try:
+    settings_module_path = os.environ.get(ENVVAR)
+    if settings_module_path:
         settings_module = __import__(settings_module_path, {}, {}, [''])
-    except ImportError:
+    else:
         settings_module = None
     settings = CrawlerSettings(settings_module)
|
@@ -9,9 +9,10 @@ class ProcessTest(object):
     prefix = [sys.executable, '-m', 'scrapy.cmdline']
     cwd = os.getcwd() # trial chdirs to temp dir

-    def execute(self, args, check_code=True, settings='missing'):
+    def execute(self, args, check_code=True, settings=None):
         env = os.environ.copy()
-        env['SCRAPY_SETTINGS_MODULE'] = settings
+        if settings is not None:
+            env['SCRAPY_SETTINGS_MODULE'] = settings
         cmd = self.prefix + [self.command] + list(args)
         pp = TestProcessProtocol()
         pp.deferred.addBoth(self._process_finished, cmd, check_code)
Loading…
x
Reference in New Issue
Block a user