1
0
mirror of https://github.com/scrapy/scrapy.git synced 2025-02-26 18:44:12 +00:00

Use a more reliable way to set the subprocess PYTHONPATH in tests

This commit is contained in:
Pablo Hoffman 2012-08-30 10:09:20 -03:00
parent 4a5f70278f
commit 3904f5f55a
3 changed files with 16 additions and 9 deletions

View File

@ -3,12 +3,13 @@ import os
from subprocess import Popen, PIPE
import unittest
from scrapy.utils.test import get_pythonpath
class CmdlineTest(unittest.TestCase):
def setUp(self):
self.env = os.environ.copy()
if 'PYTHONPATH' in os.environ:
self.env['PYTHONPATH'] = os.environ['PYTHONPATH']
self.env['PYTHONPATH'] = get_pythonpath()
self.env['SCRAPY_SETTINGS_MODULE'] = 'scrapy.tests.test_cmdline.settings'
def _execute(self, *new_args, **kwargs):

View File

@ -8,7 +8,7 @@ from tempfile import mkdtemp
from twisted.trial import unittest
from scrapy.utils.python import retry_on_eintr
from scrapy.utils.test import get_pythonpath
class ProjectTest(unittest.TestCase):
project_name = 'testproject'
@ -19,8 +19,7 @@ class ProjectTest(unittest.TestCase):
self.proj_path = join(self.temp_path, self.project_name)
self.proj_mod_path = join(self.proj_path, self.project_name)
self.env = os.environ.copy()
if 'PYTHONPATH' in os.environ:
self.env['PYTHONPATH'] = os.environ['PYTHONPATH']
self.env['PYTHONPATH'] = get_pythonpath()
def tearDown(self):
rmtree(self.temp_path)

View File

@ -2,13 +2,10 @@
This module contains some assorted functions used in tests
"""
import os
import os, sys
from twisted.trial.unittest import SkipTest
from scrapy.crawler import Crawler
from scrapy.settings import CrawlerSettings
def libxml2debug(testfunction):
"""Decorator for debugging libxml2 memory leaks inside a function.
@ -51,6 +48,9 @@ def get_crawler(settings_dict=None):
will be used as the settings present in the settings module of the
CrawlerSettings.
"""
from scrapy.crawler import Crawler
from scrapy.settings import CrawlerSettings
class SettingsModuleMock(object):
pass
settings_module = SettingsModuleMock()
@ -59,3 +59,10 @@ def get_crawler(settings_dict=None):
setattr(settings_module, k, v)
settings = CrawlerSettings(settings_module)
return Crawler(settings)
def get_pythonpath():
    """Return a PYTHONPATH suitable to use in processes so that they find
    this installation of Scrapy.

    The directory containing the ``scrapy`` package is prepended to any
    PYTHONPATH already present in the environment.
    """
    scrapy_path = __import__('scrapy').__path__[0]
    parent_dir = os.path.dirname(scrapy_path)
    existing = os.environ.get('PYTHONPATH', '')
    # Don't emit a trailing separator when PYTHONPATH is unset: an empty
    # PYTHONPATH entry means "current directory" to Python, which would
    # silently add the cwd to sys.path of the child process.
    if not existing:
        return parent_dir
    # os.pathsep is ';' on Windows and ':' elsewhere — no need to branch
    # on sys.platform by hand.
    return parent_dir + os.pathsep + existing