import inspect
import os
import sys
import subprocess
import tempfile
from os.path import exists, join, abspath
from shutil import rmtree, copytree
from tempfile import mkdtemp
from contextlib import contextmanager
from threading import Timer

from twisted.trial import unittest

import scrapy
from scrapy.utils.python import to_unicode
from scrapy.utils.test import get_testenv
from tests.test_crawler import ExceptionSpider, NoRequestsSpider


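# Base class for the command tests below: each test runs Scrapy commands from
# a fresh temporary directory that is removed again in tearDown().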
class ProjectTest(unittest.TestCase):
    project_name = 'testproject'

    def setUp(self):
        self.temp_path = mkdtemp()
        self.cwd = self.temp_path
        self.proj_path = join(self.temp_path, self.project_name)
        self.proj_mod_path = join(self.proj_path, self.project_name)
        self.env = get_testenv()

    def tearDown(self):
        rmtree(self.temp_path)

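    # Run a Scrapy command in a subprocess and return only its exit status
    # (the command's output is discarded).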
    def call(self, *new_args, **kwargs):
        with tempfile.TemporaryFile() as out:
            args = (sys.executable, '-m', 'scrapy.cmdline') + new_args
            return subprocess.call(args, stdout=out, stderr=out, cwd=self.cwd,
                                   env=self.env, **kwargs)

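    # Run a Scrapy command in a subprocess, guarded by a 15-second timer that
    # kills it if it hangs; returns the process plus decoded stdout and stderr.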
    def proc(self, *new_args, **popen_kwargs):
        args = (sys.executable, '-m', 'scrapy.cmdline') + new_args
        p = subprocess.Popen(args, cwd=self.cwd, env=self.env,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             **popen_kwargs)

        def kill_proc():
            p.kill()
            assert False, 'Command took too much time to complete'

        timer = Timer(15, kill_proc)
        try:
            timer.start()
            stdout, stderr = p.communicate()
        finally:
            timer.cancel()

        return p, to_unicode(stdout), to_unicode(stderr)


class StartprojectTest(ProjectTest):

    def test_startproject(self):
        self.assertEqual(0, self.call('startproject', self.project_name))

        assert exists(join(self.proj_path, 'scrapy.cfg'))
        assert exists(join(self.proj_path, 'testproject'))
        assert exists(join(self.proj_mod_path, '__init__.py'))
        assert exists(join(self.proj_mod_path, 'items.py'))
        assert exists(join(self.proj_mod_path, 'pipelines.py'))
        assert exists(join(self.proj_mod_path, 'settings.py'))
        assert exists(join(self.proj_mod_path, 'spiders', '__init__.py'))

        self.assertEqual(1, self.call('startproject', self.project_name))
        self.assertEqual(1, self.call('startproject', 'wrong---project---name'))
        self.assertEqual(1, self.call('startproject', 'sys'))

    def test_startproject_with_project_dir(self):
        project_dir = mkdtemp()
        self.assertEqual(0, self.call('startproject', self.project_name, project_dir))

        assert exists(join(abspath(project_dir), 'scrapy.cfg'))
        assert exists(join(abspath(project_dir), 'testproject'))
        assert exists(join(join(abspath(project_dir), self.project_name), '__init__.py'))
        assert exists(join(join(abspath(project_dir), self.project_name), 'items.py'))
        assert exists(join(join(abspath(project_dir), self.project_name), 'pipelines.py'))
        assert exists(join(join(abspath(project_dir), self.project_name), 'settings.py'))
        assert exists(join(join(abspath(project_dir), self.project_name), 'spiders', '__init__.py'))

        self.assertEqual(0, self.call('startproject', self.project_name, project_dir + '2'))

        self.assertEqual(1, self.call('startproject', self.project_name, project_dir))
        self.assertEqual(1, self.call('startproject', self.project_name + '2', project_dir))
        self.assertEqual(1, self.call('startproject', 'wrong---project---name'))
        self.assertEqual(1, self.call('startproject', 'sys'))
        self.assertEqual(2, self.call('startproject'))
        self.assertEqual(2, self.call('startproject', self.project_name, project_dir, 'another_params'))


class StartprojectTemplatesTest(ProjectTest):

    def setUp(self):
        super(StartprojectTemplatesTest, self).setUp()
        self.tmpl = join(self.temp_path, 'templates')
        self.tmpl_proj = join(self.tmpl, 'project')

    def test_startproject_template_override(self):
        copytree(join(scrapy.__path__[0], 'templates'), self.tmpl)
        with open(join(self.tmpl_proj, 'root_template'), 'w'):
            pass
        assert exists(join(self.tmpl_proj, 'root_template'))

        args = ['--set', 'TEMPLATES_DIR=%s' % self.tmpl]
        p, out, err = self.proc('startproject', self.project_name, *args)
        self.assertIn("New Scrapy project '%s', using template directory"
                      % self.project_name, out)
        self.assertIn(self.tmpl_proj, out)
        assert exists(join(self.proj_path, 'root_template'))


class CommandTest(ProjectTest):

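    # Generate a project first and point SCRAPY_SETTINGS_MODULE at it, so the
    # commands under test run inside a real project.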
    def setUp(self):
        super(CommandTest, self).setUp()
        self.call('startproject', self.project_name)
        self.cwd = join(self.temp_path, self.project_name)
        self.env['SCRAPY_SETTINGS_MODULE'] = '%s.settings' % self.project_name


class GenspiderCommandTest(CommandTest):

    def test_arguments(self):
        # only pass one argument. spider script shouldn't be created
        self.assertEqual(2, self.call('genspider', 'test_name'))
        assert not exists(join(self.proj_mod_path, 'spiders', 'test_name.py'))
        # pass two arguments <name> <domain>. spider script should be created
        self.assertEqual(0, self.call('genspider', 'test_name', 'test.com'))
        assert exists(join(self.proj_mod_path, 'spiders', 'test_name.py'))

    def test_template(self, tplname='crawl'):
        args = ['--template=%s' % tplname] if tplname else []
        spname = 'test_spider'
        p, out, err = self.proc('genspider', spname, 'test.com', *args)
        self.assertIn("Created spider %r using template %r in module" % (spname, tplname), out)
        self.assertTrue(exists(join(self.proj_mod_path, 'spiders', 'test_spider.py')))
        p, out, err = self.proc('genspider', spname, 'test.com', *args)
        self.assertIn("Spider %r already exists in module" % spname, out)

    def test_template_basic(self):
        self.test_template('basic')

    def test_template_csvfeed(self):
        self.test_template('csvfeed')

    def test_template_xmlfeed(self):
        self.test_template('xmlfeed')

    def test_list(self):
        self.assertEqual(0, self.call('genspider', '--list'))

    def test_dump(self):
        self.assertEqual(0, self.call('genspider', '--dump=basic'))
        self.assertEqual(0, self.call('genspider', '-d', 'basic'))

    def test_same_name_as_project(self):
        self.assertEqual(2, self.call('genspider', self.project_name))
        assert not exists(join(self.proj_mod_path, 'spiders', '%s.py' % self.project_name))


class GenspiderStandaloneCommandTest(ProjectTest):

    def test_generate_standalone_spider(self):
        self.call('genspider', 'example', 'example.com')
        assert exists(join(self.temp_path, 'example.py'))


class MiscCommandsTest(CommandTest):

    def test_list(self):
        self.assertEqual(0, self.call('list'))


class RunSpiderCommandTest(CommandTest):

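    # Minimal spider source used by several tests below: it logs one DEBUG
    # message and schedules no requests.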
    debug_log_spider = """
import scrapy

class MySpider(scrapy.Spider):
    name = 'myspider'

    def start_requests(self):
        self.logger.debug("It Works!")
        return []
"""

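    # Write the given spider source to a temporary file, yield its path, and
    # remove the temporary directory again afterwards.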
    @contextmanager
    def _create_file(self, content, name):
        tmpdir = self.mktemp()
        os.mkdir(tmpdir)
        fname = abspath(join(tmpdir, name))
        with open(fname, 'w') as f:
            f.write(content)
        try:
            yield fname
        finally:
            rmtree(tmpdir)

    def runspider(self, code, name='myspider.py', args=()):
        with self._create_file(code, name) as fname:
            return self.proc('runspider', fname, *args)

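    # Scrapy writes its log to stderr, so that is what the tests assert on.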
    def get_log(self, code, name='myspider.py', args=()):
        p, stdout, stderr = self.runspider(code, name=name, args=args)
        return stderr

    def test_runspider(self):
        log = self.get_log(self.debug_log_spider)
        self.assertIn("DEBUG: It Works!", log)
        self.assertIn("INFO: Spider opened", log)
        self.assertIn("INFO: Closing spider (finished)", log)
        self.assertIn("INFO: Spider closed (finished)", log)

    def test_run_fail_spider(self):
        proc, _, _ = self.runspider("import scrapy\n" + inspect.getsource(ExceptionSpider))
        ret = proc.returncode
        self.assertNotEqual(ret, 0)

    def test_run_good_spider(self):
        proc, _, _ = self.runspider("import scrapy\n" + inspect.getsource(NoRequestsSpider))
        ret = proc.returncode
        self.assertEqual(ret, 0)

    def test_runspider_log_level(self):
        log = self.get_log(self.debug_log_spider,
                           args=('-s', 'LOG_LEVEL=INFO'))
        self.assertNotIn("DEBUG: It Works!", log)
        self.assertIn("INFO: Spider opened", log)

    def test_runspider_dnscache_disabled(self):
        # see https://github.com/scrapy/scrapy/issues/2811
        # The spider below should fail to connect to localhost:12345 (this is
        # intended), but the failure must not be caused by a DNS lookup error.
        # assumption: localhost will resolve in all cases (true?)
        log = self.get_log("""
import scrapy

class MySpider(scrapy.Spider):
    name = 'myspider'
    start_urls = ['http://localhost:12345']

    def parse(self, response):
        return {'test': 'value'}
""",
                           args=('-s', 'DNSCACHE_ENABLED=False'))
        print(log)
        self.assertNotIn("DNSLookupError", log)
        self.assertIn("INFO: Spider opened", log)

    def test_runspider_log_short_names(self):
        log1 = self.get_log(self.debug_log_spider,
                            args=('-s', 'LOG_SHORT_NAMES=1'))
        print(log1)
        self.assertIn("[myspider] DEBUG: It Works!", log1)
        self.assertIn("[scrapy]", log1)
        self.assertNotIn("[scrapy.core.engine]", log1)

        log2 = self.get_log(self.debug_log_spider,
                            args=('-s', 'LOG_SHORT_NAMES=0'))
        print(log2)
        self.assertIn("[myspider] DEBUG: It Works!", log2)
        self.assertNotIn("[scrapy]", log2)
        self.assertIn("[scrapy.core.engine]", log2)

    def test_runspider_no_spider_found(self):
        log = self.get_log("from scrapy.spiders import Spider\n")
        self.assertIn("No spider found in file", log)

    def test_runspider_file_not_found(self):
        _, _, log = self.proc('runspider', 'some_non_existent_file')
        self.assertIn("File not found: some_non_existent_file", log)

    def test_runspider_unable_to_load(self):
        log = self.get_log('', name='myspider.txt')
        self.assertIn('Unable to load', log)

    def test_start_requests_errors(self):
        log = self.get_log("""
import scrapy

class BadSpider(scrapy.Spider):
    name = "bad"
    def start_requests(self):
        raise Exception("oops!")
""", name="badspider.py")
        print(log)
        self.assertIn("start_requests", log)
        self.assertIn("badspider.py", log)


class BenchCommandTest(CommandTest):

    def test_run(self):
        _, _, log = self.proc('bench', '-s', 'LOGSTATS_INTERVAL=0.001',
                              '-s', 'CLOSESPIDER_TIMEOUT=0.01')
        self.assertIn('INFO: Crawled', log)
        self.assertNotIn('Unhandled Error', log)