mirror of https://github.com/scrapy/scrapy.git synced 2025-02-06 11:00:46 +00:00

Refactor EngineTest tests.

commit 200d76afa9
parent 340819eff0
Author: Andrey Rakhmatullin
Date:   2025-02-01 16:07:55 +05:00

3 changed files with 46 additions and 44 deletions
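In short: the old EngineTest mixed the engine test methods with the _assert_* helpers they use, so test classes in other modules that subclassed it for the helpers also inherited, and re-ran, all of its tests. This commit renames the class to EngineTestBase, keeps only the helpers in it, and moves the test methods into a new EngineTest(EngineTestBase). A minimal sketch of the resulting layout, bodies elided (class and method names taken from the diff below):

    import unittest

    from twisted.internet import defer


    class EngineTestBase(unittest.TestCase):
        # Shared assertion helpers only; no test_* methods, so
        # subclasses inherit the helpers without inheriting tests.
        def _assert_visited_urls(self, run):
            ...


    class EngineTest(EngineTestBase):
        # The concrete engine tests now live in this subclass.
        @defer.inlineCallbacks
        def test_crawler(self):
            yield  # real body elided; see the diff below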

@@ -243,46 +243,7 @@ class CrawlerRun:
         self.signals_caught[sig] = signalargs
 
 
-class EngineTest(unittest.TestCase):
-    @defer.inlineCallbacks
-    def test_crawler(self):
-        for spider in (
-            TestSpider,
-            DictItemsSpider,
-            AttrsItemsSpider,
-            DataClassItemsSpider,
-        ):
-            run = CrawlerRun(spider)
-            yield run.run()
-            self._assert_visited_urls(run)
-            self._assert_scheduled_requests(run, count=9)
-            self._assert_downloaded_responses(run, count=9)
-            self._assert_scraped_items(run)
-            self._assert_signals_caught(run)
-            self._assert_bytes_received(run)
-
-    @defer.inlineCallbacks
-    def test_crawler_dupefilter(self):
-        run = CrawlerRun(TestDupeFilterSpider)
-        yield run.run()
-        self._assert_scheduled_requests(run, count=8)
-        self._assert_dropped_requests(run)
-
-    @defer.inlineCallbacks
-    def test_crawler_itemerror(self):
-        run = CrawlerRun(ItemZeroDivisionErrorSpider)
-        yield run.run()
-        self._assert_items_error(run)
-
-    @defer.inlineCallbacks
-    def test_crawler_change_close_reason_on_idle(self):
-        run = CrawlerRun(ChangeCloseReasonSpider)
-        yield run.run()
-        self.assertEqual(
-            {"spider": run.spider, "reason": "custom_reason"},
-            run.signals_caught[signals.spider_closed],
-        )
-
+class EngineTestBase(unittest.TestCase):
     def _assert_visited_urls(self, run: CrawlerRun):
         must_be_visited = [
             "/",
@@ -422,6 +383,47 @@ class EngineTest(unittest.TestCase):
             run.signals_caught[signals.spider_closed],
         )
 
+
+class EngineTest(EngineTestBase):
+    @defer.inlineCallbacks
+    def test_crawler(self):
+        for spider in (
+            TestSpider,
+            DictItemsSpider,
+            AttrsItemsSpider,
+            DataClassItemsSpider,
+        ):
+            run = CrawlerRun(spider)
+            yield run.run()
+            self._assert_visited_urls(run)
+            self._assert_scheduled_requests(run, count=9)
+            self._assert_downloaded_responses(run, count=9)
+            self._assert_scraped_items(run)
+            self._assert_signals_caught(run)
+            self._assert_bytes_received(run)
+
+    @defer.inlineCallbacks
+    def test_crawler_dupefilter(self):
+        run = CrawlerRun(TestDupeFilterSpider)
+        yield run.run()
+        self._assert_scheduled_requests(run, count=8)
+        self._assert_dropped_requests(run)
+
+    @defer.inlineCallbacks
+    def test_crawler_itemerror(self):
+        run = CrawlerRun(ItemZeroDivisionErrorSpider)
+        yield run.run()
+        self._assert_items_error(run)
+
+    @defer.inlineCallbacks
+    def test_crawler_change_close_reason_on_idle(self):
+        run = CrawlerRun(ChangeCloseReasonSpider)
+        yield run.run()
+        self.assertEqual(
+            {"spider": run.spider, "reason": "custom_reason"},
+            run.signals_caught[signals.spider_closed],
+        )
+
     @defer.inlineCallbacks
     def test_close_downloader(self):
         e = ExecutionEngine(get_crawler(TestSpider), lambda _: None)
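The payoff shows up in the two modules below: BytesReceivedEngineTest and HeadersReceivedEngineTest previously subclassed EngineTest, and since unittest collects test_* methods from base classes, every EngineTest test they did not override ran again in each subclass. A self-contained sketch of that duplication, with illustrative names that are not from the commit:

    import unittest


    class Base(unittest.TestCase):
        def test_shared(self):
            # Collected for Base and again for every subclass.
            self.assertTrue(True)


    class Child(Base):
        # Inherits test_shared, so the same test runs twice;
        # a helper-only base class, as in this commit, avoids that.
        pass


    if __name__ == "__main__":
        unittest.main(verbosity=2)  # reports test_shared under Base and Child

Pointing the subclasses at EngineTestBase keeps the shared _assert_* helpers available while dropping the inherited test runs.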

@@ -7,7 +7,7 @@ from tests.test_engine import (
     CrawlerRun,
     DataClassItemsSpider,
     DictItemsSpider,
-    EngineTest,
+    EngineTestBase,
     TestSpider,
 )
@@ -18,7 +18,7 @@ class BytesReceivedCrawlerRun(CrawlerRun):
         raise StopDownload(fail=False)
 
 
-class BytesReceivedEngineTest(EngineTest):
+class BytesReceivedEngineTest(EngineTestBase):
     @defer.inlineCallbacks
     def test_crawler(self):
         for spider in (

@@ -7,7 +7,7 @@ from tests.test_engine import (
     CrawlerRun,
     DataClassItemsSpider,
     DictItemsSpider,
-    EngineTest,
+    EngineTestBase,
     TestSpider,
 )
@@ -18,7 +18,7 @@ class HeadersReceivedCrawlerRun(CrawlerRun):
         raise StopDownload(fail=False)
 
 
-class HeadersReceivedEngineTest(EngineTest):
+class HeadersReceivedEngineTest(EngineTestBase):
     @defer.inlineCallbacks
     def test_crawler(self):
         for spider in (