
Add a test for the robots.txt middleware processing a request for which the robots.txt parser is already ready.

This commit is contained in:
Artur Gaspar 2015-09-02 01:43:22 -03:00
parent a6a629e707
commit 668e5fd257


@@ -50,6 +50,12 @@ class RobotsTxtMiddlewareTest(unittest.TestCase):
             self.assertIgnored(Request('http://site.local/static/'), middleware)
         ], fireOnOneErrback=True)
 
+    def test_robotstxt_ready_parser(self):
+        middleware = RobotsTxtMiddleware(self._get_successful_crawler())
+        d = self.assertNotIgnored(Request('http://site.local/allowed'), middleware)
+        d.addCallback(lambda _: self.assertNotIgnored(Request('http://site.local/allowed'), middleware))
+        return d
+
     def test_robotstxt_meta(self):
         middleware = RobotsTxtMiddleware(self._get_successful_crawler())
         meta = {'dont_obey_robotstxt': True}
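
The helpers the new test relies on (_get_successful_crawler, assertNotIgnored and assertIgnored) are defined elsewhere in the test class and are not part of this hunk. Below is a minimal sketch of how such fixtures could be set up, assuming a mocked crawler whose engine.download() immediately returns a canned robots.txt response and assertion helpers built on Twisted's maybeDeferred; the names match the diff, but the bodies are illustrative, not the actual Scrapy test code.

from unittest import mock

from twisted.internet.defer import maybeDeferred, succeed
from twisted.trial import unittest

from scrapy.exceptions import IgnoreRequest
from scrapy.http import TextResponse
from scrapy.settings import Settings


class RobotsTxtMiddlewareTest(unittest.TestCase):
    # The test methods shown in the diff above would live in this class.

    def _get_successful_crawler(self):
        # Crawler stub with ROBOTSTXT_OBEY enabled whose engine.download()
        # "fetches" a canned robots.txt that blocks /admin/ and /static/.
        crawler = mock.MagicMock()
        crawler.settings = Settings({'ROBOTSTXT_OBEY': True})
        body = b"User-Agent: *\nDisallow: /admin/\nDisallow: /static/\n"
        response = TextResponse('http://site.local/robots.txt', body=body)
        crawler.engine.download.side_effect = (
            lambda request, spider: succeed(response))
        return crawler

    def assertNotIgnored(self, request, middleware):
        # process_request() may return None or a Deferred; maybeDeferred
        # covers both. If the middleware raises IgnoreRequest, the returned
        # Deferred fails and twisted.trial marks the test as failed.
        return maybeDeferred(middleware.process_request, request, None)

    def assertIgnored(self, request, middleware):
        # The request must be rejected with IgnoreRequest.
        return self.assertFailure(
            maybeDeferred(middleware.process_request, request, None),
            IgnoreRequest)

Returning the Deferred from assertNotIgnored is what lets test_robotstxt_ready_parser chain a second call with addCallback and hand the result back to twisted.trial, so both the initial request and the one issued after the parser is ready are checked against the same middleware instance.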