1
0
mirror of https://github.com/scrapy/scrapy.git synced 2025-02-24 13:23:59 +00:00
scrapy/tests/test_logformatter.py
Omer Schleifer ff3e299eb0 [MRG+2] add flags to request (#2082)
* add flags to request

* fix test - add flags to request

* fix test(2) - add flags to request

* fix test(2) - add flags to request

* Updated test to reqser with flags field of request

* Updated documentation with flags field of request

* fix test indentation

* fix test failed

* make the change backward compatible

* remove unneeded spaces, fix documentation of request flags

* remove unneeded space

* fix assert equal

* flags default is empty list

* Add flags to request

* add flags to request

* fix test - add flags to request

* fix test(2) - add flags to request

* fix test(2) - add flags to request

* Updated test to reqser with flags field of request

* Updated documentation with flags field of request

* fix test indentation

* fix test failed

* make the change backward compatible

* remove unneeded spaces, fix documentation of request flags

* remove unneeded space

* fix assert equal

* flags default is empty list

* add flags to request squashed commits
2017-02-20 20:42:29 +06:00

69 lines
2.5 KiB
Python

import unittest
import six
from scrapy.spiders import Spider
from scrapy.http import Request, Response
from scrapy.item import Item, Field
from scrapy.logformatter import LogFormatter
class CustomItem(Item):
    """Minimal item with a single ``name`` field and a readable str() form."""

    name = Field()

    def __str__(self):
        # test_scraped asserts against this exact "name: <value>" rendering.
        return "name: %s" % self['name']
class LoggingContribTest(unittest.TestCase):
    """Exercise the message templates produced by the default LogFormatter."""

    def setUp(self):
        self.formatter = LogFormatter()
        self.spider = Spider('default')

    def _render(self, logkws):
        # Interpolate a formatter result dict into the final log line.
        return logkws['msg'] % logkws['args']

    def test_crawled(self):
        # Plain request/response: referer is None and no flags appear.
        request = Request("http://www.example.com")
        response = Response("http://www.example.com")
        line = self._render(self.formatter.crawled(request, response, self.spider))
        self.assertEqual(
            line,
            "Crawled (200) <GET http://www.example.com> (referer: None)")

        # Referer header and response flags both show up in the line.
        request = Request("http://www.example.com",
                          headers={'referer': 'http://example.com'})
        response = Response("http://www.example.com", flags=['cached'])
        line = self._render(self.formatter.crawled(request, response, self.spider))
        self.assertEqual(
            line,
            "Crawled (200) <GET http://www.example.com> (referer: http://example.com) ['cached']")

    def test_flags_in_request(self):
        # Request flags render right after the request, before the referer.
        request = Request("http://www.example.com", flags=['test', 'flag'])
        response = Response("http://www.example.com")
        line = self._render(self.formatter.crawled(request, response, self.spider))
        self.assertEqual(
            line,
            "Crawled (200) <GET http://www.example.com> ['test', 'flag'] (referer: None)")

    def test_dropped(self):
        # Non-ASCII exception text must come through as text, not bytes.
        dropped_item = {}
        exception = Exception(u"\u2018")
        response = Response("http://www.example.com")
        line = self._render(
            self.formatter.dropped(dropped_item, exception, response, self.spider))
        rendered_lines = line.splitlines()
        for rendered in rendered_lines:
            self.assertIsInstance(rendered, six.text_type)
        self.assertEqual(rendered_lines, [u"Dropped: \u2018", '{}'])

    def test_scraped(self):
        # The item's custom __str__ (with a non-ASCII value) is used verbatim.
        scraped_item = CustomItem()
        scraped_item['name'] = u'\xa3'
        response = Response("http://www.example.com")
        line = self._render(
            self.formatter.scraped(scraped_item, response, self.spider))
        rendered_lines = line.splitlines()
        for rendered in rendered_lines:
            self.assertIsInstance(rendered, six.text_type)
        self.assertEqual(rendered_lines, [u"Scraped from <200 http://www.example.com>", u'name: \xa3'])
if __name__ == "__main__":
    # Allow running this test module directly (python test_logformatter.py).
    unittest.main()