1
0
mirror of https://github.com/scrapy/scrapy.git synced 2025-02-23 14:24:19 +00:00

Added Test

This commit is contained in:
purvaudai 2019-11-11 18:34:15 +05:30
parent 9e6e2dde2b
commit 970c3be160
2 changed files with 5 additions and 4 deletions

View File

@@ -201,7 +201,7 @@ class FeedExporter(object):
         self.settings = settings
         if not settings['FEED_URI']:
             raise NotConfigured
-        self.urifmt=str(settings['FEED_URI'])
+        self.urifmt = str(settings['FEED_URI'])
         self.format = settings['FEED_FORMAT'].lower()
         self.export_encoding = settings['FEED_EXPORT_ENCODING']
         self.storages = self._load_components('FEED_STORAGES')

View File

@@ -407,6 +407,7 @@ class FeedExportTest(unittest.TestCase):
         defaults = {
             'FEED_URI': res_uri,
             'FEED_FORMAT': 'csv',
+            'FEED_PATH': res_path
         }
         defaults.update(settings or {})
         try:
@@ -415,7 +416,7 @@ class FeedExportTest(unittest.TestCase):
             spider_cls.start_urls = [s.url('/')]
             yield runner.crawl(spider_cls)
-            with open(defaults['FEED_URI'], 'rb') as f:
+            with open(defaults['FEED_PATH'], 'rb') as f:
                 content = f.read()
         finally:
@@ -855,8 +856,8 @@ class FeedExportTest(unittest.TestCase):
             'FEED_FORMAT': 'csv',
             'FEED_STORE_EMPTY': True,
             'FEED_URI': feed_uri,
+            'FEED_PATH': feed_uri
         }
         data = yield self.exported_no_data(settings)
         self.assertEqual(data, b'')
         shutil.rmtree(tmpdir, ignore_errors=True)