Mirror of https://github.com/scrapy/scrapy.git
Synced 2025-02-24 05:04:25 +00:00
Added SpiderQueue tests. SQS spider queue not tested because operations take too long to complete and it's not easy to know when they have. Closes #227
parent 8d1d3493e7
commit ff9de424c8
@@ -7,6 +7,7 @@ from cStringIO import StringIO
 from scrapy.spider import BaseSpider
 from scrapy.contrib.feedexport import FileFeedStorage, FTPFeedStorage, S3FeedStorage
 from scrapy.utils.url import path_to_file_uri
+from scrapy.utils.test import assert_aws_environ
 
 class FeedStorageTest(unittest.TestCase):
 
@@ -56,13 +57,11 @@ class S3FeedStorageTest(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_store(self):
+        assert_aws_environ()
         uri = os.environ.get('FEEDTEST_S3_URI')
         if not uri:
-            raise unittest.SkipTest("No S3 bucket available for testing")
-        try:
-            from boto import connect_s3
-        except ImportError:
-            raise unittest.SkipTest("Missing library: boto")
+            raise unittest.SkipTest("No S3 URI available for testing")
+        from boto import connect_s3
         storage = S3FeedStorage(uri)
         yield storage.store(StringIO("content"), BaseSpider("default"))
         u = urlparse.urlparse(uri)
@@ -1,21 +1,17 @@
 import os
 
 from twisted.trial import unittest
 from zope.interface.verify import verifyObject
 
 from scrapy.interfaces import ISpiderQueue
+from scrapy.utils.test import assert_aws_environ
 
 class SQSSpiderQueueTest(unittest.TestCase):
 
-    try:
-        import boto
-    except ImportError, e:
-        skip = str(e)
-
-    if 'AWS_ACCESS_KEY_ID' not in os.environ:
-        skip = "AWS keys not found"
+    def setUp(self):
+        assert_aws_environ()
 
     def test_interface(self):
        from scrapy.contrib.spiderqueue import SQSSpiderQueue
        verifyObject(ISpiderQueue, SQSSpiderQueue())
+
+# XXX: testing SQS queue operations is hard because there are long delays
+# for the operations to complete
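The commit message and the XXX note above describe the same problem: SQS is eventually consistent, so a test cannot tell when an add() has actually taken effect. A minimal workaround sketch, not part of this commit, would poll the queue until the expected state appears or a deadline passes; poll_until() and the timings below are illustrative assumptions only:

import time

def poll_until(condition, timeout=60, interval=5):
    """Call condition() every `interval` seconds until it returns True;
    raise AssertionError if `timeout` seconds pass first."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        if condition():
            return
        time.sleep(interval)
    raise AssertionError("condition not met within %s seconds" % timeout)

# Usage sketch: wait for an added message to become visible.
# q = SQSSpiderQueue()
# q.add('spider1')
# poll_until(lambda: q.count() >= 1)

Even with polling, each assertion could take many seconds, which is why the commit leaves SQS operations untested rather than slowing down the whole suite.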
@@ -1,4 +1,5 @@
-import unittest
+from twisted.internet.defer import inlineCallbacks, maybeDeferred
+from twisted.trial import unittest
 
 from zope.interface.verify import verifyObject
 
@@ -6,7 +7,60 @@ from scrapy.interfaces import ISpiderQueue
 from scrapy.spiderqueue import SqliteSpiderQueue
 
 class SpiderQueueTest(unittest.TestCase):
+    """This test case can be used easily for testing other SpiderQueue's by
+    just changing the _get_queue() method. It also supports queues with
+    deferred methods.
+    """
+
+    def setUp(self):
+        self.q = self._get_queue()
+        self.name = 'spider1'
+        self.args = {'arg1': 'val1', 'arg2': 2}
+        self.msg = self.args.copy()
+        self.msg['name'] = self.name
+
+    def _get_queue(self):
+        return SqliteSpiderQueue(':memory:')
 
     def test_interface(self):
-        verifyObject(ISpiderQueue, SqliteSpiderQueue())
+        verifyObject(ISpiderQueue, self.q)
+
+    @inlineCallbacks
+    def test_add_pop_count(self):
+        c = yield maybeDeferred(self.q.count)
+        self.assertEqual(c, 0)
+
+        yield maybeDeferred(self.q.add, self.name, **self.args)
+
+        c = yield maybeDeferred(self.q.count)
+        self.assertEqual(c, 1)
+
+        m = yield maybeDeferred(self.q.pop)
+        self.assertEqual(m, self.msg)
+
+        c = yield maybeDeferred(self.q.count)
+        self.assertEqual(c, 0)
+
+    @inlineCallbacks
+    def test_list(self):
+        l = yield maybeDeferred(self.q.list)
+        self.assertEqual(l, [])
+
+        yield maybeDeferred(self.q.add, self.name, **self.args)
+        yield maybeDeferred(self.q.add, self.name, **self.args)
+
+        l = yield maybeDeferred(self.q.list)
+        self.assertEqual(l, [self.msg, self.msg])
+
+    @inlineCallbacks
+    def test_clear(self):
+        yield maybeDeferred(self.q.add, self.name, **self.args)
+        yield maybeDeferred(self.q.add, self.name, **self.args)
+
+        c = yield maybeDeferred(self.q.count)
+        self.assertEqual(c, 2)
+
+        yield maybeDeferred(self.q.clear)
+
+        c = yield maybeDeferred(self.q.count)
+        self.assertEqual(c, 0)
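As the class docstring says, the suite is reusable: a subclass only needs to override _get_queue(), and because every call goes through maybeDeferred, a backend whose add/pop/count methods return Deferreds works unchanged. A minimal sketch of such reuse; the file-backed variant and its filename are hypothetical, not part of this commit:

from scrapy.spiderqueue import SqliteSpiderQueue

# Hypothetical subclass: run the whole SpiderQueueTest suite against a
# file-backed SQLite queue just by swapping the queue constructor.
class FileSpiderQueueTest(SpiderQueueTest):

    def _get_queue(self):
        return SqliteSpiderQueue('test_queue.db')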
@@ -5,8 +5,7 @@ This module contains some assorted functions used in tests
 import os
 
 import libxml2
-
 from scrapy.selector.document import Libxml2Document
+from twisted.trial.unittest import SkipTest
 
 def libxml2debug(testfunction):
     """Decorator for debugging libxml2 memory leaks inside a function.
@@ -28,3 +27,15 @@ def libxml2debug(testfunction):
         return newfunc
     else:
         return testfunction
+
+def assert_aws_environ():
+    """Asserts the current environment is suitable for running AWS tests.
+    Raises SkipTest with the reason if it's not.
+    """
+    try:
+        import boto
+    except ImportError, e:
+        raise SkipTest(str(e))
+
+    if 'AWS_ACCESS_KEY_ID' not in os.environ:
+        raise SkipTest("AWS keys not found")