1
0
mirror of https://github.com/scrapy/scrapy.git synced 2025-02-24 10:24:13 +00:00

Updating S3FeedStorage instantiation without AWS keys.

This commit is contained in:
nyov 2019-02-23 17:49:58 +00:00
parent 7a398b7086
commit 5442c2d3c3
2 changed files with 7 additions and 3 deletions

View File

@ -98,7 +98,8 @@ class S3FeedStorage(BlockingFeedStorage):
# without using from_crawler)
no_defaults = access_key is None and secret_key is None
if no_defaults:
from scrapy.conf import settings
from scrapy.utils.project import get_project_settings
settings = get_project_settings()
if 'AWS_ACCESS_KEY_ID' in settings or 'AWS_SECRET_ACCESS_KEY' in settings:
import warnings
from scrapy.exceptions import ScrapyDeprecationWarning

View File

@ -26,6 +26,7 @@ from scrapy.extensions.feedexport import (
BlockingFeedStorage)
from scrapy.utils.test import assert_aws_environ, get_s3_content_and_delete, get_crawler
from scrapy.utils.python import to_native_str
from scrapy.utils.project import get_project_settings
class FileFeedStorageTest(unittest.TestCase):
@ -134,8 +135,10 @@ class BlockingFeedStorageTest(unittest.TestCase):
class S3FeedStorageTest(unittest.TestCase):
@mock.patch('scrapy.conf.settings', new={'AWS_ACCESS_KEY_ID': 'conf_key',
'AWS_SECRET_ACCESS_KEY': 'conf_secret'}, create=True)
@mock.patch('scrapy.utils.project.get_project_settings',
new=mock.MagicMock(return_value={'AWS_ACCESS_KEY_ID': 'conf_key',
'AWS_SECRET_ACCESS_KEY': 'conf_secret'}),
create=True)
def test_parse_credentials(self):
try:
import boto