
Move default settings to settings/default_settings.py.

modified:   scrapy/pipelines/files.py
modified:   scrapy/pipelines/images.py
modified:   scrapy/settings/default_settings.py
djunzu 2016-03-31 19:20:33 -03:00
parent e9d48f8a8e
commit c7fc17866f
3 changed files with 19 additions and 17 deletions
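
The change moves per-pipeline defaults (file expiry, URL/result item field names, image size limits and thumbnail sizes) into scrapy/settings/default_settings.py, so the from_settings() methods can drop their inline fallback values. A minimal sketch of why the fallbacks become redundant, assuming a stock Settings object (Scrapy's Settings loads default_settings.py at the lowest, 'default' priority); the override value 30 is illustrative only:

    from scrapy.settings import Settings

    settings = Settings()                     # picks up scrapy/settings/default_settings.py
    print(settings.getint('FILES_EXPIRES'))   # 90, now supplied by default_settings
    print(settings.get('FILES_URLS_FIELD'))   # 'file_urls'

    # A project-level value still wins, because it sits above the 'default' priority.
    settings.set('FILES_EXPIRES', 30, priority='project')
    print(settings.getint('FILES_EXPIRES'))   # 30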

scrapy/pipelines/files.py

@@ -213,14 +213,11 @@ class FilesPipeline(MediaPipeline):
     """
     MEDIA_NAME = "file"
-    EXPIRES = 90
     STORE_SCHEMES = {
         '': FSFilesStore,
         'file': FSFilesStore,
         's3': S3FilesStore,
     }
-    DEFAULT_FILES_URLS_FIELD = 'file_urls'
-    DEFAULT_FILES_RESULT_FIELD = 'files'

     def __init__(self, store_uri, download_func=None):
         if not store_uri:
@@ -235,9 +232,9 @@ class FilesPipeline(MediaPipeline):
         s3store.AWS_SECRET_ACCESS_KEY = settings['AWS_SECRET_ACCESS_KEY']
         s3store.POLICY = settings['FILES_STORE_S3_ACL']

-        cls.FILES_URLS_FIELD = settings.get('FILES_URLS_FIELD', cls.DEFAULT_FILES_URLS_FIELD)
-        cls.FILES_RESULT_FIELD = settings.get('FILES_RESULT_FIELD', cls.DEFAULT_FILES_RESULT_FIELD)
-        cls.EXPIRES = settings.getint('FILES_EXPIRES', 90)
+        cls.FILES_URLS_FIELD = settings.get('FILES_URLS_FIELD')
+        cls.FILES_RESULT_FIELD = settings.get('FILES_RESULT_FIELD')
+        cls.EXPIRES = settings.getint('FILES_EXPIRES')
         store_uri = settings['FILES_STORE']
         return cls(store_uri)
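
With the class-level defaults gone, constructing the pipeline from a bare Settings object still yields the old behaviour, because the values now come from default_settings.py. A hedged sketch, not part of the commit; the FILES_STORE path is an assumption for illustration:

    from scrapy.settings import Settings
    from scrapy.pipelines.files import FilesPipeline

    settings = Settings({'FILES_STORE': '/tmp/files'})   # store location is illustrative
    pipeline = FilesPipeline.from_settings(settings)
    print(pipeline.EXPIRES)            # 90, read via settings.getint('FILES_EXPIRES')
    print(pipeline.FILES_URLS_FIELD)   # 'file_urls', read via settings.get('FILES_URLS_FIELD')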

scrapy/pipelines/images.py

@@ -36,24 +36,19 @@ class ImagesPipeline(FilesPipeline):
     """
     MEDIA_NAME = 'image'
-    MIN_WIDTH = 0
-    MIN_HEIGHT = 0
-    THUMBS = {}
-    DEFAULT_IMAGES_URLS_FIELD = 'image_urls'
-    DEFAULT_IMAGES_RESULT_FIELD = 'images'

     @classmethod
     def from_settings(cls, settings):
-        cls.MIN_WIDTH = settings.getint('IMAGES_MIN_WIDTH', 0)
-        cls.MIN_HEIGHT = settings.getint('IMAGES_MIN_HEIGHT', 0)
-        cls.EXPIRES = settings.getint('IMAGES_EXPIRES', 90)
-        cls.THUMBS = settings.get('IMAGES_THUMBS', {})
+        cls.MIN_WIDTH = settings.getint('IMAGES_MIN_WIDTH')
+        cls.MIN_HEIGHT = settings.getint('IMAGES_MIN_HEIGHT')
+        cls.EXPIRES = settings.getint('IMAGES_EXPIRES')
+        cls.THUMBS = settings.get('IMAGES_THUMBS')
         s3store = cls.STORE_SCHEMES['s3']
         s3store.AWS_ACCESS_KEY_ID = settings['AWS_ACCESS_KEY_ID']
         s3store.AWS_SECRET_ACCESS_KEY = settings['AWS_SECRET_ACCESS_KEY']
-        cls.IMAGES_URLS_FIELD = settings.get('IMAGES_URLS_FIELD', cls.DEFAULT_IMAGES_URLS_FIELD)
-        cls.IMAGES_RESULT_FIELD = settings.get('IMAGES_RESULT_FIELD', cls.DEFAULT_IMAGES_RESULT_FIELD)
+        cls.IMAGES_URLS_FIELD = settings.get('IMAGES_URLS_FIELD')
+        cls.IMAGES_RESULT_FIELD = settings.get('IMAGES_RESULT_FIELD')
         store_uri = settings['IMAGES_STORE']
         return cls(store_uri)
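
Since the image defaults are now ordinary settings, a project can tune them from its own settings.py instead of relying on the class attributes removed above. A sketch of such a settings.py; every value below is illustrative, not something introduced by this commit:

    # myproject/settings.py (hypothetical project)
    ITEM_PIPELINES = {'scrapy.pipelines.images.ImagesPipeline': 1}
    IMAGES_STORE = '/tmp/images'   # assumed storage location
    IMAGES_EXPIRES = 30            # overrides the new default of 90 days
    IMAGES_THUMBS = {              # overrides the new default of {}
        'small': (50, 50),
        'big': (270, 270),
    }
    IMAGES_MIN_WIDTH = 110         # skip images narrower than 110 px
    IMAGES_MIN_HEIGHT = 110        # skip images shorter than 110 px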

scrapy/settings/default_settings.py

@@ -159,6 +159,9 @@ FEED_EXPORTERS_BASE = {
 }

 FILES_STORE_S3_ACL = 'private'
+FILES_EXPIRES = 90
+FILES_URLS_FIELD = 'file_urls'
+FILES_RESULT_FIELD = 'files'

 HTTPCACHE_ENABLED = False
 HTTPCACHE_DIR = 'httpcache'
@@ -175,6 +178,13 @@ HTTPCACHE_GZIP = False
 HTTPPROXY_AUTH_ENCODING = 'latin-1'

+IMAGES_MIN_WIDTH = 0
+IMAGES_MIN_HEIGHT = 0
+IMAGES_EXPIRES = 90
+IMAGES_THUMBS = {}
+IMAGES_URLS_FIELD = 'image_urls'
+IMAGES_RESULT_FIELD = 'images'
+
 ITEM_PROCESSOR = 'scrapy.pipelines.ItemPipelineManager'
 ITEM_PIPELINES = {}
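
The same applies to the files pipeline entries added above: FILES_URLS_FIELD and FILES_RESULT_FIELD are regular settings whose defaults now live here, so a project that stores download URLs under a different item field only needs to override them. The field names below are hypothetical:

    # myproject/settings.py (hypothetical project)
    ITEM_PIPELINES = {'scrapy.pipelines.files.FilesPipeline': 1}
    FILES_STORE = '/tmp/files'          # assumed storage location
    FILES_URLS_FIELD = 'pdf_urls'       # pipeline reads download URLs from item['pdf_urls']
    FILES_RESULT_FIELD = 'pdf_files'    # download results are written to item['pdf_files']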