1
0
mirror of https://github.com/scrapy/scrapy.git synced 2025-02-24 08:03:59 +00:00

added COOKIES_ENABLED setting to support disabling the cookies middleware

This commit is contained in:
Pablo Hoffman 2011-05-27 00:35:34 -03:00
parent 756bf0cc06
commit 2fa0f75f2d
4 changed files with 19 additions and 12 deletions

View File

@ -168,14 +168,26 @@ CookiesMiddleware
.. class:: CookiesMiddleware
This middleware enables working with sites that need cookies. It keeps track
of merging cookies sent by servers, so that they're send in future requests
for that spider, just like a web browser would do.
This middleware enables working with sites that require cookies, such as
those that use sessions. It keeps track of cookies sent by web servers, and
sends them back on subsequent requests (from that spider), just like web
browsers do.
The following settings can be used to configure the cookie middleware:
* :setting:`COOKIES_ENABLED`
* :setting:`COOKIES_DEBUG`
.. setting:: COOKIES_ENABLED
COOKIES_ENABLED
~~~~~~~~~~~~~~~
Default: ``True``
Whether to enable the cookies middleware. If disabled, no cookies will be sent
to web servers.
.. setting:: COOKIES_DEBUG
COOKIES_DEBUG

View File

@ -261,15 +261,6 @@ Default: ``8``
Maximum number of spiders to scrape in parallel.
.. setting:: COOKIES_DEBUG
COOKIES_DEBUG
-------------
Default: ``False``
Enable logging of debug messages for the Cookies Downloader Middleware.
.. setting:: DEFAULT_ITEM_CLASS
DEFAULT_ITEM_CLASS

View File

@ -3,6 +3,7 @@ from collections import defaultdict
from scrapy.xlib.pydispatch import dispatcher
from scrapy import signals
from scrapy.exceptions import NotConfigured
from scrapy.http import Response
from scrapy.http.cookies import CookieJar
from scrapy.conf import settings
@ -14,6 +15,8 @@ class CookiesMiddleware(object):
# Class-level flag read once from settings; when true the middleware
# presumably emits debug output about cookies (see the COOKIES_DEBUG
# setting documented in this commit) — handler not visible in this view.
debug = settings.getbool('COOKIES_DEBUG')
# Initialize the middleware, honoring the new COOKIES_ENABLED setting.
#
# Raises NotConfigured when COOKIES_ENABLED is false — by Scrapy
# convention this tells the framework to skip installing the component,
# which is exactly the "disable the cookies middleware" behavior this
# commit adds.
def __init__(self):
if not settings.getbool('COOKIES_ENABLED'):
raise NotConfigured
# One CookieJar per key, created lazily on first access
# (presumably keyed per spider — spider handling not visible here).
self.jars = defaultdict(CookieJar)
# Subscribe to spider_closed; spider_closed handler itself is outside
# this diff fragment.
dispatcher.connect(self.spider_closed, signals.spider_closed)

View File

@ -28,6 +28,7 @@ CONCURRENT_ITEMS = 100
# Concurrency defaults (per-spider request limit; description not shown
# in this diff fragment).
CONCURRENT_REQUESTS_PER_SPIDER = 8
# Maximum number of spiders to scrape in parallel (documented above).
CONCURRENT_SPIDERS = 8
# Whether to enable the cookies middleware; if disabled, no cookies are
# sent to web servers (new setting introduced by this commit).
COOKIES_ENABLED = True
# Enable debugging messages of the Cookies Downloader Middleware.
COOKIES_DEBUG = False
# Default class used for scraped items.
DEFAULT_ITEM_CLASS = 'scrapy.item.Item'