mirror of https://github.com/scrapy/scrapy.git
synced 2025-02-23 04:44:04 +00:00

commit e8b5a07a15
parent 30202c54a4

cPickle as pickle

--HG--
extra : convert_revision : svn%3Ab85faa78-f9eb-468e-a121-7cced6da292c%4089

@@ -4,7 +4,7 @@ import os
 import hashlib
 import datetime
 import urlparse
-import cPickle
+import cPickle as pickle
 from pydispatch import dispatcher
 from twisted.internet import defer
 
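
The only functional change in this hunk is the import alias: binding cPickle to the name pickle lets every later call site use the standard pickle API. A minimal sketch of the related fallback idiom (illustrative, not part of this commit):

    # Assumption: this fallback is not in the commit above; it keeps the fast
    # C implementation when available and degrades to the pure-Python module
    # otherwise, while call sites stay on pickle.load()/pickle.dump().
    try:
        import cPickle as pickle
    except ImportError:
        import pickle
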
@@ -114,7 +114,7 @@ class Cache(object):
         pickled_meta = os.path.join(self.requestpath(domain, key), 'pickled_meta')
         if os.path.exists(pickled_meta):
             with open(pickled_meta, 'rb') as f:
-                metadata = cPickle.load(f)
+                metadata = pickle.load(f)
             if datetime.datetime.utcnow() <= metadata['timestamp'] + datetime.timedelta(seconds=settings.getint('CACHE2_EXPIRATION_SECS')):
                 return True
             else:
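
The unchanged context in this hunk shows the freshness rule the cache applies: an entry is served only while the current UTC time is still within CACHE2_EXPIRATION_SECS of the pickled timestamp. A standalone sketch of that check, with a plain expiration_secs argument standing in for the settings.getint() lookup:

    import datetime

    def is_fresh(stored_timestamp, expiration_secs):
        # Fresh while "now" has not passed the stored timestamp plus the TTL.
        deadline = stored_timestamp + datetime.timedelta(seconds=expiration_secs)
        return datetime.datetime.utcnow() <= deadline

    # An entry stored ten minutes ago with a one-hour TTL is still fresh.
    stored = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
    print(is_fresh(stored, 3600))  # True
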
@@ -134,7 +134,7 @@ class Cache(object):
         requestpath = self.requestpath(domain, key)
         metadata = responsebody = responseheaders = None
         with open(os.path.join(requestpath, 'pickled_meta'), 'rb') as f:
-            metadata = cPickle.load(f)
+            metadata = pickle.load(f)
         with open(os.path.join(requestpath, 'response_body')) as f:
             responsebody = f.read()
         with open(os.path.join(requestpath, 'response_headers')) as f:

@@ -170,7 +170,7 @@ class Cache(object):
             f.write(repr(metadata))
         # pickled metadata (to recover without using eval)
         with open(os.path.join(requestpath, 'pickled_meta'), 'wb') as f:
-            cPickle.dump(metadata, f, -1)
+            pickle.dump(metadata, f, -1)
         # response
         with open(os.path.join(requestpath, 'response_headers'), 'w') as f:
             f.write(headers_dict_to_raw(response.headers))
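
The last two Cache hunks are the two halves of one round trip: dump the metadata dict into pickled_meta with the highest protocol (-1) on write, load it back on a cache hit. A self-contained sketch of that round trip; the temporary directory and the 'url' field are made up, while 'pickled_meta', 'timestamp' and protocol -1 come from the diff:

    import datetime
    import os
    import pickle    # on Python 2 this would be the cPickle-as-pickle alias
    import tempfile

    requestpath = tempfile.mkdtemp()
    metadata = {'timestamp': datetime.datetime.utcnow(), 'url': 'http://example.com/'}

    with open(os.path.join(requestpath, 'pickled_meta'), 'wb') as f:
        pickle.dump(metadata, f, -1)   # -1 selects the highest pickle protocol

    with open(os.path.join(requestpath, 'pickled_meta'), 'rb') as f:
        assert pickle.load(f) == metadata
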
@@ -86,6 +86,11 @@ class XPathSelector(object):
 class XPathSelectorList(list):
     """List of XPathSelector objects"""
 
+    def __getitem__(self, i):
+        print '__getitem__'
+        self.__getitem__(i)
+        #return XPathSelectorList(list.__getslice__(self, i, j))
+
     def x(self, xpath):
         """Perform the given XPath query on each XPathSelector of the list and
         return a new (flattened) XPathSelectorList of the results"""
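
Note that the __getitem__ added in this hunk calls self.__getitem__(i) again and returns nothing, so indexing an XPathSelectorList would recurse until the interpreter's recursion limit is hit; the commented-out line hints at delegating to the underlying list instead. A sketch of that delegating variant (not the code in this commit):

    class XPathSelectorList(list):
        """List of XPathSelector objects"""

        def __getitem__(self, i):
            # Delegate the lookup to list itself, then re-wrap the result so
            # a slice of selectors is still an XPathSelectorList.
            result = list.__getitem__(self, i)
            if isinstance(result, list):
                return XPathSelectorList(result)
            return result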