mirror of https://github.com/scrapy/scrapy.git (synced 2025-02-21 04:13:26 +00:00)

remove ancient modules kept only for error messages

commit 36453348fa
parent d4c7cc848b
.coveragerc
@@ -5,6 +5,4 @@ omit =
     tests/*
     scrapy/xlib/*
     scrapy/conf.py
-    scrapy/stats.py
-    scrapy/project.py
     scrapy/log.py
conftest.py
@@ -11,8 +11,6 @@ def _py_files(folder):
 collect_ignore = [
     # deprecated or moved modules
     "scrapy/conf.py",
-    "scrapy/stats.py",
-    "scrapy/project.py",
     "scrapy/log.py",

     # not a test, but looks like a test
scrapy/project.py (deleted)
@@ -1,17 +0,0 @@
-
-"""
-Obsolete module, kept for giving a meaningful error message when trying to
-import.
-"""
-
-raise ImportError("""scrapy.project usage has become obsolete.
-
-If you want to get the Scrapy crawler from your extension, middleware or
-pipeline implement the `from_crawler` class method (or look up for extending
-components that have already done it, such as spiders).
-
-For example:
-
-    @classmethod
-    def from_crawler(cls, crawler):
-        return cls(crawler)""")
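The error message in the removed scrapy/project.py describes the migration path: receive the crawler through the `from_crawler` class method instead of importing a global. Below is a minimal sketch (not part of this commit) of that pattern; the extension name MyExtension and the spider_opened handler are illustrative only.

    # Illustrative sketch only: the from_crawler pattern the error message
    # recommends in place of the removed scrapy.project module.
    from scrapy import signals


    class MyExtension:
        def __init__(self, crawler):
            # Keep a reference to the running crawler (settings, stats, signals).
            self.crawler = crawler
            crawler.signals.connect(self.spider_opened, signal=signals.spider_opened)

        @classmethod
        def from_crawler(cls, crawler):
            # Scrapy instantiates the component through this hook, passing the crawler.
            return cls(crawler)

        def spider_opened(self, spider):
            spider.logger.info("Bot name from crawler settings: %s",
                               self.crawler.settings.get("BOT_NAME"))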
scrapy/stats.py (deleted)
@@ -1,8 +0,0 @@
-
-"""
-Obsolete module, kept for giving a meaningful error message when trying to
-import.
-"""
-
-raise ImportError("scrapy.stats usage has become obsolete, use "
-                  "`crawler.stats` attribute instead")
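For scrapy.stats, the replacement named in the error message is the stats collector exposed as `crawler.stats`. A minimal sketch (not from the commit) of a pipeline that obtains it via `from_crawler` and increments a counter; the pipeline name ItemCountPipeline and the stat key are made up for illustration.

    # Illustrative sketch only: accessing the stats collector via crawler.stats,
    # as advised by the removed scrapy/stats.py error message.
    class ItemCountPipeline:
        def __init__(self, stats):
            self.stats = stats

        @classmethod
        def from_crawler(cls, crawler):
            # crawler.stats is the StatsCollector for the running crawl.
            return cls(crawler.stats)

        def process_item(self, item, spider):
            self.stats.inc_value("itemcount/items_seen")
            return item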