Mirror of https://github.com/scrapy/scrapy.git, synced 2025-02-06 09:07:32 +00:00

Be consistent with domain used for links to documentation website

This commit is contained in:
parent: 88326cd8be
commit: 65d631329a
@@ -1,5 +1,5 @@
 The guidelines for contributing are available here:
-https://doc.scrapy.org/en/master/contributing.html
+https://docs.scrapy.org/en/master/contributing.html
 
 Please do not abuse the issue tracker for support questions.
 If your issue topic can be rephrased to "How to ...?", please use the
INSTALL (2 changes)
@@ -1,4 +1,4 @@
 For information about installing Scrapy see:
 
 * docs/intro/install.rst (local file)
-* https://doc.scrapy.org/en/latest/intro/install.html (online version)
+* https://docs.scrapy.org/en/latest/intro/install.html (online version)
@@ -51,18 +51,18 @@ The quick way::
 
     pip install scrapy
 
 For more details see the install section in the documentation:
-https://doc.scrapy.org/en/latest/intro/install.html
+https://docs.scrapy.org/en/latest/intro/install.html
 
 Documentation
 =============
 
-Documentation is available online at https://doc.scrapy.org/ and in the ``docs``
+Documentation is available online at https://docs.scrapy.org/ and in the ``docs``
 directory.
 
 Releases
 ========
 
-You can find release notes at https://doc.scrapy.org/en/latest/news.html
+You can find release notes at https://docs.scrapy.org/en/latest/news.html
 
 Community (blog, twitter, mail list, IRC)
 =========================================
@@ -72,7 +72,7 @@ See https://scrapy.org/community/
 Contributing
 ============
 
-See https://doc.scrapy.org/en/master/contributing.html
+See https://docs.scrapy.org/en/master/contributing.html
 
 Code of Conduct
 ---------------
@@ -7,7 +7,7 @@ Contributing to Scrapy
 .. important::
 
     Double check that you are reading the most recent version of this document at
-    https://doc.scrapy.org/en/master/contributing.html
+    https://docs.scrapy.org/en/master/contributing.html
 
 There are many ways to contribute to Scrapy. Here are some of them:
 
@@ -100,7 +100,7 @@ To explain how to use the selectors we'll use the `Scrapy shell` (which
 provides interactive testing) and an example page located in the Scrapy
 documentation server:
 
-    https://doc.scrapy.org/en/latest/_static/selectors-sample1.html
+    https://docs.scrapy.org/en/latest/_static/selectors-sample1.html
 
 .. _topics-selectors-htmlcode:
 
@@ -113,7 +113,7 @@ For the sake of completeness, here's its full HTML code:
 
 First, let's open the shell::
 
-    scrapy shell https://doc.scrapy.org/en/latest/_static/selectors-sample1.html
+    scrapy shell https://docs.scrapy.org/en/latest/_static/selectors-sample1.html
 
 Then, after the shell loads, you'll have the response available as ``response``
 shell variable, and its attached selector in ``response.selector`` attribute.
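(Aside, not part of this commit: once that shell is open, ``response`` and ``response.selector`` can be queried directly. A minimal sketch; the queries assume the sample page's ``<title>`` and ``<a>`` elements and are illustrative only)::

    # Inside the shell started above; `response` is already bound to the page.
    response.selector.xpath('//title/text()').extract_first()  # page title text
    response.css('a::attr(href)').extract()                    # all link hrefs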
@@ -112,7 +112,7 @@ class TelnetConsole(protocol.ServerFactory):
             'prefs': print_live_refs,
             'hpy': hpy,
             'help': "This is Scrapy telnet console. For more info see: "
-                    "https://doc.scrapy.org/en/latest/topics/telnetconsole.html",
+                    "https://docs.scrapy.org/en/latest/topics/telnetconsole.html",
         }
         self.crawler.signals.send_catch_log(update_telnet_vars, telnet_vars=telnet_vars)
         return telnet_vars
@@ -3,7 +3,7 @@
 # Define here the models for your scraped items
 #
 # See documentation in:
-# https://doc.scrapy.org/en/latest/topics/items.html
+# https://docs.scrapy.org/en/latest/topics/items.html
 
 import scrapy
 
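(Aside, not part of this commit: a project's items.py generated from this template is normally filled in with field declarations. A minimal sketch; the ``ProductItem`` class and its fields are hypothetical)::

    import scrapy

    class ProductItem(scrapy.Item):
        # Declare one scrapy.Field() per attribute the spiders will populate.
        name = scrapy.Field()
        price = scrapy.Field()
        url = scrapy.Field()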
@@ -3,7 +3,7 @@
 # Define here the models for your spider middleware
 #
 # See documentation in:
-# https://doc.scrapy.org/en/latest/topics/spider-middleware.html
+# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
 
 from scrapy import signals
 
@@ -3,7 +3,7 @@
 # Define your item pipelines here
 #
 # Don't forget to add your pipeline to the ITEM_PIPELINES setting
-# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
+# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
 
 
 class ${ProjectName}Pipeline(object):
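(Aside, not part of this commit: a pipeline built from this template implements ``process_item``. A minimal sketch with an illustrative validation step; the class name and the ``price`` field are hypothetical)::

    from scrapy.exceptions import DropItem

    class PricePipeline(object):
        def process_item(self, item, spider):
            # Discard items missing the (hypothetical) 'price' field,
            # otherwise pass the item on unchanged.
            if not item.get('price'):
                raise DropItem("Missing price in %s" % item)
            return item

Such a pipeline would then be enabled through the commented ITEM_PIPELINES setting shown in the settings template below.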
@@ -5,9 +5,9 @@
 # For simplicity, this file contains only settings considered important or
 # commonly used. You can find more settings consulting the documentation:
 #
-#     https://doc.scrapy.org/en/latest/topics/settings.html
-#     https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
-#     https://doc.scrapy.org/en/latest/topics/spider-middleware.html
+#     https://docs.scrapy.org/en/latest/topics/settings.html
+#     https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
+#     https://docs.scrapy.org/en/latest/topics/spider-middleware.html
 
 BOT_NAME = '$project_name'
 
@@ -25,7 +25,7 @@ ROBOTSTXT_OBEY = True
 #CONCURRENT_REQUESTS = 32
 
 # Configure a delay for requests for the same website (default: 0)
-# See https://doc.scrapy.org/en/latest/topics/settings.html#download-delay
+# See https://docs.scrapy.org/en/latest/topics/settings.html#download-delay
 # See also autothrottle settings and docs
 #DOWNLOAD_DELAY = 3
 # The download delay setting will honor only one of:
@@ -45,31 +45,31 @@ ROBOTSTXT_OBEY = True
 #}
 
 # Enable or disable spider middlewares
-# See https://doc.scrapy.org/en/latest/topics/spider-middleware.html
+# See https://docs.scrapy.org/en/latest/topics/spider-middleware.html
 #SPIDER_MIDDLEWARES = {
 #    '$project_name.middlewares.${ProjectName}SpiderMiddleware': 543,
 #}
 
 # Enable or disable downloader middlewares
-# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
+# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
 #DOWNLOADER_MIDDLEWARES = {
 #    '$project_name.middlewares.${ProjectName}DownloaderMiddleware': 543,
 #}
 
 # Enable or disable extensions
-# See https://doc.scrapy.org/en/latest/topics/extensions.html
+# See https://docs.scrapy.org/en/latest/topics/extensions.html
 #EXTENSIONS = {
 #    'scrapy.extensions.telnet.TelnetConsole': None,
 #}
 
 # Configure item pipelines
-# See https://doc.scrapy.org/en/latest/topics/item-pipeline.html
+# See https://docs.scrapy.org/en/latest/topics/item-pipeline.html
 #ITEM_PIPELINES = {
 #    '$project_name.pipelines.${ProjectName}Pipeline': 300,
 #}
 
 # Enable and configure the AutoThrottle extension (disabled by default)
-# See https://doc.scrapy.org/en/latest/topics/autothrottle.html
+# See https://docs.scrapy.org/en/latest/topics/autothrottle.html
 #AUTOTHROTTLE_ENABLED = True
 # The initial download delay
 #AUTOTHROTTLE_START_DELAY = 5
@@ -82,7 +82,7 @@ ROBOTSTXT_OBEY = True
 #AUTOTHROTTLE_DEBUG = False
 
 # Enable and configure HTTP caching (disabled by default)
-# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
+# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
 #HTTPCACHE_ENABLED = True
 #HTTPCACHE_EXPIRATION_SECS = 0
 #HTTPCACHE_DIR = 'httpcache'
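(Aside, not part of this commit: the commented options in the template above are activated by uncommenting them in the generated settings.py. A minimal sketch of a few commonly toggled values; the project name and numbers are illustrative, not recommendations)::

    BOT_NAME = 'mybot'             # hypothetical project name

    ROBOTSTXT_OBEY = True
    DOWNLOAD_DELAY = 3             # seconds between requests to the same site
    AUTOTHROTTLE_ENABLED = True
    HTTPCACHE_ENABLED = True
    HTTPCACHE_EXPIRATION_SECS = 0  # 0 keeps cached responses indefinitely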
@@ -61,7 +61,7 @@ ItemForm
 --------
 
 Pros:
-- same API used for Items (see https://doc.scrapy.org/en/latest/topics/items.html)
+- same API used for Items (see https://docs.scrapy.org/en/latest/topics/items.html)
 - some people consider setitem API more elegant than methods API
 
 Cons:
@@ -16,7 +16,7 @@ Motivation
 ==========
 
 When you use Selectors in Scrapy, your final goal is to "extract" the data that
-you've selected, as the [https://doc.scrapy.org/en/latest/topics/selectors.html
+you've selected, as the [https://docs.scrapy.org/en/latest/topics/selectors.html
 XPath Selectors documentation] says (bolding by me):
 
     When you’re scraping web pages, the most common task you need to perform is
@@ -71,5 +71,5 @@ webpage or set of pages.
 References
 ==========
 
-1. XPath Selectors (https://doc.scrapy.org/topics/selectors.html)
+1. XPath Selectors (https://docs.scrapy.org/topics/selectors.html)
 2. XPath and XSLT with lxml (http://lxml.de/xpathxslt.html)
@@ -1,7 +1,7 @@
 """
 tests: this package contains all Scrapy unittests
 
-see https://doc.scrapy.org/en/latest/contributing.html#running-tests
+see https://docs.scrapy.org/en/latest/contributing.html#running-tests
 """
 
 import os