1
0
mirror of https://github.com/scrapy/scrapy.git synced 2025-02-25 15:03:56 +00:00

wrapped some big lines

This commit is contained in:
Pablo Hoffman 2009-08-31 18:50:20 -03:00
parent b6c1392f9b
commit 6c58d06f0f
2 changed files with 16 additions and 8 deletions

View File

@@ -14,11 +14,13 @@ class Command(ScrapyCommand):
return "Fetch a URL using the Scrapy downloader"
def long_desc(self):
return "Fetch a URL using the Scrapy downloader and print its content to stdout. You may want to use --nolog to disable logging"
return "Fetch a URL using the Scrapy downloader and print its content " \
"to stdout. You may want to use --nolog to disable logging"
def add_options(self, parser):
ScrapyCommand.add_options(self, parser)
parser.add_option("--headers", dest="headers", action="store_true", help="print HTTP headers instead of body")
parser.add_option("--headers", dest="headers", action="store_true", \
help="print HTTP headers instead of body")
def run(self, args, opts):
if not args:

View File

@@ -18,11 +18,16 @@ class Command(ScrapyCommand):
def add_options(self, parser):
ScrapyCommand.add_options(self, parser)
parser.add_option("--nolinks", dest="nolinks", action="store_true", help="don't show extracted links")
parser.add_option("--noitems", dest="noitems", action="store_true", help="don't show scraped items")
parser.add_option("--nocolour", dest="nocolour", action="store_true", help="avoid using pygments to colorize the output")
parser.add_option("-r", "--rules", dest="rules", action="store_true", help="try to match and parse the url with the defined rules (if any)")
parser.add_option("-c", "--callbacks", dest="callbacks", action="store", help="use the provided callback(s) for parsing the url (separated with commas)")
parser.add_option("--nolinks", dest="nolinks", action="store_true", \
help="don't show extracted links")
parser.add_option("--noitems", dest="noitems", action="store_true", \
help="don't show scraped items")
parser.add_option("--nocolour", dest="nocolour", action="store_true", \
help="avoid using pygments to colorize the output")
parser.add_option("-r", "--rules", dest="rules", action="store_true", \
help="try to match and parse the url with the defined rules (if any)")
parser.add_option("-c", "--callbacks", dest="callbacks", action="store", \
help="use the provided callback(s) for parsing the url (separated with commas)")
def process_options(self, args, opts):
super(Command, self).process_options(args, opts)
@@ -91,7 +96,8 @@ class Command(ScrapyCommand):
self.print_results(items, links, rule.callback, opts)
break
else:
log.msg('No rules found for spider "%s", please specify a callback for parsing' % spider.domain_name)
log.msg('No rules found for spider "%s", please specify a callback for parsing' \
% spider.domain_name)
continue
else: