
added support for the -a option to the runspider command (like it works with the crawl command)

commit 315457c2ef
parent ab6a4d053f
Author: Pablo Hoffman
Date:   2011-05-17 22:07:49 -03:00

--- a/scrapy/commands/runspider.py
+++ b/scrapy/commands/runspider.py

@@ -4,6 +4,7 @@ import os
 from scrapy.utils.spider import iter_spider_classes
 from scrapy.command import ScrapyCommand
 from scrapy.exceptions import UsageError
+from scrapy.utils.conf import arglist_to_dict
 
 def _import_file(filepath):
     abspath = os.path.abspath(filepath)
@@ -33,8 +34,17 @@ class Command(ScrapyCommand):
     def long_desc(self):
         return "Run the spider defined in the given file"
 
+    def add_options(self, parser):
+        ScrapyCommand.add_options(self, parser)
+        parser.add_option("-a", dest="spargs", action="append", default=[], metavar="NAME=VALUE", \
+            help="set spider argument (may be repeated)")
+
     def process_options(self, args, opts):
         ScrapyCommand.process_options(self, args, opts)
+        try:
+            opts.spargs = arglist_to_dict(opts.spargs)
+        except ValueError:
+            raise UsageError("Invalid -a value, use -a NAME=VALUE", print_help=False)
 
     def run(self, args, opts):
         if len(args) != 1:
@@ -49,7 +59,7 @@ class Command(ScrapyCommand):
         spclasses = list(iter_spider_classes(module))
         if not spclasses:
             raise UsageError("No spider found in file: %s\n" % filename)
-        spider = spclasses.pop()()
+        spider = spclasses.pop()(**opts.spargs)
         # schedule spider and start engine
         self.crawler.queue.append_spider(spider)
         self.crawler.start()
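
The try/except in process_options works because arglist_to_dict (imported from scrapy.utils.conf) turns the collected list of 'NAME=VALUE' strings into a dict, and raises ValueError for any entry missing an '='. A sketch of that helper, close to what scrapy ships:

    def arglist_to_dict(arglist):
        # 'name=value' strings -> {'name': 'value'}; an entry without '='
        # splits into a 1-item sequence, which makes dict() raise ValueError
        return dict(x.split('=', 1) for x in arglist)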
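
With this in place, runspider accepts spider arguments the same way crawl already did. A minimal usage sketch (the file name and argument names below are hypothetical, not part of this commit):

    scrapy runspider myspider.py -a category=electronics -a limit=50

Each -a NAME=VALUE pair ends up in the spargs dict and is passed to the spider constructor via spclasses.pop()(**opts.spargs), so a spider of that era could pick them up like this:

    # myspider.py -- hypothetical example file
    from scrapy.spider import BaseSpider

    class MySpider(BaseSpider):
        name = 'myspider'

        def __init__(self, category=None, limit=None, **kwargs):
            # arguments given with -a arrive here as keyword arguments
            super(MySpider, self).__init__(**kwargs)
            self.category = category
            self.limit = limit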