def add_options(self, parser):
    BaseRunSpiderCommand.add_options(self, parser)
    parser.add_option(
        "--spider",
        dest="spider",
        default=None,
        help="use this spider without looking for one",
    )
    parser.add_option(
        "--pipelines",
        action="store_true",
        help="process items through pipelines",
    )
    parser.add_option(
        "--nolinks",
        dest="nolinks",
        action="store_true",
        help="don't show links to follow (extracted requests)",
    )
    parser.add_option(
        "--noitems",
        dest="noitems",
        action="store_true",
        help="don't show scraped items",
    )
    parser.add_option(
        "--nocolour",
        dest="nocolour",
        action="store_true",
        help="avoid using pygments to colorize the output",
    )
    parser.add_option(
        "-r",
        "--rules",
        dest="rules",
        action="store_true",
        help="use CrawlSpider rules to discover the callback",
    )
    parser.add_option(
        "-c",
        "--callback",
        dest="callback",
        help="use this callback for parsing, instead of looking for a callback",
    )
    parser.add_option(
        "-m",
        "--meta",
        dest="meta",
        help="inject extra meta into the Request, it must be a valid raw json string",
    )
    parser.add_option(
        "--cbkwargs",
        dest="cbkwargs",
        help="inject extra callback kwargs into the Request, it must be a valid raw json string",
    )
    parser.add_option(
        "-d",
        "--depth",
        dest="depth",
        type="int",
        default=1,
        help="maximum depth for parsing requests [default: %default]",
    )
    parser.add_option(
        "-v",
        "--verbose",
        dest="verbose",
        action="store_true",
        help="print each depth level one by one",
    )
def process_options(self, args, opts):
    BaseRunSpiderCommand.process_options(self, args, opts)
    self.process_request_meta(opts)
    self.process_request_cb_kwargs(opts)
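# A usage sketch for the options registered above (assuming this is Scrapy's
# "parse" command; the URL, the spider name "myspider", and the callback
# "parse_item" are hypothetical examples, not values from this file):
#
#   scrapy parse https://example.com/page --spider=myspider -c parse_item \
#       -m '{"foo": "bar"}' -d 2 --pipelines -v
#
# -m and --cbkwargs expect raw JSON strings, which process_options() parses via
# process_request_meta() and process_request_cb_kwargs().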