def _schedule(self, request, spider):
    """Open *spider* (creating a default one if none was given) and feed
    *request* to the engine.

    Returns a Deferred that fires with ``(response, spider)`` once the
    request has been downloaded.
    """
    if spider is None:
        # No spider supplied: pick one that matches the request, falling
        # back to a bare BaseSpider named 'default'.
        spider = create_spider_for_request(
            self.crawler.spiders, request,
            BaseSpider('default'), log_multiple=True)
    spider.set_crawler(self.crawler)
    self.crawler.engine.open_spider(spider)
    dfd = request_deferred(request)
    # Pair the eventual response with the spider that produced it.
    dfd.addCallback(lambda response: (response, spider))
    self.crawler.engine.crawl(request, spider)
    return dfd
def _check_media_to_download(self, result, request, info):
    """Return *result* if the media was already resolved (e.g. cache hit);
    otherwise start the download and return a Deferred for it.

    The Deferred fires through ``self.media_downloaded`` on success and
    ``self.media_failed`` on error, both called as ``cb(value, request, info)``.
    """
    if result is not None:
        return result
    if self.download_func:
        # this ugly code was left only to support tests. TODO: remove
        dfd = mustbe_deferred(self.download_func, request, info.spider)
    else:
        # Let the pipeline see every HTTP status, not just 2xx.
        request.meta['handle_httpstatus_all'] = True
        dfd = request_deferred(request)
    # Single shared callback wiring — previously duplicated in both branches.
    dfd.addCallbacks(
        callback=self.media_downloaded, callbackArgs=(request, info),
        errback=self.media_failed, errbackArgs=(request, info))
    if not self.download_func:
        # Schedule the real download; callbacks are attached first so the
        # original ordering (addCallbacks before crawl) is preserved.
        self.crawler.engine.crawl(request, info.spider)
    return dfd
def _schedule(self, request, spider):
    """Resolve the spider for *request* via ``_open_spider`` and schedule
    the request with the engine.

    Returns a Deferred firing with the ``(response, spider)`` pair.
    """
    spider = self._open_spider(request, spider)
    deferred = request_deferred(request)
    # Attach the spider so downstream callbacks receive both values.
    pair_with_spider = lambda response: (response, spider)
    deferred.addCallback(pair_with_spider)
    self.crawler.engine.crawl(request, spider)
    return deferred