def __init__(self, settings, spider_closed_callback):
    """Wire up the engine's core components from raw *settings*.

    ``spider_closed_callback`` is invoked once a spider has fully shut down.
    """
    self.settings = settings
    # Spiders currently being shut down, keyed by spider:
    # ``closing`` maps to the close reason, ``closing_dfds`` to the
    # deferred that fires when shutdown completes.
    self.closing = {}
    self.closing_dfds = {}
    # Engine state flags.
    self.running = False
    self.paused = False
    # Pending "next request" scheduled calls, keyed by spider.
    self._next_request_calls = {}
    # Instantiate the scheduler class named in the SCHEDULER setting.
    self.scheduler = load_object(settings['SCHEDULER'])()
    self.downloader = Downloader()
    self.scraper = Scraper(self, self.settings)
    self._spider_closed_callback = spider_closed_callback
def __init__(self, crawler, spider_closed_callback):
    """Wire up the engine's core components from a *crawler*.

    ``spider_closed_callback`` is invoked once a spider has fully shut down.
    """
    self.settings = crawler.settings
    # Per-spider engine slots, populated as spiders are opened.
    self.slots = {}
    # Engine state flags.
    self.running = False
    self.paused = False
    # Resolve (but do not instantiate) the scheduler class named in
    # the SCHEDULER setting; one instance is created per spider slot.
    self.scheduler_cls = load_object(self.settings['SCHEDULER'])
    self.downloader = Downloader(crawler)
    self.scraper = Scraper(crawler)
    # CONCURRENT_SPIDERS is deprecated: warn when set to anything but 1.
    self._concurrent_spiders = self.settings.getint('CONCURRENT_SPIDERS', 1)
    if self._concurrent_spiders != 1:
        warnings.warn(
            "CONCURRENT_SPIDERS settings is deprecated, use "
            "Scrapyd max_proc config instead",
            ScrapyDeprecationWarning)
    self._spider_closed_callback = spider_closed_callback