Example #1
File: crawler.py  Project: pyarnold/scrapy
 def start_crawling(self):
     log.scrapy_info(self.settings)
     return self._start_crawler() is not None
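Both calls in this snippet come from the legacy (pre-1.0) scrapy.log module: the start-up information (Scrapy version and bot name) is read from the settings and written to the log. A minimal sketch of driving the same calls directly, assuming that legacy module is available (it was removed in later Scrapy releases):

 # Minimal sketch, assuming the legacy scrapy.log API used in the snippets above.
 from scrapy import log
 from scrapy.utils.project import get_project_settings

 settings = get_project_settings()
 log.start_from_settings(settings)  # attach the log observer configured by the settings
 log.scrapy_info(settings)          # emit the "Scrapy <version> started (bot: ...)" banner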
Example #2
File: crawler.py  Project: DLBob/scrapy
 def __init__(self, settings):
     super(CrawlerProcess, self).__init__(settings)
     install_shutdown_handlers(self._signal_shutdown)
     self.stopping = False
     self.log_observer = log.start_from_settings(self.settings)
     log.scrapy_info(settings)
Example #3
File: crawler.py  Project: zxsted/scrapy
 def __init__(self, settings):
     super(CrawlerProcess, self).__init__(settings)
     install_shutdown_handlers(self._signal_shutdown)
     self.stopping = False
     self.log_observer = log.start_from_settings(self.settings)
     log.scrapy_info(settings)
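Examples #2 and #3 carry identical constructor code from two forks: it chains to the parent constructor, installs shutdown signal handlers, starts the log observer from the settings, and logs the start-up banner. A hedged sketch of how this constructor is reached, assuming the legacy API shown above (loading settings via get_project_settings() is an illustrative choice, not part of the quoted code):

 # Sketch, assuming the legacy CrawlerProcess shown above.
 from scrapy.crawler import CrawlerProcess
 from scrapy.utils.project import get_project_settings

 process = CrawlerProcess(get_project_settings())
 # The constructor has now installed SIGINT/SIGTERM handlers via
 # install_shutdown_handlers(), started the log observer from the settings,
 # and logged the start-up banner with log.scrapy_info().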
Example #4
File: crawler.py  Project: Bia-lx/scrapy
 def start(self, stop_after_crawl=True, start_reactor=True):
     self.log_observer = log.start_from_settings(self.settings)
     log.scrapy_info(self.settings)
     if start_reactor:
         self._start_reactor(stop_after_crawl)
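In this variant, start() itself sets up the log observer, logs the banner, and then, unless start_reactor is False, hands control to the Twisted reactor. A short usage sketch under the same legacy-API assumption, with keyword arguments mirroring the signature shown above:

 # Sketch, assuming the legacy CrawlerProcess.start() signature above.
 from scrapy.crawler import CrawlerProcess
 from scrapy.utils.project import get_project_settings

 process = CrawlerProcess(get_project_settings())
 process.start(stop_after_crawl=True)   # log scrapy_info() and run the reactor
 # process.start(start_reactor=False)   # only configure logging; the caller runs the reactor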