Пример #1
0
 def __init__(self, spider):
     """Wrap *spider* in its own process and stop the reactor on finish.

     :param spider: spider instance; its class is handed to the Crawler.
     """
     Process.__init__(self)
     self.spider = spider
     project_settings = get_project_settings()
     self.crawler = Crawler(spider.__class__, project_settings)
     # Shut the Twisted reactor down once the spider has closed.
     self.crawler.signals.connect(
         reactor.stop, signal=signals.spider_closed)
Пример #2
0
 def __init__(self, spider):
     """Keep a reference to *spider* and build a CrawlerProcess from the
     project settings; crawling is kicked off elsewhere (e.g. in run()).
     """
     Process.__init__(self)
     self.spider = spider
     self.crawler = CrawlerProcess(get_project_settings())
Пример #3
0
 def __init__(self, spider):
     """Wrap *spider* in a process, pinning the Scrapy settings module.

     :param spider: spider instance; its class is handed to the Crawler.
     """
     Process.__init__(self)
     # Ensure the settings module is resolvable before loading settings;
     # setdefault leaves any value already set in the environment intact.
     os.environ.setdefault('SCRAPY_SETTINGS_MODULE', 'crawler.gov.gov.settings')
     project_settings = get_project_settings()
     self.crawler = Crawler(spider.__class__, project_settings)
     # Stop the Twisted reactor once the spider has closed.
     self.crawler.signals.connect(
         reactor.stop, signal=signals.spider_closed)
     self.spider = spider
Пример #4
0
 def __init__(self, spider, accumulator):
     """Run *spider* in a process, funnelling scraped items into *accumulator*.

     :param spider: spider instance; its class is handed to the Crawler.
     :param accumulator: container that gather_results appends items to.
     """
     Process.__init__(self)
     self.spider = spider
     self.accumulator = accumulator
     self.crawler = Crawler(spider.__class__, get_project_settings())
     # Collect each item as it passes through the pipeline...
     self.crawler.signals.connect(
         self.gather_results, signal=signals.item_passed)
     # ...and stop the Twisted reactor when the spider closes.
     self.crawler.signals.connect(
         reactor.stop, signal=signals.spider_closed)
Пример #5
0
 def __init__(self, website_id, force=False):
     """Prepare a crawl of a single website in a separate process.

     :param website_id: identifier of the website to crawl.
     :param force: when True, re-crawl regardless of prior state
         (interpretation depends on the run() implementation).
     """
     Process.__init__(self)
     self.website_id = website_id
     self.force = force
     self.crawler = CrawlerProcess(get_project_settings())
Пример #6
0
 def __init__(self, website_id):
     """Prepare a crawl of a single website in a separate process.

     :param website_id: identifier of the website to crawl.
     """
     Process.__init__(self)
     self.website_id = website_id
     self.crawler = CrawlerProcess(get_project_settings())