def __init__(self):
     """Set up the threaded crawler: base-class state plus the
     coordination primitives shared by the worker threads."""
     # Base-class bookkeeping first, then the shared primitives.
     CrawlerBase.__init__(self)
     # Lock guards shared counters; FIFO queue feeds worker threads.
     self.lock = threading.Lock()
     self.task_queue = queue.Queue()
 def savePageContent(self, data):
     """Thread-safe wrapper around ``CrawlerBase.savePageContent``.

     Saves *data* via the base class while holding ``self.lock`` and
     adds the reported count of newly saved records to
     ``self.total_new``.

     Returns:
         The count returned by the base-class call.
     """
     # `with` releases the lock even if the base-class call raises;
     # the original acquire()/release() pair leaked the lock on
     # exception, which would deadlock every other worker thread.
     with self.lock:
         c = CrawlerBase.savePageContent(self, data)
         self.total_new += c
         return c
 def __init__(self):
     """Initialize base crawler state and a shared aiohttp session."""
     CrawlerBase.__init__(self)
     # Use a persistent (keep-alive) connection: all requests share one session.
     # NOTE(review): aiohttp recommends creating ClientSession inside a
     # running event loop / coroutine; constructing it in a plain
     # __init__ may emit warnings on newer aiohttp versions — confirm
     # against the crawler's startup path.
     self.session = aiohttp.ClientSession()
Exemple #4
0
 def __init__(self):
     """Initialize crawler state by delegating to the CrawlerBase constructor."""
     CrawlerBase.__init__(self)
Exemple #5
0
 def savePageContent(self, data):
     """Save *data* through the base class under ``self.lock``.

     The count of newly saved records returned by the base class is
     accumulated into ``self.total_new``.

     Returns:
         The count returned by ``CrawlerBase.savePageContent``.
     """
     # Context-manager form guarantees the lock is released on an
     # exception from the base-class call; the bare acquire()/release()
     # in the original left the lock held in that case, stalling all
     # other worker threads.
     with self.lock:
         c = CrawlerBase.savePageContent(self, data)
         self.total_new += c
         return c
Exemple #6
0
 def __init__(self):
     """Initialize crawler state by delegating to the CrawlerBase constructor."""
     CrawlerBase.__init__(self)
Exemple #7
0
 def singleRequest(self, i):
     """Perform one request for *i*, delegating to ``CrawlerBase.singleRequest``.

     See the base class for the meaning of *i* and the return value.
     """
     return CrawlerBase.singleRequest(self, i)
Exemple #8
0
 def __init__(self):
     """Initialize base crawler state and the thread-coordination
     objects used by the worker threads."""
     CrawlerBase.__init__(self)
     # A lock for shared-counter updates and a FIFO work queue.
     self.lock = threading.Lock()
     self.task_queue = queue.Queue()
Exemple #9
0
 def hasDuplicate(self, data):
     """Delegate duplicate detection for *data* to ``CrawlerBase.hasDuplicate``."""
     return CrawlerBase.hasDuplicate(self, data)
Exemple #10
0
 def addRecord(self, data):
     """Store *data* as a record by delegating to ``CrawlerBase.addRecord``."""
     return CrawlerBase.addRecord(self, data)
Exemple #11
0
 def fetchPageContent(self, post=None):
     """Fetch page content via ``CrawlerBase.fetchPageContent``.

     Args:
         post: Optional dict of POST parameters; a fresh empty dict is
             used per call when omitted.

     Returns:
         Whatever the base-class call returns.
     """
     # A fresh dict per call avoids the shared-mutable-default pitfall:
     # the original `post={}` dict persisted across calls, so any
     # mutation by the base class would leak into later requests.
     if post is None:
         post = {}
     return CrawlerBase.fetchPageContent(self, post)
Exemple #12
0
 def singleRequest(self, i):
     """Perform one request for *i* via ``CrawlerBase.singleRequest``.

     See the base class for the meaning of *i* and the return value.
     """
     return CrawlerBase.singleRequest(self, i)
Exemple #13
0
 def addRecord(self, data):
     """Store *data* as a record by delegating to ``CrawlerBase.addRecord``."""
     return CrawlerBase.addRecord(self, data)
Exemple #14
0
 def hasDuplicate(self, data):
     """Delegate duplicate detection for *data* to ``CrawlerBase.hasDuplicate``."""
     return CrawlerBase.hasDuplicate(self, data)
Exemple #15
0
 def fetchPageContent(self, post=None):
     """Fetch page content via ``CrawlerBase.fetchPageContent``.

     Args:
         post: Optional dict of POST parameters; a fresh empty dict is
             created per call when omitted.

     Returns:
         Whatever the base-class call returns.
     """
     # Replaces the mutable default `post={}`: that single dict was
     # shared across every call, so base-class mutation of it would
     # bleed into subsequent requests.
     if post is None:
         post = {}
     return CrawlerBase.fetchPageContent(self, post)