import queue
import threading

def __init__(self):
    CrawlerBase.__init__(self)
    self.task_queue = queue.Queue()  # pending crawl tasks shared between worker threads
    self.lock = threading.Lock()     # guards the shared total_new counter

def savePageContent(self, data):
    # Serialize updates to total_new (assumed to be initialized by CrawlerBase);
    # the with-statement also releases the lock if savePageContent raises.
    with self.lock:
        c = CrawlerBase.savePageContent(self, data)
        self.total_new += c
    return c
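The queue and lock above only matter once several worker threads drain the queue concurrently. Below is a minimal driver sketch, not taken from the original source: the worker loop, the num_workers parameter, and the use of fetchPageContent as the per-task fetch are assumptions for illustration.

import queue
import threading

def run_workers(crawler, num_workers=4):
    # Drain crawler.task_queue with a pool of threads; savePageContent is
    # already lock-protected, so workers can store results concurrently.
    def worker():
        while True:
            try:
                task = crawler.task_queue.get_nowait()
            except queue.Empty:
                return
            data = crawler.fetchPageContent(task)  # hypothetical per-task fetch
            crawler.savePageContent(data)
            crawler.task_queue.task_done()

    threads = [threading.Thread(target=worker) for _ in range(num_workers)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()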
Example #3
import aiohttp

def __init__(self):
    CrawlerBase.__init__(self)
    # Use one persistent connection pool: all requests share a single session
    self.session = aiohttp.ClientSession()
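Creating the ClientSession in a plain __init__ only works while an event loop is available; aiohttp's documentation recommends creating the session from inside a coroutine. A minimal alternative sketch, assuming the rest of the crawler is async (the class name, the _get_session helper, and the fetch method are illustrative, not part of the original listing):

import aiohttp

class AsyncCrawler(CrawlerBase):
    def __init__(self):
        CrawlerBase.__init__(self)
        self._session = None  # created lazily, once an event loop is running

    async def _get_session(self):
        if self._session is None:
            self._session = aiohttp.ClientSession()
        return self._session

    async def fetch(self, url):
        session = await self._get_session()
        async with session.get(url) as resp:
            return await resp.text()

    async def close(self):
        if self._session is not None:
            await self._session.close()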
Example #4
def __init__(self):
    # No extra state; relies entirely on CrawlerBase defaults
    CrawlerBase.__init__(self)
Example #5
def savePageContent(self, data):
    # Same pattern as above: lock-protected update of the shared total_new counter
    with self.lock:
        c = CrawlerBase.savePageContent(self, data)
        self.total_new += c
    return c
Example #6
def __init__(self):
    CrawlerBase.__init__(self)
Example #7
def singleRequest(self, i):
    # Thin wrapper around the base-class request for the i-th task
    return CrawlerBase.singleRequest(self, i)
Example #8
import queue
import threading

def __init__(self):
    CrawlerBase.__init__(self)
    self.task_queue = queue.Queue()  # shared work queue
    self.lock = threading.Lock()     # protects shared counters
Example #9
def hasDuplicate(self, data):
    # Defer duplicate detection to CrawlerBase
    return CrawlerBase.hasDuplicate(self, data)
Example #10
def addRecord(self, data):
    # Defer persistence to CrawlerBase
    return CrawlerBase.addRecord(self, data)
Example #11
def fetchPageContent(self, post=None):
    # Avoid a mutable default argument; fall back to an empty POST payload
    return CrawlerBase.fetchPageContent(self, post or {})
Example #12
def singleRequest(self, i):
    return CrawlerBase.singleRequest(self, i)
Example #13
def addRecord(self, data):
    return CrawlerBase.addRecord(self, data)
Example #14
def hasDuplicate(self, data):
    return CrawlerBase.hasDuplicate(self, data)
Example #15
def fetchPageContent(self, post=None):
    # Same fix as above: no mutable default argument
    return CrawlerBase.fetchPageContent(self, post or {})
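All of the examples above are thin overrides of the same base class, which the listing itself never shows. The stub below is a hypothetical reconstruction of the interface these overrides assume; only the method names come from the examples, everything else is an assumption.

class CrawlerBase:
    # Hypothetical stub, not part of the original listing: only the method
    # names are taken from the examples above.
    def __init__(self):
        self.total_new = 0  # number of newly stored records

    def fetchPageContent(self, post=None):
        raise NotImplementedError  # fetch one page, optionally with a POST payload

    def singleRequest(self, i):
        raise NotImplementedError  # perform the i-th request

    def hasDuplicate(self, data):
        raise NotImplementedError  # True if data was already stored

    def addRecord(self, data):
        raise NotImplementedError  # persist one parsed record

    def savePageContent(self, data):
        raise NotImplementedError  # store data, return the count of new records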