Example #1
 def __init__(self):
     CrawlerBase.__init__(self)
     self.task_queue = queue.Queue()  # pending crawl tasks shared by the worker threads
     self.lock = threading.Lock()     # guards the shared total_new counter (see Example #2)
Example #2
 def savePageContent(self, data):
     # Serialize the update so concurrent workers cannot race on total_new;
     # the with-statement releases the lock even if saving raises.
     with self.lock:
         c = CrawlerBase.savePageContent(self, data)
         self.total_new = self.total_new + c
     return c
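Examples #1 and #2 come from a thread-based subclass: the constructor sets up a shared work queue and a lock, and savePageContent serializes the update of total_new across worker threads. Below is a minimal sketch of how the two overrides might fit together; it assumes CrawlerBase.singleRequest(i) fetches and stores one page, and the ThreadedCrawler name, the worker/run helpers, and the thread count are illustrative rather than taken from the source.
 import queue
 import threading

 class ThreadedCrawler(CrawlerBase):
     def __init__(self):
         CrawlerBase.__init__(self)
         self.task_queue = queue.Queue()  # page indices waiting to be crawled
         self.lock = threading.Lock()     # guards the shared total_new counter

     def savePageContent(self, data):
         # Serialize the counter update across worker threads.
         with self.lock:
             c = CrawlerBase.savePageContent(self, data)
             self.total_new = self.total_new + c
         return c

     def worker(self):
         # Drain the queue; each item is assumed to be an index accepted by singleRequest().
         while True:
             try:
                 i = self.task_queue.get_nowait()
             except queue.Empty:
                 return
             self.singleRequest(i)
             self.task_queue.task_done()

     def run(self, indices, workers=4):
         for i in indices:
             self.task_queue.put(i)
         threads = [threading.Thread(target=self.worker) for _ in range(workers)]
         for t in threads:
             t.start()
         for t in threads:
             t.join()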
Example #3
 def __init__(self):
     CrawlerBase.__init__(self)
     # Use a long-lived connection: all requests share a single session
     self.session = aiohttp.ClientSession()
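The long-lived session in Example #3 keeps one connection pool alive for every request, but it has to be closed when the crawler shuts down, otherwise aiohttp warns about an unclosed client session. A minimal sketch, assuming a hypothetical close() coroutine on the subclass (not part of the CrawlerBase interface shown in these examples):
 async def close(self):
     # Release the connection pool held by the shared ClientSession.
     await self.session.close()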
Example #4
 def __init__(self):
     CrawlerBase.__init__(self)
Example #5
 def singleRequest(self, i):
     return CrawlerBase.singleRequest(self, i)
Example #6
 def hasDuplicate(self, data):
     return CrawlerBase.hasDuplicate(self, data)
Example #7
 def addRecord(self, data):
     return CrawlerBase.addRecord(self, data)
Example #8
 def fetchPageContent(self, post=None):
     # Use None rather than a mutable {} default; substitute a fresh dict per call.
     return CrawlerBase.fetchPageContent(self, post if post is not None else {})