Example #1
 def __init__(self):
     CrawlerBase.__init__(self)
     self.task_queue = queue.Queue()
     self.lock = threading.Lock()
Example #2
 def savePageContent(self, data):
     # Serialize writes from worker threads; "with" also releases the lock
     # if the base-class call raises, which the original acquire/release did not.
     with self.lock:
         c = CrawlerBase.savePageContent(self, data)
         self.total_new += c
     return c
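Taken together, Examples #1 and #2 suggest a thread-pool style crawl: __init__ prepares a shared task_queue and lock, and savePageContent guards the shared total_new counter while delegating storage to the base class. CrawlerBase itself is not shown on this page, so the driver below is only a minimal sketch: the CrawlerBase stub, the worker()/run() helpers, the page-index tasks, and the stub return values are assumptions added purely for illustration.

import queue
import threading


class CrawlerBase:
    """Hypothetical stand-in for the base class the examples subclass."""

    def __init__(self):
        self.total_new = 0

    def singleRequest(self, i):
        # Pretend to fetch page i and return its "content".
        return {"page": i}

    def savePageContent(self, data):
        # Pretend to persist the data; return how many new records were added.
        return 1


class ThreadedCrawler(CrawlerBase):
    def __init__(self):
        CrawlerBase.__init__(self)
        self.task_queue = queue.Queue()
        self.lock = threading.Lock()

    def savePageContent(self, data):
        # Same pattern as Example #2: protect the shared counter with the lock.
        with self.lock:
            c = CrawlerBase.savePageContent(self, data)
            self.total_new += c
        return c

    def worker(self):
        while True:
            try:
                i = self.task_queue.get_nowait()
            except queue.Empty:
                return
            data = self.singleRequest(i)
            self.savePageContent(data)
            self.task_queue.task_done()

    def run(self, pages=20, threads=4):
        for i in range(pages):
            self.task_queue.put(i)
        workers = [threading.Thread(target=self.worker) for _ in range(threads)]
        for t in workers:
            t.start()
        for t in workers:
            t.join()
        return self.total_new


if __name__ == "__main__":
    print(ThreadedCrawler().run())  # -> 20 with the stub CrawlerBase above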
Example #3
 def __init__(self):
     CrawlerBase.__init__(self)
     # Reuse one long-lived session for all requests (HTTP keep-alive)
     self.session = aiohttp.ClientSession()
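Example #3 keeps a single aiohttp.ClientSession so every request can reuse the same connection pool. Note that aiohttp generally expects the session to be created inside a running event loop (recent versions warn when it is constructed in plain synchronous code), so a common variant creates it lazily in a coroutine and closes it explicitly. The sketch below shows that variant; AsyncCrawler, its lazy singleRequest, close(), and the example URL are assumptions for illustration, not part of CrawlerBase.

import asyncio

import aiohttp


class AsyncCrawler:
    """Illustration only: reuse one ClientSession for every request."""

    def __init__(self):
        self.session = None  # created lazily, inside the running event loop

    async def singleRequest(self, url):
        if self.session is None:
            # One long-lived session keeps connections alive instead of
            # opening a new TCP connection for every request.
            self.session = aiohttp.ClientSession()
        async with self.session.get(url) as resp:
            return await resp.text()

    async def close(self):
        if self.session is not None:
            await self.session.close()


async def main():
    crawler = AsyncCrawler()
    try:
        pages = await asyncio.gather(
            *(crawler.singleRequest("https://example.com/") for _ in range(3))
        )
        print(len(pages))
    finally:
        await crawler.close()


if __name__ == "__main__":
    asyncio.run(main())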
Example #4
 def __init__(self):
     CrawlerBase.__init__(self)
Example #5
 def singleRequest(self, i):
     return CrawlerBase.singleRequest(self, i)
Example #6
 def hasDuplicate(self, data):
     return CrawlerBase.hasDuplicate(self, data)
Example #7
 def addRecord(self, data):
     return CrawlerBase.addRecord(self, data)
Example #8
 def fetchPageContent(self, post=None):
     # Avoid a mutable default argument; substitute a fresh empty dict per call.
     return CrawlerBase.fetchPageContent(self, post if post is not None else {})