def makeRequest(self, url, callback, callbackArgs=None, callbackKws=None,
                addr=None, host=None, transferTimeout=120, requestTimeout=300,
                maxHeaderSize=4096, maxDataSize=1048576, maxReqTries=1):
    """Queue an HTTP request and return its request id.

    url may be a plain str (it is then parsed with splitUrl) or an
    already-split url dict. addr and host are derived from the url when
    not given explicitly. callback is invoked with callbackArgs /
    callbackKws when the request finishes (handled by _addRequest).
    """
    # Fix: the defaults used to be [] and {} — mutable objects shared
    # between every call of this method; use None sentinels instead.
    if callbackArgs is None:
        callbackArgs = []
    if callbackKws is None:
        callbackKws = {}

    if isinstance(url, str):
        url = splitUrl(url)

    # get address and host out of url if not given separately
    if addr is None:
        addr = url['address']
        if i2pDestHttpUrlAddrRegexObj.match(addr) is not None and len(addr) == 520:
            # NOTE(review): 520 looks like a full I2P destination plus a
            # 4-char suffix (presumably ".i2p") — strip that suffix; confirm
            # against splitUrl/i2pDestHttpUrlAddrRegexObj definitions.
            addr = addr[:-4]

    if host is None:
        if i2pDestHttpUrlAddrRegexObj.match(addr) is None:
            # not an I2P destination: use the address as-is for the host
            host = addr
        else:
            if len(addr) == 520:
                host = u'http://'+addr
            else:
                host = u'http://'+addr+u'.i2p'

    # convert address and host to UTF-8 byte strings if needed
    if isinstance(addr, unicode):
        addr = addr.encode('UTF-8', 'ignore')
    if isinstance(host, unicode):
        host = host.encode('UTF-8', 'ignore')

    # finally really do the request
    # Fix: release the lock even if _addRequest raises.
    self.lock.acquire()
    try:
        requestId = self._addRequest(addr, host, url, maxHeaderSize, maxDataSize,
                                     callback, callbackArgs, callbackKws,
                                     transferTimeout, requestTimeout, maxReqTries)
    finally:
        self.lock.release()
    return requestId
def _getHttpFetchObj(self, torrentId, torrentDataPath, torrentUrlStr):
    # Prepare an HTTP fetch for the given torrent url string.
    # NOTE(review): this body looks truncated in this view — httpFetchObj is
    # never populated after initialisation and nothing is returned; verify
    # against the complete file before relying on this documentation.
    httpFetchObj = None
    failureMsg = None
    try:
        # parse the url; splitUrl raises HttpUtilitiesException on bad urls
        torrentUrlSplit = splitUrl(torrentUrlStr)
    except HttpUtilitiesException, e:
        # remember the parse failure reason instead of propagating it
        failureMsg = e.reason
def _genTrackerInfo(self, tierIdx, trackerId, trackerUrl):
    """Build and return the per-tracker state dict for a single tracker.

    Contains the split/scrape urls plus zeroed announce/scrape counters
    and empty timestamps.
    """
    splitTrackerUrl = splitUrl(trackerUrl)

    # all numeric counters start at zero
    trackerInfo = dict.fromkeys(('announceTryCount', 'announceSuccessCount',
                                 'scrapeTryCount', 'scrapeSuccessCount',
                                 'seedCount', 'leechCount', 'downloadCount'), 0)

    # timestamps start unset
    trackerInfo.update(dict.fromkeys(('announceTryTime', 'announceSuccessTime',
                                      'scrapeTryTime', 'scrapeSuccessTime'), None))

    # identity and url bookkeeping
    trackerInfo.update({'url': splitTrackerUrl,
                        'logUrl': trackerUrl,
                        'tier': tierIdx,
                        'id': trackerId,
                        'lastAnnounceResult': u'None'})

    trackerInfo['scrapeUrl'], trackerInfo['scrapeLogUrl'] = self._getScrapeUrl(splitTrackerUrl)
    return trackerInfo
def _setTrackerInfo(self, newTrackerInfos):
    """Replace the tracker tier layout with newTrackerInfos.

    newTrackerInfos is a list of tiers, each a list of
    {'trackerId', 'trackerUrl'} dicts; None restores the defaults.
    Known trackers are updated in place (keeping their statistics),
    unknown ones are created, and trackers absent from every tier are
    removed. Empty tiers are skipped and do not consume a tier index.
    """
    if newTrackerInfos is None:
        # restore defaults
        self._createTrackerInfo()
        return

    # remember which trackers existed before the update
    previousIds = set(self.trackerInfos.iterkeys())

    seenIds = set()
    self.trackerTiers = []
    tierIdx = 0
    for tier in newTrackerInfos:
        if len(tier) == 0:
            # empty tiers are dropped entirely
            continue

        tierIds = []
        for tracker in tier:
            trackerId = tracker['trackerId']
            trackerUrl = tracker['trackerUrl']
            seenIds.add(trackerId)
            tierIds.append(trackerId)

            if trackerId in self.trackerInfos:
                # known tracker: refresh tier index and urls, keep stats
                info = self.trackerInfos[trackerId]
                info['tier'] = tierIdx
                info['url'] = splitUrl(trackerUrl)
                info['logUrl'] = trackerUrl
                info['scrapeUrl'], info['scrapeLogUrl'] = self._getScrapeUrl(info['url'])
            else:
                # new tracker: create fresh state
                self.trackerInfos[trackerId] = self._genTrackerInfo(tierIdx, trackerId, trackerUrl)

        self.trackerTiers.append(tierIds)
        tierIdx += 1

    # drop trackers that no longer appear in any tier
    for staleId in previousIds - seenIds:
        del self.trackerInfos[staleId]