def run(self):
    """Main loop of the scheduler thread.

    Pops events off the queue: executes an event when it is due, otherwise
    sleeps until the next event's scheduled time (or until something is
    queued). Terminates once self.shouldStop is set.
    """
    try:
        self.runLock.acquire()
        while not self.shouldStop:
            event, shouldExec = self._getEvent()
            if event is None:
                #nothing in the queue, wait until something gets added
                self._wait()
            else:
                if shouldExec:
                    #event is due - execute its callback; a failing task must
                    #not kill the scheduler thread, so log and continue
                    try:
                        event["task"](*event["args"], **event["kw"])
                    except:
                        self.log.error("Execution of event failed:\n%s", logTraceback())
                else:
                    #next event is not due yet, sleep until its scheduled time
                    self._wait(event["time"] - time())
        self.log.debug("Stopping")
        self.thread = None
        self.runLock.release()
    except:
        self.log.error("Error in main loop:\n%s", logTraceback())
def _start(self, loadSuccess):
    """Second stage of torrent startup, run after the load step finished.

    loadSuccess: presumably the result of the preceding load/check step -
    confirm against the caller. Only when it is True is the torrent
    registered with all handlers; the registration order below is
    deliberate and should not be changed casually.
    """
    try:
        if loadSuccess:
            #loading was successful, add to handlers
            self.log.debug("Reseting requester")
            self.requester.reset()
            self.log.debug("Starting transfer measurement")
            self.inRate.start()
            self.outRate.start()
            self.log.debug("Adding us to connection handler")
            self.connHandler.addTorrent(self.torrentIdent, self.torrent, self.pieceStatus, self.inRate, self.outRate, self.storage, self.filePrio, self.requester, self.superSeedingHandler)
            self.log.debug("Adding us to connection listener")
            self.connListener.addTorrent(self.torrentIdent, self.torrent.getTorrentHash())
            self.log.debug("Adding us to connection builder")
            self.connBuilder.addTorrent(self.torrentIdent, self.torrent.getTorrentHash())
            self.log.debug("Adding us to choker")
            self.choker.addTorrent(self.torrentIdent, self.storage.getStatus(), self.superSeedingHandler)
            self.log.debug("Starting tracker requester")
            self.trackerRequester.start()
            #mark the torrent as running only after every handler accepted it
            self.started = True
            self.state = 'running'
    except:
        #something failed - hard
        self.log.error("Error in load function:\n%s", logTraceback())
def OnAddFromUrl(self, event): #torrentpath downloadDefaultDir = self.config.get('paths','downloadFolder') #let user enter a url diag = wx.TextEntryDialog(self, message='Please enter the http url of the torrent file:', caption='Enter url', defaultValue='http://') if diag.ShowModal() == wx.ID_OK: #user did select something torrentUrl = diag.GetValue() #directory in which the download data should be stored saveDiag = wx.DirDialog(self, message='Select the directory in which the downloaded data should be stored',\ defaultPath=downloadDefaultDir, style=wx.DD_NEW_DIR_BUTTON) if saveDiag.ShowModal() == wx.ID_OK: #user selected something savePath = saveDiag.GetPath() #load torrents one by one self.log.info('Adding torrent with data path "%s"', savePath) try: self.torrentList.addTorrentByUrl(torrentUrl, savePath) except MultiBtException, e: self.log.error('Failed to add torrent, reason: %s', e.reason) showErrorMessage(self, '%s.', e.reason) except Exception, e: self.log.critical('Internal error while adding torrent:\n%s', logTraceback()) showErrorMessage(self, 'Internal error, torrent not added.\n%s.', logTraceback())
def showGui(path, config, torrentHandler, persister, version): app = wx.App() test = Gui(path, config, torrentHandler, persister, version) try: app.MainLoop() except: print 'Main loop failed:', str(logTraceback())
def run(self):
    """Main loop of the connection-listener thread.

    Repeatedly selects on all managed SAM connections; the instance lock is
    released only for the duration of the (blocking) select call and held
    while the result sets are processed. Terminates when self.shouldStop is
    set, closing the listening socket on the way out.
    """
    try:
        self.lock.acquire()
        while not self.shouldStop:
            #drop the lock while blocking in select so other threads may
            #add/remove connections in the meantime
            self.lock.release()
            recvable, sendable, errored = self.samSockManager.select(self.allConns, set(), self.allConns, timeout=1)
            self.lock.acquire()
            for connId in errored:
                #conn may have been removed while we were not holding the lock
                if connId in self.allConns:
                    if not connId == self.listenConn:
                        self._closeConn(connId, 'conn failed')
            for connId in recvable:
                if connId in self.allConns:
                    if connId == self.listenConn:
                        #new incomming connections
                        self._acceptConns()
                    else:
                        #received data
                        self.log.debug('Conn %i: Received data', connId)
                        self._recvFromConn(connId)
        #close listening conn
        self.log.debug("Closing listening socket")
        self.samSockManager.close(self.listenConn, force=True)
        #really terminate
        self.thread = None
        self.log.info("Stopping")
        self.lock.release()
    except:
        self.log.error('Error in main loop:\n%s', logTraceback())
def run(self):
    """Main loop of the requester thread.

    Selects on the connections we currently want to read from / write to;
    the instance lock is released only around the blocking select call.
    Failed connections cause their associated request to be retried.
    Terminates when self.shouldStop is set.
    """
    try:
        self.lock.acquire()
        while not self.shouldStop:
            #release the lock while blocked in select
            self.lock.release()
            recvable, sendable, errored = self.samSockManager.select(self.connsWithRecvInterest, self.connsWithSendInterest, self.allConns, timeout=1)
            self.lock.acquire()
            for connId in recvable:
                #conn may have vanished while the lock was dropped
                if connId in self.allConns:
                    #received data
                    self._recv(connId)
            for connId in errored:
                #conn failed, close it
                if connId in self.allConns:
                    connSet = self.conns[connId]
                    #distinguish failure after vs. during connection establishment
                    if connSet['connected']:
                        self._retryRequest(connSet['requestId'], 'connection failed')
                    else:
                        self._retryRequest(connSet['requestId'], 'connect failed')
            for connId in sendable:
                if connId in self.allConns:
                    self._send(connId)
        self.thread = None
        self.log.info("Stopping")
        self.lock.release()
    except:
        self.log.error('Error in main loop:\n%s', logTraceback())
def _reportRequestResult(self, requestSet, result):
    """Deliver `result` to the request's callback.

    The instance lock is dropped while the callback runs (so the callback
    may safely call back into this class) and re-acquired afterwards.
    Callback exceptions are logged, never propagated.

    Fix: replaced the deprecated `apply()` builtin (removed in Python 3)
    with a direct call using argument unpacking.
    """
    self.lock.release()
    try:
        requestSet['callback'](*([result] + requestSet['callbackArgs']), **requestSet['callbackKws'])
    except:
        self.log.warn('Error while executing request callback:\n%s', logTraceback())
    self.lock.acquire()
def __del__(self):
    """Run the registered cleanup callback (if any) when the iterator is
    garbage collected."""
    try:
        finishFunc = self.finishIterFunc
        if finishFunc is not None:
            finishFunc()
    except:
        #an exception must never escape a destructor - just log it
        logging.getLogger('PieceIterator').error('Error in __del__:\n%s', logTraceback())
def run(self):
    """Main loop of the connection-builder thread.

    Selects on outgoing connections: once a socket becomes sendable the
    BitTorrent handshake is pushed out and the connection is moved from the
    send-interest set to the recv-interest set; incoming data is then
    dispatched to _recvFromConn. The lock is released only around the
    blocking select call. Terminates when self.shouldStop is set.
    """
    try:
        self.lock.acquire()
        while not self.shouldStop:
            #drop the lock while blocked in select
            self.lock.release()
            recvable, sendable, errored = self.samSockManager.select(
                self.connsWithRecvInterest, self.connsWithSendInterest, self.allConns, timeout=1
            )
            self.lock.acquire()
            for connId in errored:
                # conn failed, close it
                self._failedConn(connId, "connection failed")
            for connId in sendable:
                if connId in self.conns:
                    # connected - send the handshake and switch to waiting for the reply
                    connSet = self.conns[connId]
                    torrentInfo = self.torrents[connSet["torrentIdent"]]
                    connSet["sock"].send(Messages.generateHandshake(torrentInfo["infohash"], self.peerId))
                    self.connsWithSendInterest.remove(connId)
                    self.connsWithRecvInterest.add(connId)
            for connId in recvable:
                if connId in self.conns:
                    # received data
                    self._recvFromConn(connId)
        self.thread = None
        self.log.info("Stopping")
        self.lock.release()
    except:
        self.log.error("Error in main loop:\n%s", logTraceback())
def _stop(self):
    """Orderly application shutdown: torrent handler first, then the
    persister, finally the logging subsystem (last, so the earlier steps
    can still log)."""
    try:
        self.log.info('Stopping torrent handler')
        self.torrentHandler.stop()
        self.log.debug('Stopping persister')
        self.persister.stop()
        self.log.debug('Cleanup of log related things')
        self.logController.shutdown()
    except:
        self.log.error("Failure while shutting down:\n%s", logTraceback())
def _load(self, completionCallback):
    """Check the torrent's files on disk and determine which pieces are
    already finished, then report the outcome via completionCallback(bool).

    File checking is skipped only when the config allows it AND the files
    were already checked at least once (persisted flag). Hashing of existing
    data is skipped when all files were freshly created or persisted piece
    status could be loaded. The callback is suppressed when the load was
    aborted via self.shouldAbortLoad.
    """
    with self.loadLock:
        #inside lock
        self.log.info('Loading files of torrent')
        loadSuccess = False
        try:
            #check files of torrent
            if not self.config.get('storage', 'skipFileCheck'):
                #skipping is not allowed
                self.log.debug('Not allowed to skip file checking, starting check')
                allCreated, anyModified = self._checkAllFiles()
                self.btPersister.store('Storage-checkedFiles', True)
            elif not self.btPersister.get('Storage-checkedFiles', False):
                #skipping would be allowed but we didn't check even once up to now
                self.log.debug('Files were not checked up to now, starting check')
                allCreated, anyModified = self._checkAllFiles()
                self.btPersister.store('Storage-checkedFiles', True)
            else:
                #skipping is allowed and files were already checked
                self.log.debug('Skipping file checking')
                allCreated = False
                anyModified = False
            #check which pieces are already finished
            if allCreated:
                #no need to check piece availability, all files were just written to disk
                self.log.debug('Skipping hashing, files were just created')
            else:
                #possibly need to check, some files already existed
                if self.ownStatus.loadPersistedData():
                    #persisted status info existed
                    self.log.debug('Skipping hashing, managed to load persisted status data')
                else:
                    #there is no persisted data
                    self.log.debug('Checking which pieces are already finished')
                    self._checkPieceAvailability()
            #check if loading wasn't aborted
            if not self.shouldAbortLoad:
                self.ownStatus.persist()
                loadSuccess = True
                self.loaded = True
        except StorageException, se:
            self.log.error('Failure during load:\n%s', logTraceback())
        if not self.shouldAbortLoad:
            completionCallback(loadSuccess)
def OnTimer(self, event):
    """Periodic gui refresh: update the torrent list, the detail windows and
    the statusbar. Does nothing once the stop flag is set."""
    try:
        if self.stopFlag:
            #gui is shutting down, don't touch the widgets anymore
            return
        #update torrent list and any open child windows
        self.torrentList.manualUpdate()
        self.childWindows.manualUpdate()
        #update statusbar
        self.sb.manualUpdate()
    except:
        self.log.error("Failure in timer:\n%s", logTraceback())
def _sendOutRequest(self):
    """Pop the oldest queued outrequest, fetch its data from storage and
    queue the resulting piece message for sending.

    An outrequest appears to be a tuple of (piece index, offset, length) -
    inferred from its usage below; confirm against the producer. On any
    data failure the connection is failed via self._fail.
    """
    # queue one outrequest in the outbuffer
    outRequest = self.outRequestQueue.pop(0)
    try:
        # try to get data; the handle is popped so it runs at most once
        data = self.outRequestHandles.pop(outRequest)()
    except StorageException:
        # failed to get data
        self.log.error("Failed to get data for outrequest:\n%s", logTraceback())
        data = None
        self._fail("could not get data for outrequest")
    if data is not None:
        # got data
        if not len(data) == outRequest[2]:
            # short read - treat like a storage failure
            self.log.error("Did not get enough data for outrequest: expected %i, got %i!", outRequest[2], len(data))
            self._fail("could not get data for outrequest")
        else:
            message = Messages.generatePiece(outRequest[0], outRequest[1], data)
            self.outRequestsInFlight += 1
            self._queueSend(message, self._outRequestGotSend, [outRequest[2]])
def run(self):
    """Main loop of the connection-handler thread.

    Gets the current select sets from the connection status tracker, blocks
    in select with the lock released, then (lock held again) removes failed
    connections, parses and dispatches received messages, and triggers send
    events on writable connections. Terminates when self.shouldStop is set.
    """
    try:
        self.lock.acquire()
        while not self.shouldStop:
            recv, send, error = self.connStatus.getSelectSets()
            #drop the lock while blocked in select
            self.lock.release()
            recv, send, error = self.selectFunc(recv, send, error, timeout=0.25)
            self.lock.acquire()
            #failed conns
            for connId in error:
                if connId in self.conns:
                    #conn still exists
                    self._removeConnection(connId, "connection failed")
            #readable conns
            for connId in recv:
                if connId in self.conns:
                    #conn still exists
                    conn = self.conns[connId]
                    messages = conn.recv()
                    for msgNum, message in messages:
                        if self._checkMessage(conn, msgNum, message):
                            #message is somewhat sane
                            self._handleMessage(connId, conn, message)
            #sendable conns
            for connId in send:
                if connId in self.conns:
                    #conn still exists
                    self.conns[connId].sendEvent()
        self.thread = None
        self.log.info("Stopping")
        self.lock.release()
    except:
        self.log.error('Error in main loop:\n%s', logTraceback())
def _parseAnnounceResponse(self, trackerSet, data):
    """Parse a bencoded tracker announce response and feed any valid peer
    destinations into the peer pool.

    Returns (result, errorMsg) where result is one of u'Invalid Response',
    u'Request Failed', u'No Peers' or u'Ok' and errorMsg is the tracker's
    failure reason (or None).

    Fixes: the log call for an unparsable response passed its arguments in
    the wrong order (traceback where the url belongs - compare the correct
    order in _parseScrapeResponse); typo "desintations" in a log message.
    """
    url = trackerSet['logUrl']
    result = u'Invalid Response' #May be "Invalid Response", "Request Failed", "No Peers" or "Ok"
    errorMsg = None
    try:
        response = bdecode(data)
    except:
        #fixed argument order: url first, then the traceback
        self.log.warn('Failed to parse announce response from tracker "%s":\n%s', url, logTraceback())
        response = None
    if response is not None:
        if not isinstance(response, dict):
            #whatever this is, its not a standard response
            self.log.error('Announce response from tracker "%s" is in an unknown format', url)
        else:
            if 'failure reason' in response:
                #request failed
                result = u'Request Failed'
                errorMsg = unicode(response['failure reason'], 'ascii', 'ignore')
                self.log.warn('Announce request to tracker "%s" failed: "%s"', url, unicode(response['failure reason'], 'ascii', 'ignore'))
            else:
                if 'warning message' in response:
                    #just a warning
                    self.log.warn('Announce request to tracker "%s" got warned: "%s"', url, unicode(response['warning message'], 'ascii', 'ignore'))
                if not 'peers' in response:
                    #no peers in response
                    result = u'No Peers'
                    self.log.info('Tracker "%s" did not return any peers in its announce response', url)
                elif not isinstance(response['peers'], list):
                    #probably a compact response - can only be used for IPs, so how should this be used with I2P?
                    self.log.error('Tracker "%s" responded with a compact response to the announce request - not interpretable!', url)
                elif len(response['peers'])==0:
                    #no peers in response
                    result = u'No Peers'
                    self.log.info('Tracker "%s" did not supply any peers in its announce response', url)
                else:
                    #something valid
                    result = u'No Peers'
                    ownAddr = self.ownAddrFunc()
                    for peer in response['peers']:
                        #check each peer
                        if not isinstance(peer, dict):
                            #whatever this is, its nothing normal
                            self.log.error('Tracker "%s" supplied peers in an unknown format in its announce response', url)
                        elif not 'ip' in peer:
                            #uhm, a peer without ip?!
                            self.log.error('Tracker "%s" supplied peer data without destinations in its announce response!', url)
                        elif not isinstance(peer['ip'], str):
                            #uh, what kind of destination is this?!
                            self.log.error('Tracker "%s" supplied a peer destination of the type "%s" in its announce response!', url, type(peer['ip']))
                        else:
                            #finally, all checks passed, now parse the peer address
                            parseResult = self.i2pHostChecker.search(peer['ip'])
                            if parseResult is None:
                                #urgh, address is invalid, all the trouble for nothing
                                self.log.error('Tracker "%s" returned invalid peer with address "%s" in its announce response', url, peer['ip'])
                            else:
                                #valid address, ignore our own destination
                                peerAddr = parseResult.group(1)
                                if not peerAddr == ownAddr:
                                    result = u'Ok'
                                    self.log.debug('Tracker "%s" returned valid peer with address "%s" in its announce response', url, peerAddr)
                                    self.peerPool.addPossibleConnections(self.torrentIdent, [peerAddr])
    return result, errorMsg
def finishedRequest(self, data, conn, pieceIndex, offset):
    """Handle a completed block request: store the data, and when the whole
    piece is complete, hash-check it and update the piece status.

    Returns True when this call completed a whole, hash-valid piece.
    NOTE(review): the top-level placement of the trailing isFinished check
    was reconstructed from a single-line original - confirm against history.
    """
    #finished a request
    assert not self.ownStatus.isFinished(), 'already seed but finished a request?!'
    finishedPiece = False
    request = self.requestedPieces[pieceIndex]
    try:
        self.storage.storeData(pieceIndex, data, offset)
        success = True
    except:
        self.log.error('Failed to store data of piece "%i", offset "%i":\n%s', pieceIndex, offset, logTraceback())
        success = False
    if not success:
        #failed to store data
        canceledConns = []
        request.failedRequest(offset, conn)
    else:
        #stored data
        canceledConns = request.finishedRequest(offset, conn)
        #check if request is finished
        if request.isFinished():
            #finished piece
            del self.requestedPieces[pieceIndex]
            #get data
            try:
                pieceData = self.storage.getData(pieceIndex, 0, request.getPieceSize())
            except:
                pieceData = ''
                self.log.error('Failed to read data of piece "%i":\n%s', pieceIndex, logTraceback())
            #check data against the expected piece hash
            if sha1(pieceData).digest() == self.torrent.getPieceHashByPieceIndex(pieceIndex):
                #success
                finishedPiece = True
                self.ownStatus.gotPiece(pieceIndex)
                self.pieceStatus.setConcurrentRequestsCounter((pieceIndex,), -2)
            else:
                #failure - only warn when we actually read data (read failure was logged above)
                if not pieceData == '':
                    self.log.warn("Checksum error for retrieved piece %d!", pieceIndex)
                self.pieceStatus.setConcurrentRequestsCounter((pieceIndex,), -1)
                self._tryPieceWithWaitingConns(pieceIndex)
    if self.ownStatus.isFinished():
        #clear waiting conns
        self.waitingConns.clear()
    else:
        #make requests for the current conn
        self._makeRequestsForConn(conn)
    #make requests for canceled ones
    for conn in canceledConns:
        self._makeRequestsForConn(conn)
    return finishedPiece
fl = open(torrentFilePath, 'rb') with fl: torrentFileData = fl.read() except (IOError, OSError): failureMsg = 'Could not read torrent file from "%s"' % encodeStrForPrinting(torrentFilePath) if failureMsg is None: #successfully read the torrent data self.log.debug('Torrent %i: trying to parse read torrent data', torrentId) torrent = Torrent() try: torrent.load(torrentFileData) except TorrentException, e: failureMsg = e.reason except: failureMsg = 'Failed to parse torrent file "%s"!\nTraceback: %s' % (encodeStrForPrinting(torrentFilePath), encodeStrForPrinting(logTraceback())) return failureMsg, torrent def _getBtObj(self, torrentId, torrentDataPath): btObj = None infoHash = None failureMsg, torrent = self._getTorrentObj(torrentId) if failureMsg is None: #valid torrent data infohash = torrent.getTorrentHash() if self.queue.setContains('torrentHash', infohash): #torrent is already on the queue failureMsg = 'Torrent is already queued' else:
def getData(self, pieceIndex, addOffset, length):
    """Read `length` bytes of piece `pieceIndex`, starting `addOffset` bytes
    into the piece, joining the chunks from all files which cover the range.
    Raises StorageException when a file cannot be read or is too short."""
    with self.lock:
        #map the piece-relative range to a global offset range and get the
        #files which cover it
        startOffset = self.torrent.convertPieceIndexToOffset(pieceIndex, addOffset)
        neededFiles = self.torrent.getFilesForOffset(startOffset, startOffset + length)
        #collect one data chunk per file
        chunks = []
        for fileInfo in neededFiles:
            filePath = self._getFilePath(fileInfo['path'])
            try:
                datafile = open(filePath, 'rb')
                with datafile:
                    datafile.seek(fileInfo['offset'])
                    fileData = datafile.read(fileInfo['bytes'])
            except IOError:
                raise StorageException('Failure while trying to read from file "%s":\n%s' % (filePath, logTraceback()))
            if not len(fileData) == fileInfo['bytes']:
                #too few bytes, something went wrong here - too short file?!
                raise StorageException('Couldn\'t read enough bytes from file "%s": wanted %i, got %i' % (fileInfo['path'], fileInfo['bytes'], len(fileData)))
            chunks.append(fileData)
        return ''.join(chunks)
def storeData(self, pieceIndex, data, offset=0):
    """Write `data` into piece `pieceIndex`, starting `offset` bytes into
    the piece, distributing it over all files which cover the range.
    Raises StorageException when a file cannot be written."""
    with self.lock:
        #map the piece-relative offset to a global one and get the files
        #which cover the written range
        globalOffset = self.torrent.convertPieceIndexToOffset(pieceIndex) + offset
        targetFiles = self.torrent.getFilesForOffset(globalOffset, globalOffset + len(data))
        #write the data, file by file, advancing a cursor through `data`
        pos = 0
        for fileInfo in targetFiles:
            filePath = self._getFilePath(fileInfo['path'])
            try:
                datafile = open(filePath, 'rb+')
                with datafile:
                    datafile.seek(fileInfo['offset'])
                    datafile.write(data[pos:pos + fileInfo['bytes']])
                pos += fileInfo['bytes']
            except IOError:
                #file operation failed
                raise StorageException('Failure while trying to write to file "%s":\n%s' % (filePath, logTraceback()))
def _checkFile(self, filePath, wantedFileSize):
    #checks path to file and the size of the file itself,
    #may throw StorageException if the file path is not acceptable or a directory or file operation fails
    """Ensure the file (and its directory) exists and has wantedFileSize
    bytes, sparsely filling it with zero bytes when it is too short.

    Returns (created, modified): whether file/directory had to be created
    and whether the file content had to be grown. The fill loop measures
    how long one ~1MB seek+write takes and picks a step size so each step
    costs roughly 0.1 seconds, checking self.shouldAbortLoad between steps.
    """
    created = False
    modified = False
    #get file path
    realFilePath = self._getFilePath(filePath)
    #check directory
    dirPath = os.path.dirname(realFilePath)
    if not os.path.exists(dirPath):
        #directory doesn't exist, create it
        created = True
        self.log.debug('Creating Directory "%s"', dirPath)
        try:
            os.makedirs(dirPath)
        except OSError:
            raise StorageException('Failed to create directory "%s":\n%s' % (dirPath, logTraceback()))
    #check file
    if not os.path.exists(realFilePath):
        #file needs to be created (append mode creates without truncating)
        fl = open(realFilePath, 'ab')
        fl.close()
        created = True
    self.log.debug('Processing file "%s" (original name "%s"): new "%s", isdir "%s", isfile "%s", islink "%s", dirname "%s", basename "%s"',\
                   realFilePath, filePath, str(created), str(os.path.isdir(realFilePath)), str(os.path.isfile(realFilePath)),\
                   str(os.path.islink(realFilePath)), dirPath, os.path.basename(realFilePath))
    try:
        fl = open(realFilePath, 'rb+')
        with fl:
            #file opened - seek to the end to learn the current size
            fl.seek(0, 2)
            currentFileSize = fl.tell()
            if currentFileSize < wantedFileSize:
                self.log.debug('File "%s" is %d bytes to short', realFilePath, wantedFileSize - currentFileSize)
                modified = True
            else:
                self.log.debug('File "%s" has the correct size', realFilePath)
            #fill if needed
            #NOTE(review): 1045876 does not match the 1048575 seek below -
            #looks like a typo for ~1MB (1048576?); confirm before changing
            if (not self.shouldAbortLoad) and currentFileSize + 1045876 < wantedFileSize:
                #large fill - time one ~1MB step to calibrate the step size
                start = time()
                fl.seek(1048575, 1)
                fl.write('\x00')
                fl.flush()
                currentFileSize = fl.tell()
                needed = time() - start
                try:
                    step = int(1048575 * 0.1 / needed)
                except ZeroDivisionError:
                    #timer resolution too coarse: fill the rest in one go
                    step = wantedFileSize - currentFileSize - 1
                step = max(1, step)
                self.log.debug('Needed %f seconds for 1 Mb, step size %i', needed, step)
                while (not self.shouldAbortLoad) and currentFileSize + step <= wantedFileSize - 1:
                    fl.seek(step, 1)
                    fl.write('\x00')
                    fl.flush()
                    currentFileSize = fl.tell()
                    self.log.debug("Progress: %i / %i", currentFileSize, wantedFileSize)
            if (not self.shouldAbortLoad) and currentFileSize < wantedFileSize:
                #seek remaining bytes and write last byte
                fl.seek((wantedFileSize - currentFileSize - 1), 1)
                fl.write('\x00')
                fl.flush()
                currentFileSize = fl.tell()
                self.log.debug("Progress: %i / %i", currentFileSize, wantedFileSize)
    except IOError:
        #something failed
        raise StorageException('Failure while processing file "%s":\n%s' % (realFilePath, logTraceback()))
    return created, modified
def OnAddFromFile(self, event): #torrentpath torrentDefaultDir = self.config.get('paths','torrentFolder') downloadDefaultDir = self.config.get('paths','downloadFolder') #let user select a torrent diag = wx.FileDialog(self, message='Select the torrent to open',defaultDir=torrentDefaultDir,\ wildcard='Torrent files (*.torrent)|*.torrent|All files (*.*)|*.*',\ style=wx.OPEN | wx.MULTIPLE) if diag.ShowModal() == wx.ID_OK: #user did select something torrentPaths = diag.GetPaths() #directory in which the download data should be stored saveDiag = wx.DirDialog(self, message='Select the directory in which the downloaded data should be stored',\ defaultPath=downloadDefaultDir,style=wx.DD_NEW_DIR_BUTTON) if saveDiag.ShowModal() == wx.ID_OK: #user selected something savePath = saveDiag.GetPath() #load torrents one by one for torrentPath in torrentPaths: self.log.info('Trying to read torrent file from "%s"', torrentPath) try: fl = open(torrentPath, 'rb') with fl: data = fl.read() except (IOError, OSError): data = None if data is None: #failed to read file self.log.error('Failed to read torrent file from "%s", torrent not added', torrentPath) showErrorMessage(self, 'Failed to read torrent file from "%s".', torrentPath) else: #worked self.log.info('Adding torrent with data path "%s"', savePath) try: self.torrentList.addTorrentByFile(data, savePath) except MultiBtException, e: self.log.error('Failed to add torrent, reason: %s', e.reason) showErrorMessage(self, '%s.', e.reason) except Exception, e: self.log.critical('Internal error while adding torrent:\n%s', logTraceback()) showErrorMessage(self, 'Internal error, torrent not added.\n%s.', logTraceback())
def __init__(self, reason, *args):
    """Exception carrying a %-formatted reason string plus the traceback
    captured at construction time (for later logging)."""
    self.traceback = logTraceback()
    formattedReason = reason % args
    self.reason = formattedReason
    Exception.__init__(self, formattedReason)
def _parseScrapeResponse(self, trackerSet, data):
    """Parse a bencoded tracker scrape response and store the seed/leech/
    download counts for this torrent in the tracker info.

    Returns True when the response was a well-formed dict (even if it was a
    failure or lacked stats), False otherwise.
    """
    url = trackerSet['scrapeLogUrl']
    infoHash = self.torrent.getTorrentHash()
    valid = False
    try:
        response = bdecode(data)
    except:
        self.log.warn('Failed to parse scrape response from tracker "%s":\n%s', url, logTraceback())
        response = None
    if response is not None:
        if not isinstance(response, dict):
            #whatever this is, its not a standard response
            self.log.error('Scrape response from tracker "%s" is in an unknown format', url)
        else:
            valid = True
            if 'failure reason' in response:
                #request failed
                self.log.warn('Scrape request to tracker "%s" failed: "%s"', url, str(response['failure reason']))
            else:
                if 'warning message' in response:
                    #just a warning
                    self.log.warn('Scrape request to tracker "%s" got warned: "%s"', url, str(response['warning message']))
                if not 'files' in response:
                    #files missing
                    self.log.warn('Scrape response from tracker "%s" is incomplete ("file" key is missing)', url)
                elif not isinstance(response['files'], dict):
                    #invalid format
                    self.log.warn('Scrape response from tracker "%s" is in an unknown format (invalid type "%s" for key "files")', url, type(response['files']))
                elif not infoHash in response['files']:
                    #missing stats for this torrent
                    self.log.warn('Scrape response from tracker "%s" contains no stats for this torrent', url)
                elif not isinstance(response['files'][infoHash], dict):
                    #invalid format
                    self.log.warn('Scrape response from tracker "%s" is in an unknown format (invalid type "%s" for torrent stats entry)', url, type(response['files'][infoHash]))
                else:
                    #ok
                    stats = response['files'][infoHash]
                    #try to get counts, falling back to 0 for missing or non-integer values
                    seeds = stats.get('complete', 0)
                    if not (isinstance(seeds, int) or isinstance(seeds, long)):
                        self.log.warn('Scrape response from tracker "%s" contains invalid "complete" stats of type "%s"', url, type(seeds))
                        seeds = 0
                    leeches = stats.get('incomplete', 0)
                    if not (isinstance(leeches, int) or isinstance(leeches, long)):
                        self.log.warn('Scrape response from tracker "%s" contains invalid "incomplete" stats of type "%s"', url, type(leeches))
                        leeches = 0
                    downloads = stats.get('downloaded', 0)
                    if not (isinstance(downloads, int) or isinstance(downloads, long)):
                        self.log.warn('Scrape response from tracker "%s" contains invalid "downloaded" stats of type "%s"', url, type(downloads))
                        downloads = 0
                    #report
                    self.log.info('Scrape response from tracker "%s" reported %i seeds, %i leeches and %i finished downloads', url, seeds, leeches, downloads)
                    self.trackerInfo.setScrapeStats(trackerSet['id'], seeds, leeches, downloads)
    return valid
self.lastError = TorrentCreatorException('InternalError:\n%s', logTraceback()) self.running = False self.thread = None def getFiles(self, dataPath): try: dataPath = os.path.normpath(os.path.abspath(os.path.expanduser(dataPath))) self._getFiles(dataPath) except TorrentCreatorException, tce: raise tce except Exception, e: raise TorrentCreatorException('InternalError:\n%s', logTraceback()) def create(self, torrentPath, dataPath, pieceLength, mainTracker, trackerList=None, creationDate=None, comment=None, creator=None): assert type(torrentPath) == unicode, 'path for torrent file not unicode?!' assert type(dataPath) == unicode, 'path to files not unicode?!' assert pieceLength > 0, 'piece length <= 0 ?!' assert self.thread is None, 'thread still running?!' self.thread = threading.Thread(target=self._create, args=(torrentPath, dataPath, pieceLength, mainTracker, trackerList, creationDate, comment, creator)) self.thread.start() def abort(self): thread = self.thread self.shouldAbort = True