def get_completion(self, config, metainfo, save_path, filelist=False):
    """Report how complete the torrent at save_path is, judged purely
    from saved fastresume data (no full hash check is performed).

    Returns None when completion cannot be determined (no data_dir
    configured, no usable resume data).  For a zero-length torrent
    returns 1 (trivially complete).  Otherwise returns the completed
    fraction, or, when `filelist` is true, a 3-tuple whose first item
    is the fraction and whose remaining items are passed through from
    Storage.check_fastresume.

    NOTE(review): the divisions at the end rely on true division
    (e.g. `from __future__ import division` at module top) or on
    check_fastresume returning floats; otherwise they floor — confirm
    against the file header.
    """
    if not config["data_dir"]:
        return None
    infohash = metainfo.infohash
    if metainfo.is_batch:
        myfiles = [os.path.join(save_path, f) for f in metainfo.files_fs]
    else:
        myfiles = [save_path]
    if metainfo.file_size == 0:
        # Empty torrent: nothing to download, but there is no
        # meaningful per-file breakdown to report either.
        if filelist:
            return None
        return 1
    # check_only Storage is used purely for resume-data validation.
    try:
        s = Storage(None, None, zip(myfiles, metainfo.sizes), check_only=True)
    except Exception:  # was a bare except: don't swallow SystemExit/KeyboardInterrupt
        return None
    filename = os.path.join(config["data_dir"], "resume", infohash.encode("hex"))
    try:
        f = file(filename, "rb")
    except Exception:  # a missing/unreadable resume file is not an error
        f = None
    try:
        try:
            r = s.check_fastresume(f, filelist, metainfo.piece_length,
                                   len(metainfo.hashes), myfiles)
        except Exception:
            r = None
    finally:
        # Guarantee the resume file is closed even on exceptions the
        # handler above does not catch.
        if f is not None:
            f.close()
    if r is None:
        return None
    if filelist:
        return r[0] / metainfo.file_size, r[1], r[2]
    return r / metainfo.file_size
def _start_download(self, metainfo, feedback, save_path): # GTK Crash Hack import time time.sleep(.2) self.feedback = feedback self._set_auto_uploads() self.infohash = metainfo.infohash self.file_size = metainfo.file_size if not metainfo.reported_errors: metainfo.show_encoding_errors(log.error) if metainfo.is_batch: myfiles = [os.path.join(save_path, f) for f in metainfo.files_fs] else: myfiles = [save_path] self._filepool.add_files(myfiles, self) self._myfiles = myfiles self._storage = Storage(self.config, self._filepool, zip(myfiles, metainfo.sizes)) resumefile = None if self.config['data_dir']: filename = os.path.join(self.config['data_dir'], 'resume', self.infohash.encode('hex')) if os.path.exists(filename): try: resumefile = file(filename, 'rb') if self._storage.check_fastresume(resumefile) == 0: resumefile.close() resumefile = None except Exception, e: log.info("Could not load fastresume data: "+ str(e) + ". Will perform full hash check.") if resumefile is not None: resumefile.close() resumefile = None
def get_completion(self, config, metainfo, save_path, filelist=False):
    """Report how complete the torrent at save_path is, judged purely
    from saved fastresume data (no full hash check is performed).

    Returns None when completion cannot be determined (no data_dir
    configured, no usable resume data).  For a zero-length torrent
    returns 1 (trivially complete).  Otherwise returns the completed
    fraction, or, when `filelist` is true, a 3-tuple whose first item
    is the fraction and whose remaining items are passed through from
    Storage.check_fastresume.

    NOTE(review): the divisions at the end rely on true division
    (e.g. `from __future__ import division` at module top) or on
    check_fastresume returning floats; otherwise they floor — confirm
    against the file header.
    """
    if not config['data_dir']:
        return None
    infohash = metainfo.infohash
    if metainfo.is_batch:
        myfiles = [os.path.join(save_path, f) for f in metainfo.files_fs]
    else:
        myfiles = [save_path]
    if metainfo.file_size == 0:
        # Empty torrent: nothing to download, but there is no
        # meaningful per-file breakdown to report either.
        if filelist:
            return None
        return 1
    # check_only Storage is used purely for resume-data validation.
    try:
        s = Storage(None, None, zip(myfiles, metainfo.sizes), check_only=True)
    except Exception:  # was a bare except: don't swallow SystemExit/KeyboardInterrupt
        return None
    filename = os.path.join(config['data_dir'], 'resume', infohash.encode('hex'))
    try:
        f = file(filename, 'rb')
    except Exception:  # a missing/unreadable resume file is not an error
        f = None
    try:
        try:
            r = s.check_fastresume(f, filelist, metainfo.piece_length,
                                   len(metainfo.hashes), myfiles)
        except Exception:
            r = None
    finally:
        # Guarantee the resume file is closed even on exceptions the
        # handler above does not catch.
        if f is not None:
            f.close()
    if r is None:
        return None
    if filelist:
        return r[0] / metainfo.file_size, r[1], r[2]
    return r / metainfo.file_size
def _start_download(self, metainfo, feedback, save_path): # GTK Crash Hack import time time.sleep(0.2) self.feedback = feedback self._set_auto_uploads() self.infohash = metainfo.infohash self.file_size = metainfo.file_size if not metainfo.reported_errors: metainfo.show_encoding_errors(log.error) if metainfo.is_batch: myfiles = [os.path.join(save_path, f) for f in metainfo.files_fs] else: myfiles = [save_path] self._filepool.add_files(myfiles, self) self._myfiles = myfiles self._storage = Storage(self.config, self._filepool, zip(myfiles, metainfo.sizes)) resumefile = None if self.config["data_dir"]: filename = os.path.join(self.config["data_dir"], "resume", self.infohash.encode("hex")) if os.path.exists(filename): try: resumefile = file(filename, "rb") if self._storage.check_fastresume(resumefile) == 0: resumefile.close() resumefile = None except Exception, e: log.info("Could not load fastresume data: " + str(e) + ". Will perform full hash check.") if resumefile is not None: resumefile.close() resumefile = None
class _SingleTorrent(object):
    """Manages one torrent download: storage and hash checking, peer
    upload/download plumbing, rate measurement and tracker announces.

    NOTE(review): the class clearly continues beyond this chunk —
    self._finished, self._set_auto_uploads, self.internal_shutdown and
    self._announce_done are referenced below but not defined here.
    """

    def __init__(self, event_handler, singleport_listener, ratelimiter,
                 filepool, config, neighbors, certificate, sessionid):
        self.event_handler = event_handler
        self._singleport_listener = singleport_listener
        self._ratelimiter = ratelimiter
        self._filepool = filepool
        # Private copy so per-torrent config tweaks don't leak out.
        self.config = dict(config)
        # Wiring created lazily by _start_download().
        self._storage = None
        self._storagewrapper = None
        self._ratemeasure = None
        self._upmeasure = None
        self._downmeasure = None
        self._torrent = None
        self._rerequest = None
        self._statuscollecter = None
        self._announced = False
        self._listening = False
        self.reserved_ports = []
        self.reported_port = None
        self._myfiles = None
        self.started = False
        self.is_seed = False
        self.closed = False
        self.infohash = None
        self.file_size = None
        self._doneflag = threading.Event()
        # Set when the download completes (torrent becomes a seed).
        self.finflag = threading.Event()
        self._hashcheck_thread = None
        # Continuation used to resume the _start_download generator.
        self._contfunc = None
        # (description, fraction_done) pair for status display.
        self._activity = ("Initial startup", 0)
        self.feedback = None
        self.messages = []
        self.neighbors = neighbors
        self.certificate = certificate
        self.sessionid = sessionid

    def schedule(self, delay, func):
        """Run func after delay seconds on the event loop, tagged with
        this torrent as context."""
        self.event_handler.schedule(delay, func, context=self)

    def start_download(self, *args, **kwargs):
        """Drive the _start_download generator to completion.

        Each step is advanced via the event loop; self._contfunc is
        the hook background threads call to resume the generator.
        """
        it = self._start_download(*args, **kwargs)

        def cont():
            try:
                it.next()  # Python 2 generator protocol
            except StopIteration:
                self._contfunc = None

        def contfunc():
            # Resume the generator on the event-loop thread.
            self.schedule(0, cont)
        self._contfunc = contfunc
        contfunc()

    def _start_download(self, metainfo, feedback, save_path):
        """Generator performing startup: hash check (in a background
        thread), then choker/downloader/uploader wiring and the first
        tracker announce.

        Runs to the first ``yield`` and is resumed via self._contfunc
        once the hash-check thread finishes.
        """
        # GTK Crash Hack
        import time
        time.sleep(0.2)
        self.feedback = feedback
        self._set_auto_uploads()
        self.infohash = metainfo.infohash
        self.file_size = metainfo.file_size
        if not metainfo.reported_errors:
            metainfo.show_encoding_errors(log.error)
        # Batch torrents store multiple files under save_path; single
        # torrents download directly into save_path.
        if metainfo.is_batch:
            myfiles = [os.path.join(save_path, f) for f in metainfo.files_fs]
        else:
            myfiles = [save_path]
        self._filepool.add_files(myfiles, self)
        self._myfiles = myfiles
        self._storage = Storage(self.config, self._filepool, zip(myfiles, metainfo.sizes))
        # Try to open previously saved fastresume data; on any failure
        # fall back to a full hash check.
        resumefile = None
        if self.config["data_dir"]:
            filename = os.path.join(self.config["data_dir"], "resume", self.infohash.encode("hex"))
            if os.path.exists(filename):
                try:
                    resumefile = file(filename, "rb")
                    if self._storage.check_fastresume(resumefile) == 0:
                        resumefile.close()
                        resumefile = None
                except Exception, e:
                    log.info("Could not load fastresume data: " + str(e) + ". Will perform full hash check.")
                    if resumefile is not None:
                        resumefile.close()
                        resumefile = None

        def data_flunked(amount, index):
            # Callback from StorageWrapper when a received piece fails
            # its hash check.
            self._ratemeasure.data_rejected(amount)
            log.info("piece %d failed hash check, " "re-downloading it" % index)
        backthread_exception = None

        def hashcheck():
            # Runs on a background thread: builds the StorageWrapper
            # (which performs the hash check), then resumes the
            # generator through _contfunc.
            def statusfunc(activity=None, fractionDone=0):
                if activity is None:
                    activity = self._activity[0]
                self._activity = (activity, fractionDone)
            try:
                self._storagewrapper = StorageWrapper(
                    self._storage,
                    self.config,
                    metainfo.hashes,
                    metainfo.piece_length,
                    self._finished,
                    statusfunc,
                    self._doneflag,
                    data_flunked,
                    self.infohash,
                    resumefile,
                )
            except:
                # NOTE(review): this rebinds a *local*
                # backthread_exception (Python 2 has no nonlocal), so
                # the outer variable stays None and hash-check errors
                # are silently dropped — looks like a bug; confirm
                # against upstream.
                backthread_exception = sys.exc_info()
            self._contfunc()
        thread = threading.Thread(target=hashcheck)
        thread.setDaemon(False)
        self._hashcheck_thread = thread
        thread.start()
        # Suspend here until the hash-check thread calls _contfunc().
        yield None
        self._hashcheck_thread = None
        if resumefile is not None:
            resumefile.close()
        if backthread_exception:
            a, b, c = backthread_exception
            raise a, b, c  # Python 2 re-raise with original traceback
        if self._storagewrapper.amount_left == 0:
            self._finished()
        choker = Choker(self.config, self.schedule, self.finflag.isSet)
        upmeasure = Measure(self.config["max_rate_period"])
        downmeasure = Measure(self.config["max_rate_period"])
        self._upmeasure = upmeasure
        self._downmeasure = downmeasure
        self._ratemeasure = RateMeasure(self._storagewrapper.amount_left_with_partials)
        # Seed the piece picker with what is already on disk.
        picker = PiecePicker(len(metainfo.hashes), self.config)
        for i in xrange(len(metainfo.hashes)):
            if self._storagewrapper.do_I_have(i):
                picker.complete(i)
        for i in self._storagewrapper.stat_dirty:
            picker.requested(i)

        def kickpeer(connection):
            # Close a misbehaving connection from the event loop.
            def kick():
                connection.close()
            self.schedule(0, kick)
        downloader = Downloader(
            self.config,
            self._storagewrapper,
            picker,
            len(metainfo.hashes),
            downmeasure,
            self._ratemeasure.data_came_in,
            kickpeer,
        )

        def make_upload(connection):
            # Factory handed to Torrent for per-connection uploads.
            return Upload(
                connection,
                self._ratelimiter,
                upmeasure,
                choker,
                self._storagewrapper,
                self.config["max_slice_length"],
                self.config["max_rate_period"],
            )
        self._torrent = Torrent(self.infohash, make_upload, downloader, len(metainfo.hashes), self)
        self.reported_port = self.config["forwarded_port"]
        if not self.reported_port:
            # No forwarded port configured: reserve one from the
            # single-port listener.
            self.reported_port = self._singleport_listener.get_port(self.neighbors)
            self.reserved_ports.append(self.reported_port)
        self.neighbors.add_torrent(self.infohash, self._torrent)
        self._listening = True
        self._rerequest = Rerequester(
            metainfo.announce,
            self.config,
            self.schedule,
            self.neighbors,
            self._storagewrapper.get_amount_left,
            upmeasure.get_total,
            downmeasure.get_total,
            self.reported_port,
            self.infohash,
            self.finflag,
            self.internal_shutdown,
            self._announce_done,
            self.certificate,
            self.sessionid,
        )
        self._statuscollecter = DownloaderFeedback(
            choker,
            upmeasure.get_rate,
            downmeasure.get_rate,
            upmeasure.get_total,
            downmeasure.get_total,
            self.neighbors.get_relay_stats,
            self._ratemeasure.get_time_left,
            self._ratemeasure.get_size_left,
            self.file_size,
            self.finflag,
            downloader,
            self._myfiles,
        )
        self._announced = True
        self._rerequest.begin()
        self.started = True
        if not self.finflag.isSet():
            self._activity = ("downloading", 0)
        self.feedback.started(self)
class _SingleTorrent(object):
    """Manages one torrent download: storage and hash checking, peer
    upload/download plumbing, rate measurement and tracker announces.

    NOTE(review): the class clearly continues beyond this chunk —
    self._finished, self._set_auto_uploads, self.internal_shutdown and
    self._announce_done are referenced below but not defined here.
    """

    def __init__(self, event_handler, singleport_listener, ratelimiter,
                 filepool, config, neighbors, certificate, sessionid):
        self.event_handler = event_handler
        self._singleport_listener = singleport_listener
        self._ratelimiter = ratelimiter
        self._filepool = filepool
        # Private copy so per-torrent config tweaks don't leak out.
        self.config = dict(config)
        # Wiring created lazily by _start_download().
        self._storage = None
        self._storagewrapper = None
        self._ratemeasure = None
        self._upmeasure = None
        self._downmeasure = None
        self._torrent = None
        self._rerequest = None
        self._statuscollecter = None
        self._announced = False
        self._listening = False
        self.reserved_ports = []
        self.reported_port = None
        self._myfiles = None
        self.started = False
        self.is_seed = False
        self.closed = False
        self.infohash = None
        self.file_size = None
        self._doneflag = threading.Event()
        # Set when the download completes (torrent becomes a seed).
        self.finflag = threading.Event()
        self._hashcheck_thread = None
        # Continuation used to resume the _start_download generator.
        self._contfunc = None
        # (description, fraction_done) pair for status display.
        self._activity = ('Initial startup', 0)
        self.feedback = None
        self.messages = []
        self.neighbors = neighbors
        self.certificate = certificate
        self.sessionid = sessionid

    def schedule(self, delay, func):
        """Run func after delay seconds on the event loop, tagged with
        this torrent as context."""
        self.event_handler.schedule(delay, func, context=self)

    def start_download(self, *args, **kwargs):
        """Drive the _start_download generator to completion.

        Each step is advanced via the event loop; self._contfunc is
        the hook background threads call to resume the generator.
        """
        it = self._start_download(*args, **kwargs)

        def cont():
            try:
                it.next()  # Python 2 generator protocol
            except StopIteration:
                self._contfunc = None

        def contfunc():
            # Resume the generator on the event-loop thread.
            self.schedule(0, cont)
        self._contfunc = contfunc
        contfunc()

    def _start_download(self, metainfo, feedback, save_path):
        """Generator performing startup: hash check (in a background
        thread), then choker/downloader/uploader wiring and the first
        tracker announce.

        Runs to the first ``yield`` and is resumed via self._contfunc
        once the hash-check thread finishes.
        """
        # GTK Crash Hack
        import time
        time.sleep(.2)
        self.feedback = feedback
        self._set_auto_uploads()
        self.infohash = metainfo.infohash
        self.file_size = metainfo.file_size
        if not metainfo.reported_errors:
            metainfo.show_encoding_errors(log.error)
        # Batch torrents store multiple files under save_path; single
        # torrents download directly into save_path.
        if metainfo.is_batch:
            myfiles = [os.path.join(save_path, f) for f in metainfo.files_fs]
        else:
            myfiles = [save_path]
        self._filepool.add_files(myfiles, self)
        self._myfiles = myfiles
        self._storage = Storage(self.config, self._filepool,
                                zip(myfiles, metainfo.sizes))
        # Try to open previously saved fastresume data; on any failure
        # fall back to a full hash check.
        resumefile = None
        if self.config['data_dir']:
            filename = os.path.join(self.config['data_dir'],
                                    'resume', self.infohash.encode('hex'))
            if os.path.exists(filename):
                try:
                    resumefile = file(filename, 'rb')
                    if self._storage.check_fastresume(resumefile) == 0:
                        resumefile.close()
                        resumefile = None
                except Exception, e:
                    log.info("Could not load fastresume data: "+ str(e) + ". Will perform full hash check.")
                    if resumefile is not None:
                        resumefile.close()
                        resumefile = None

        def data_flunked(amount, index):
            # Callback from StorageWrapper when a received piece fails
            # its hash check.
            self._ratemeasure.data_rejected(amount)
            log.info('piece %d failed hash check, ' 're-downloading it' % index)
        backthread_exception = None

        def hashcheck():
            # Runs on a background thread: builds the StorageWrapper
            # (which performs the hash check), then resumes the
            # generator through _contfunc.
            def statusfunc(activity = None, fractionDone = 0):
                if activity is None:
                    activity = self._activity[0]
                self._activity = (activity, fractionDone)
            try:
                self._storagewrapper = StorageWrapper(self._storage,
                    self.config, metainfo.hashes, metainfo.piece_length,
                    self._finished, statusfunc, self._doneflag,
                    data_flunked, self.infohash, resumefile)
            except:
                # NOTE(review): this rebinds a *local*
                # backthread_exception (Python 2 has no nonlocal), so
                # the outer variable stays None and hash-check errors
                # are silently dropped — looks like a bug; confirm
                # against upstream.
                backthread_exception = sys.exc_info()
            self._contfunc()
        thread = threading.Thread(target = hashcheck)
        thread.setDaemon(False)
        self._hashcheck_thread = thread
        thread.start()
        # Suspend here until the hash-check thread calls _contfunc().
        yield None
        self._hashcheck_thread = None
        if resumefile is not None:
            resumefile.close()
        if backthread_exception:
            a, b, c = backthread_exception
            raise a, b, c  # Python 2 re-raise with original traceback
        if self._storagewrapper.amount_left == 0:
            self._finished()
        choker = Choker(self.config, self.schedule, self.finflag.isSet)
        upmeasure = Measure(self.config['max_rate_period'])
        downmeasure = Measure(self.config['max_rate_period'])
        self._upmeasure = upmeasure
        self._downmeasure = downmeasure
        self._ratemeasure = RateMeasure(self._storagewrapper.amount_left_with_partials)
        # Seed the piece picker with what is already on disk.
        picker = PiecePicker(len(metainfo.hashes), self.config)
        for i in xrange(len(metainfo.hashes)):
            if self._storagewrapper.do_I_have(i):
                picker.complete(i)
        for i in self._storagewrapper.stat_dirty:
            picker.requested(i)

        def kickpeer(connection):
            # Close a misbehaving connection from the event loop.
            def kick():
                connection.close()
            self.schedule(0, kick)
        downloader = Downloader(self.config, self._storagewrapper, picker,
                                len(metainfo.hashes), downmeasure,
                                self._ratemeasure.data_came_in, kickpeer)

        def make_upload(connection):
            # Factory handed to Torrent for per-connection uploads.
            return Upload(connection, self._ratelimiter, upmeasure, choker,
                          self._storagewrapper,
                          self.config['max_slice_length'],
                          self.config['max_rate_period'])
        self._torrent = Torrent(self.infohash, make_upload, downloader,
                                len(metainfo.hashes), self)
        self.reported_port = self.config['forwarded_port']
        if not self.reported_port:
            # No forwarded port configured: reserve one from the
            # single-port listener.
            self.reported_port = self._singleport_listener.get_port(self.neighbors)
            self.reserved_ports.append(self.reported_port)
        self.neighbors.add_torrent(self.infohash, self._torrent)
        self._listening = True
        self._rerequest = Rerequester(metainfo.announce, self.config,
            self.schedule, self.neighbors,
            self._storagewrapper.get_amount_left, upmeasure.get_total,
            downmeasure.get_total, self.reported_port, self.infohash,
            self.finflag, self.internal_shutdown, self._announce_done,
            self.certificate, self.sessionid)
        self._statuscollecter = DownloaderFeedback(choker,
            upmeasure.get_rate, downmeasure.get_rate, upmeasure.get_total,
            downmeasure.get_total, self.neighbors.get_relay_stats,
            self._ratemeasure.get_time_left, self._ratemeasure.get_size_left,
            self.file_size, self.finflag, downloader, self._myfiles)
        self._announced = True
        self._rerequest.begin()
        self.started = True
        if not self.finflag.isSet():
            self._activity = ('downloading', 0)
        self.feedback.started(self)