def _start_download(self, metainfo, feedback, save_path):
    # Begin a download session for the torrent described by `metainfo`,
    # storing payload data under `save_path` and reporting events through
    # the `feedback` object. Sets up per-torrent state on self, registers
    # the target files with the shared file pool, creates the Storage
    # layer, and tries to open a previously saved fastresume file so a
    # full hash check can be skipped.
    self.feedback = feedback
    config = self.config
    self.infohash = metainfo.infohash
    self.total_bytes = metainfo.total_bytes
    if not metainfo.reported_errors:
        # Surface any filename-encoding problems recorded in the metainfo
        # through our error callback (only once per metainfo).
        metainfo.show_encoding_errors(self._error)
    myid = self._make_id()
    # Seed the module-level RNG from our freshly generated peer id —
    # presumably to randomize later peer-selection decisions; the `seed`
    # name is imported elsewhere in this file (not visible here).
    seed(myid)
    # Schedulers bound to this torrent's context on the raw server:
    # schedfunc runs on the server's own thread/task queue,
    # externalsched is the variant safe to call from outside it.
    # Neither is used in the visible portion of this method — they are
    # presumably consumed by code further down; TODO confirm.
    def schedfunc(func, delay):
        self._rawserver.add_task(func, delay, context=self)
    def externalsched(func, delay):
        self._rawserver.external_add_task(func, delay, context=self)
    # A batch (multi-file) torrent stores each listed file under
    # save_path; a single-file torrent downloads to save_path itself.
    if metainfo.is_batch:
        myfiles = [os.path.join(save_path, f) for f in metainfo.files_fs]
    else:
        myfiles = [save_path]
    # Register our files with the shared pool so handles can be shared
    # and conflicts with other torrents detected.
    self._filepool.add_files(myfiles, self)
    self._myfiles = myfiles
    self._storage = Storage(config, self._filepool, zip(myfiles, metainfo.sizes))
    resumefile = None
    if config['data_dir']:
        # Fastresume data lives in <data_dir>/resume/<hex infohash>.
        filename = os.path.join(config['data_dir'], 'resume',
                                self.infohash.encode('hex'))
        if os.path.exists(filename):
            try:
                resumefile = file(filename, 'rb')
                # check_fastresume() == 0 means the saved data is not
                # usable; drop the handle so a full hash check happens.
                if self._storage.check_fastresume(resumefile) == 0:
                    resumefile.close()
                    resumefile = None
            except Exception, e:
                # Best-effort: a broken resume file is only a warning —
                # fall back to a full hash check. Message text is
                # localized via gettext.
                self._error(WARNING,
                            _("Could not load fastresume data: %s") % str(e)
                            + ' ' + _("Will perform full hash check."))
                if resumefile is not None:
                    resumefile.close()
                    resumefile = None
def get_completion(self, config, metainfo, save_path, filelist=False):
    # Estimate how complete an earlier download of this torrent is,
    # using only its saved fastresume data (no live Storage is kept).
    #
    # Returns None when no estimate can be made; otherwise the fraction
    # of total_bytes already present. With filelist true, returns a
    # (fraction, extra, extra) triple built from check_fastresume()'s
    # richer result.
    if not config['data_dir']:
        # Without a data directory there is no resume file to consult.
        return None

    ihash = metainfo.infohash
    if metainfo.is_batch:
        paths = [os.path.join(save_path, name) for name in metainfo.files_fs]
    else:
        paths = [save_path]

    if metainfo.total_bytes == 0:
        # Zero-length torrent: trivially complete (no per-file detail
        # is available for the filelist variant).
        return None if filelist else 1

    # Build a check-only Storage over the expected files; any failure
    # (best-effort by design in this codebase) means "unknown".
    try:
        checker = Storage(None, None, zip(paths, metainfo.sizes),
                          check_only=True)
    except:
        return None

    resume_path = os.path.join(config['data_dir'], 'resume',
                               ihash.encode('hex'))
    try:
        handle = file(resume_path, 'rb')
    except:
        handle = None

    try:
        outcome = checker.check_fastresume(handle, filelist,
                                           metainfo.piece_length,
                                           len(metainfo.hashes), paths)
    except:
        outcome = None
    if handle is not None:
        handle.close()

    if outcome is None:
        return None
    if not filelist:
        return outcome / metainfo.total_bytes
    return outcome[0] / metainfo.total_bytes, outcome[1], outcome[2]