def _start_download(self, metainfo, feedback, save_path):
    """Initialize the on-disk state for a torrent download.

    Records the download's identity on self, registers the target files
    with the shared file pool, builds the Storage object, and — if a
    fastresume file exists for this torrent — tries to load it so a full
    hash check can be skipped.

    metainfo  -- parsed .torrent metadata (infohash, sizes, file list)
    feedback  -- object stored on self.feedback; semantics defined by
                 callers elsewhere (not visible here)
    save_path -- directory (batch torrent) or single file path to
                 download into
    """
    # GTK Crash Hack
    # NOTE(review): the sleep below is the "hack" — presumably gives the
    # GTK main loop time to settle before heavy I/O starts; confirm
    # before removing.
    import time
    time.sleep(.2)
    self.feedback = feedback
    self._set_auto_uploads()

    self.infohash = metainfo.infohash
    self.file_size = metainfo.file_size
    # Surface any filename-encoding problems once, via the error log.
    if not metainfo.reported_errors:
        metainfo.show_encoding_errors(log.error)

    # Batch torrents contain multiple files under save_path; a
    # single-file torrent downloads directly to save_path itself.
    if metainfo.is_batch:
        myfiles = [os.path.join(save_path, f) for f in metainfo.files_fs]
    else:
        myfiles = [save_path]

    # Register our files with the shared pool (so other torrents can't
    # claim them), then build the Storage that maps pieces to files.
    self._filepool.add_files(myfiles, self)
    self._myfiles = myfiles
    self._storage = Storage(self.config, self._filepool,
                            zip(myfiles, metainfo.sizes))

    # Try to load fastresume data saved under
    # <data_dir>/resume/<hex infohash>. On any failure we fall back to
    # a full hash check; resumefile is left as None in that case.
    resumefile = None
    if self.config['data_dir']:
        filename = os.path.join(self.config['data_dir'], 'resume',
                                self.infohash.encode('hex'))
        if os.path.exists(filename):
            try:
                resumefile = file(filename, 'rb')
                # check_fastresume() == 0 means the resume data is not
                # usable — discard the handle.
                if self._storage.check_fastresume(resumefile) == 0:
                    resumefile.close()
                    resumefile = None
            except Exception, e:
                log.info("Could not load fastresume data: "+ str(e) +
                         ". Will perform full hash check.")
                # Close the handle if it was opened before the failure.
                if resumefile is not None:
                    resumefile.close()
                    resumefile = None
def get_completion(self, config, metainfo, save_path, filelist=False):
    """Return the completion fraction of a torrent without starting it.

    Performs a read-only fastresume check against the files already on
    disk under save_path.

    config    -- config mapping; only config['data_dir'] is read here
    metainfo  -- parsed .torrent metadata (infohash, sizes, hashes, ...)
    save_path -- directory (batch torrent) or single file path
    filelist  -- when True, also return per-file detail

    Returns None when completion cannot be determined (no data_dir, no
    usable resume data, or any error during the check).  Otherwise
    returns a fraction in [0, 1], or, when filelist is True, a tuple
    (fraction, detail1, detail2) where the details come straight from
    Storage.check_fastresume (their exact shape is defined there).
    """
    if not config['data_dir']:
        return None
    infohash = metainfo.infohash
    # Batch torrents contain multiple files under save_path; a
    # single-file torrent is save_path itself.
    if metainfo.is_batch:
        myfiles = [os.path.join(save_path, f) for f in metainfo.files_fs]
    else:
        myfiles = [save_path]

    # A zero-byte torrent is trivially complete (but there is no
    # meaningful per-file breakdown to return).
    if metainfo.file_size == 0:
        if filelist:
            return None
        return 1

    # Best-effort from here on: any failure means "unknown", i.e. None.
    # except Exception (not bare except) so KeyboardInterrupt/SystemExit
    # still propagate.
    try:
        s = Storage(None, None, zip(myfiles, metainfo.sizes),
                    check_only=True)
    except Exception:
        return None
    filename = os.path.join(config['data_dir'], 'resume',
                            infohash.encode('hex'))
    try:
        f = file(filename, 'rb')
    except Exception:
        # Missing/unreadable resume file: check_fastresume accepts None.
        f = None
    try:
        r = s.check_fastresume(f, filelist, metainfo.piece_length,
                               len(metainfo.hashes), myfiles)
    except Exception:
        r = None
    if f is not None:
        f.close()
    if r is None:
        return None
    if filelist:
        # r is (bytes_done, detail1, detail2) in this mode -- TODO
        # confirm against Storage.check_fastresume.
        return r[0] / metainfo.file_size, r[1], r[2]
    return r / metainfo.file_size