def __init__(self, config, rawserver, data_dir,
             listen_fail_ok=False, init_torrents=True,
             is_single_torrent=False, resume_from_torrent_config=True,
             bitstring=None):
    """
    @param config: program-wide configuration object.
    @param rawserver: object that manages the main event loop and event
        scheduling.
    @param data_dir: where variable data such as fastresume information
        and GUI state is saved.
    @param listen_fail_ok: if false, a BTFailure is raised if a server
        socket cannot be opened to accept incoming peer connections.
    @param init_torrents: restore fast resume state from prior
        instantiations of MultiTorrent.
    @param is_single_torrent: if true, allow only one torrent at a time
        in this MultiTorrent.
    @param resume_from_torrent_config: resume from ui_state files.
    """
    # is_single_torrent will go away when we move MultiTorrent into a
    # separate process, in which case single-torrent applications like
    # curses and console will act as clients to the MultiTorrent daemon.
    #   --Dave
    #
    # init_torrents refers to fast resume rather than torrent config.
    # If init_torrents is set to False, the UI state file is still read
    # and the paths to existing downloads are still used.  This is not
    # what we want for launchmany.
    #
    # resume_from_torrent_config is separate from is_single_torrent
    # because launchmany must be able to have multiple torrents while
    # not resuming from torrent config state.  If launchmany resumes
    # from torrent config, then it saves or seeds from the path in the
    # torrent config even if the file has moved in the directory tree.
    # Because launchmany has no mechanism for removing torrents other
    # than changing the directory tree, the only way for the user to
    # eliminate the old state is to wipe out the files in the
    # .bittorrent/launchmany-*/ui_state directory.  This is highly
    # counterintuitive.  Best to simply ignore the ui_state directory
    # altogether.  --Dave
    assert isinstance(config, Preferences)
    #assert isinstance(data_dir, unicode)  # temporarily commented -Dave
    assert isinstance(listen_fail_ok, bool)
    assert not (is_single_torrent and resume_from_torrent_config)

    self.config = config
    self.data_dir = data_dir
    self.last_save_time = 0
    self.policies = []
    self.torrents = {}
    self.running = {}
    self.log_root = "core.MultiTorrent"
    self.logger = logging.getLogger(self.log_root)
    self.is_single_torrent = is_single_torrent
    self.resume_from_torrent_config = resume_from_torrent_config
    self.auto_update_policy_index = None
    self.dht = None
    self.rawserver = rawserver
    nattraverser = NatTraverser(self.rawserver)
    self.internet_watcher = get_internet_watcher(self.rawserver)
    self.singleport_listener = SingleportListener(self.rawserver,
                                                  nattraverser,
                                                  self.log_root,
                                                  # config['use_local_discovery']
                                                  False)
    self.choker = Choker(self.config, self.rawserver.add_task)
    self.up_ratelimiter = RateLimiter(self.rawserver.add_task)
    self.up_ratelimiter.set_parameters(config['max_upload_rate'],
                                       config['upload_unit_size'])
    self.down_ratelimiter = DownloadRateLimiter(
        config['download_rate_limiter_interval'],
        self.config['max_download_rate'])
    self.total_downmeasure = Measure(config['max_rate_period'])
    self._find_port(listen_fail_ok)
    self.filepool_doneflag = DeferredEvent()
    self.filepool = FilePool(self.filepool_doneflag,
                             self.rawserver.add_task,
                             self.rawserver.external_add_task,
                             config['max_files_open'],
                             config['num_disk_threads'])
    self.bitstring = bitstring

    if self.resume_from_torrent_config:
        try:
            self._restore_state(init_torrents)
        except BTFailure:
            # Restoring state is best-effort: log the failure and
            # continue with an empty torrent set rather than aborting
            # startup.
            self.logger.exception("_restore_state failed")

    def no_dump_set_option(option, value):
        self.set_option(option, value, dump=False)

    self.bandwidth_manager = BandwidthManager(
        self.rawserver.external_add_task, config, no_dump_set_option,
        self.rawserver.get_remote_endpoints,
        get_rates=self.get_total_rates)

    self.rawserver.add_task(0, self.butle)
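
    # Illustrative usage only (not part of the original source): a
    # single-torrent front end such as curses or console might drive the
    # constructor documented above roughly as follows.  The names `prefs`,
    # `rawserver` and `data_dir` are hypothetical stand-ins for objects the
    # caller already owns (`prefs` must be a Preferences instance, per the
    # assertion in __init__), and is_single_torrent and
    # resume_from_torrent_config cannot both be true.
    #
    #   multitorrent = MultiTorrent(prefs, rawserver, data_dir,
    #                               listen_fail_ok=True,
    #                               is_single_torrent=True,
    #                               resume_from_torrent_config=False)
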
class _SingleTorrent(object):

    def __init__(self, rawserver, singleport_listener, ratelimiter,
                 filepool, config, dht):
        self._rawserver = rawserver
        self._singleport_listener = singleport_listener
        self._ratelimiter = ratelimiter
        self._filepool = filepool
        self._dht = dht
        self._storage = None
        self._storagewrapper = None
        self._ratemeasure = None
        self._upmeasure = None
        self._downmeasure = None
        self._encoder = None
        self._rerequest = None
        self._statuscollecter = None
        self._announced = False
        self._listening = False
        self.reserved_ports = []
        self.reported_port = None
        self._myfiles = None
        self.started = False
        self.is_seed = False
        self.closed = False
        self.infohash = None
        self.total_bytes = None
        self._doneflag = threading.Event()
        self.finflag = threading.Event()
        self._hashcheck_thread = None
        self._contfunc = None
        self._activity = (_("Initial startup"), 0)
        self.feedback = None
        self.errors = []
        self.rlgroup = None
        self.config = config

    def start_download(self, *args, **kwargs):
        # _start_download is a generator; _contfunc re-enters it on the
        # event-loop thread so the blocking hash check can run in a
        # background thread and resume the generator when it finishes.
        it = self._start_download(*args, **kwargs)
        def cont():
            try:
                it.next()
            except StopIteration:
                self._contfunc = None
        def contfunc():
            self._rawserver.external_add_task(cont, 0, context=self)
        self._contfunc = contfunc
        contfunc()

    def _start_download(self, metainfo, feedback, save_path):
        self.feedback = feedback
        config = self.config

        self.infohash = metainfo.infohash
        self.total_bytes = metainfo.total_bytes
        if not metainfo.reported_errors:
            metainfo.show_encoding_errors(self._error)

        myid = self._make_id()
        seed(myid)
        def schedfunc(func, delay):
            self._rawserver.add_task(func, delay, context=self)
        def externalsched(func, delay):
            self._rawserver.external_add_task(func, delay, context=self)
        if metainfo.is_batch:
            myfiles = [os.path.join(save_path, f) for f in metainfo.files_fs]
        else:
            myfiles = [save_path]
        self._filepool.add_files(myfiles, self)
        self._myfiles = myfiles
        self._storage = Storage(config, self._filepool,
                                zip(myfiles, metainfo.sizes))

        resumefile = None
        if config['data_dir']:
            filename = os.path.join(config['data_dir'], 'resume',
                                    self.infohash.encode('hex'))
            if os.path.exists(filename):
                try:
                    resumefile = file(filename, 'rb')
                    if self._storage.check_fastresume(resumefile) == 0:
                        resumefile.close()
                        resumefile = None
                except Exception, e:
                    self._error(WARNING,
                                _("Could not load fastresume data: %s") %
                                str(e) + ' ' +
                                _("Will perform full hash check."))
                    if resumefile is not None:
                        resumefile.close()
                    resumefile = None

        def data_flunked(amount, index):
            self._ratemeasure.data_rejected(amount)
            self._error(INFO,
                        _("piece %d failed hash check, re-downloading it")
                        % index)

        backthread_exception = []
        def errorfunc(level, text):
            def e():
                self._error(level, text)
            externalsched(e, 0)

        def hashcheck():
            def statusfunc(activity=None, fractionDone=0):
                if activity is None:
                    activity = self._activity[0]
                self._activity = (activity, fractionDone)
            try:
                self._storagewrapper = StorageWrapper(self._storage,
                    config, metainfo.hashes, metainfo.piece_length,
                    self._finished, statusfunc, self._doneflag,
                    data_flunked, self.infohash, errorfunc, resumefile)
            except:
                backthread_exception.append(sys.exc_info())
            self._contfunc()
        thread = threading.Thread(target=hashcheck)
        thread.setDaemon(False)
        self._hashcheck_thread = thread
        thread.start()
        # Suspend here; hashcheck() calls self._contfunc() when the hash
        # check (StorageWrapper construction) completes.
        yield None

        self._hashcheck_thread = None
        if resumefile is not None:
            resumefile.close()
        if backthread_exception:
            a, b, c = backthread_exception[0]
            raise a, b, c

        if self._storagewrapper.amount_left == 0:
            self._finished()
        choker = Choker(config, schedfunc, self.finflag.isSet)
        upmeasure = Measure(config['max_rate_period'])
        upmeasure_seedtime = Measure(config['max_rate_period_seedtime'])
        downmeasure = Measure(config['max_rate_period'])
        self._upmeasure = upmeasure
        self._upmeasure_seedtime = upmeasure_seedtime
        self._downmeasure = downmeasure
        self._ratemeasure = RateMeasure(
            self._storagewrapper.amount_left_with_partials)
        picker = PiecePicker(len(metainfo.hashes), config)
        for i in xrange(len(metainfo.hashes)):
            if self._storagewrapper.do_I_have(i):
                picker.complete(i)
        for i in self._storagewrapper.stat_dirty:
            picker.requested(i)
        def kickpeer(connection):
            def kick():
                connection.close()
            schedfunc(kick, 0)
        def banpeer(ip):
            self._encoder.ban(ip)
        downloader = Downloader(config, self._storagewrapper, picker,
                                len(metainfo.hashes), downmeasure,
                                self._ratemeasure.data_came_in,
                                kickpeer, banpeer)
        def make_upload(connection):
            return Upload(connection, self._ratelimiter, upmeasure,
                          upmeasure_seedtime, choker, self._storagewrapper,
                          config['max_slice_length'],
                          config['max_rate_period'])

        self.reported_port = self.config['forwarded_port']
        if not self.reported_port:
            self.reported_port = \
                self._singleport_listener.get_port(self.change_port)
            self.reserved_ports.append(self.reported_port)

        if self._dht:
            addContact = self._dht.addContact
        else:
            addContact = None
        self._encoder = Encoder(make_upload, downloader, choker,
                                len(metainfo.hashes), self._ratelimiter,
                                self._rawserver, config, myid, schedfunc,
                                self.infohash, self, addContact,
                                self.reported_port)
        self._singleport_listener.add_torrent(self.infohash, self._encoder)
        self._listening = True

        if metainfo.is_trackerless:
            if not self._dht:
                self._error(CRITICAL,
                    _("Attempt to download a trackerless torrent with trackerless client turned off."))
                return
            else:
                if len(self._dht.table.findNodes(metainfo.infohash,
                                                 invalid=False)) < const.K:
                    for host, port in metainfo.nodes:
                        self._dht.addContact(host, port)
                self._rerequest = DHTRerequester(config,
                    schedfunc, self._encoder.how_many_connections,
                    self._encoder.start_connection, externalsched,
                    self._storagewrapper.get_amount_left,
                    upmeasure.get_total, downmeasure.get_total,
                    self.reported_port, myid, self.infohash, self._error,
                    self.finflag, upmeasure.get_rate, downmeasure.get_rate,
                    self._encoder.ever_got_incoming, self.internal_shutdown,
                    self._announce_done, self._dht)
        else:
            self._rerequest = Rerequester(metainfo.announce, config,
                schedfunc, self._encoder.how_many_connections,
                self._encoder.start_connection, externalsched,
                self._storagewrapper.get_amount_left,
                upmeasure.get_total, downmeasure.get_total,
                self.reported_port, myid, self.infohash, self._error,
                self.finflag, upmeasure.get_rate, downmeasure.get_rate,
                self._encoder.ever_got_incoming, self.internal_shutdown,
                self._announce_done)

        self._statuscollecter = DownloaderFeedback(choker,
            upmeasure.get_rate, upmeasure_seedtime.get_rate,
            downmeasure.get_rate, upmeasure.get_total,
            downmeasure.get_total, self._ratemeasure.get_time_left,
            self._ratemeasure.get_size_left, self.total_bytes,
            self.finflag, downloader, self._myfiles,
            self._encoder.ever_got_incoming, self._rerequest)

        self._announced = True
        if self._dht and len(self._dht.table.findNodes(self.infohash)) == 0:
            self._rawserver.add_task(self._dht.findCloseNodes, 5)
            self._rawserver.add_task(self._rerequest.begin, 20)
        else:
            self._rerequest.begin()
        self.started = True
        if not self.finflag.isSet():
            self._activity = (_("downloading"), 0)
        self.feedback.started(self)
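
    # Illustrative sketch (not from the original source): start_download and
    # _start_download above use a generator-as-coroutine idiom.  The
    # generator yields while the hash-check thread runs, and _contfunc
    # re-enters it on the event-loop thread via external_add_task.  A
    # minimal, hypothetical rendering of the same pattern (none of the names
    # below exist in this module; do_blocking_work and finish_up are
    # placeholders) looks roughly like:
    #
    #   def work(rawserver):
    #       def background():
    #           do_blocking_work()                        # off the event loop
    #           rawserver.external_add_task(resume, 0)    # hop back to the loop
    #       threading.Thread(target=background).start()
    #       yield None            # suspend until background() calls resume()
    #       finish_up()           # resumed on the event-loop thread
    #
    #   it = work(rawserver)
    #   def resume():
    #       try:
    #           it.next()         # Python 2 generator resume
    #       except StopIteration:
    #           pass
    #   resume()                  # prime the generator; runs up to the yield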