def _postrequest(self, data=None, failure=None):
    #self.errorfunc(logging.INFO, 'postrequest(%s): %s d:%s f:%s' %
    #               (self.__class__.__name__, self.current_started,
    #                bool(data), bool(failure)))
    self.current_started = None
    self.last_time = bttime()
    if self.dead:
        return
    if failure is not None:
        if failure.type == twisted.internet.error.TimeoutError:
            m = _("Timeout while contacting server.")
        else:
            m = failure.getErrorMessage()
        self.errorfunc(logging.WARNING, self._make_errormsg(m))
        self._fail(failure.exc_info())
        return
    try:
        r = bdecode(data)
        if LOG_RESPONSE:
            self.errorfunc(logging.INFO, 'tracker said: %r' % r)
        check_peers(r)
    except BTFailure, e:
        if data:
            self.errorfunc(logging.ERROR,
                           _("bad data from tracker (%r)") % data,
                           exc_info=sys.exc_info())
        self._fail()
        return
def _scan(self):
    try:
        # asynchronous parse.
        df = async_parsedir(self.torrent_dir, self.torrent_cache,
                            self.file_cache, self.blocked_files)
        yield df
        r = df.getResult()
        (self.torrent_cache, self.file_cache, self.blocked_files,
         added, removed) = r
        for infohash, (path, metainfo) in removed.items():
            self.logger.info(_('dropped "%s"') % path)
            self.remove(infohash)
        for infohash, (path, metainfo) in added.items():
            self.logger.info(_('added "%s"') % path)
            if self.config['launch_delay'] > 0:
                self.rawserver.add_task(self.config['launch_delay'],
                                        self.add, metainfo)
            # torrent may have been known from resume state.
            else:
                self.add(metainfo)
    except:
        self.logger.exception("scan threw exception")

    # register the call to parse a dir.
    self.rawserver.add_task(self.config['parse_dir_interval'], self.scan)
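
# Note: _scan uses the yield-a-Deferred coroutine style found throughout
# this codebase: the generator yields a Deferred, the coroutine driver
# resumes it when the Deferred fires, and getResult() then returns the
# value or re-raises the failure.  A minimal sketch of the pattern,
# assuming a launch_coroutine/wrap_task driver like the one used by
# MultiTorrent.shutdown (some_async_operation is hypothetical):
#
#     def _fetch(self):
#         df = some_async_operation()     # returns a Deferred
#         yield df                        # suspend until it fires
#         result = df.getResult()         # value, or raises the failure
#         self.logger.info("got %r" % (result,))
#
#     launch_coroutine(wrap_task(self.rawserver.add_task), self._fetch)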
def _make_errormsg(self, msg):
    proxy = self.config.get('tracker_proxy', None)
    url = self.baseurl
    if proxy:
        url = _("%s through proxy %s") % (url, proxy)
    return (_("Problem connecting to tracker (%s): %s") % (url, msg))
def get_url(url):
    """Downloads the .torrent metainfo file specified by the passed URL
       and returns data, the raw contents of the metainfo file.

       Any exception raised while trying to obtain the metainfo file is
       caught and GetTorrent.URLException is raised instead.
       """
    data = None
    err_str = ((_('Could not download or open "%s"') % url) + '\n' +
               _("Try using a web browser to download the torrent file."))
    u = None
    # pending protocol changes, convert:
    #   torrent://http://path.to/file
    # and:
    #   bittorrent://http://path.to/file
    # to:
    #   http://path.to/file
    url = urlpat_torrent.sub('', url)
    url = urlpat_bittorrent.sub('', url)
    try:
        u = zurllib.urlopen(url)
        data = u.read()
        u.close()
    except Exception, e:
        if u is not None:
            u.close()
        raise URLException(err_str + "\n(%s)" % e)
    return data
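
# Illustrative behavior of the prefix conversion described above (a
# sketch; urlpat_torrent and urlpat_bittorrent are this module's
# prefix-stripping regexes):
#
#     >>> urlpat_torrent.sub('', 'torrent://http://example.com/a.torrent')
#     'http://example.com/a.torrent'
#     >>> urlpat_bittorrent.sub('', 'bittorrent://http://example.com/a.torrent')
#     'http://example.com/a.torrent'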
def erase(self, iter):
    """Remove item pointed to by the iterator.  All iterators that
       point at the erased item, including the passed iterator, are
       immediately invalidated after the deletion completes.

       >>> from CMultiMap import CMultiMap
       >>> m = CMultiMap()
       >>> m[12] = 'foo'
       >>> i = m.find(12)
       >>> m.erase(i)
       >>> len(m) == 0
       True
       """
    if not iter._si:
        raise RuntimeError(_("invalid iterator"))
    if iter._si == BEGIN:
        raise IndexError(_("Iterator does not point at key-value pair"))
    if self is not iter._mmap:
        raise IndexError(_("Iterator points into a different CMultiMap."))
    if mmap_iiter_at_end(self._smmap, iter._si):
        raise IndexError(_("Cannot erase end() iterator."))

    # invalidate iterators.
    for i in list(self._iterators):
        if iter._si is not i._si and iiter_cmp(self._smmap, iter._si,
                                               i._si) == 0:
            i._invalidate()

    # remove item from the map.
    mmap_iiter_erase(self._smmap, iter._si)

    # invalidate last iterator pointing to the deleted location in the map.
    iter._invalidate()
def bdecode(x):
    try:
        r, l = decode_func[x[0]](x, 0)
    except (IndexError, KeyError, ValueError):
        raise BTFailure, _("not a valid bencoded string")
    if l != len(x):
        raise BTFailure, _("invalid bencoded value (data after valid prefix)")
    return r
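
# Illustrative session (a sketch): bencoding encodes strings as
# <length>:<bytes>, integers as i<n>e, lists as l...e and dicts as d...e,
# and bdecode rejects trailing data after a valid value:
#
#     >>> bdecode('d3:cow3:mooe')
#     {'cow': 'moo'}
#     >>> bdecode('li1ei2ee')
#     [1, 2]
#     >>> bdecode('i42etrailing')
#     Traceback (most recent call last):
#         ...
#     BTFailure: invalid bencoded value (data after valid prefix)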
def _remake_window(self):
    self.scrh, self.scrw = self.scrwin.getmaxyx()
    self.scrpan = curses.panel.new_panel(self.scrwin)
    self.mainwinh = (2 * self.scrh) // 3
    self.mainwinw = self.scrw - 4   # - 2 (bars) - 2 (spaces)
    self.mainwiny = 2               # + 1 (bar) + 1 (titles)
    self.mainwinx = 2               # + 1 (bar) + 1 (space)
    # + 1 to all windows so we can write at mainwinw
    self.mainwin = curses.newwin(self.mainwinh, self.mainwinw + 1,
                                 self.mainwiny, self.mainwinx)
    self.mainpan = curses.panel.new_panel(self.mainwin)
    self.mainwin.scrollok(0)
    self.mainwin.nodelay(1)
    self.mainwin.clearok(1)

    self.headerwin = curses.newwin(1, self.mainwinw + 1,
                                   1, self.mainwinx)
    self.headerpan = curses.panel.new_panel(self.headerwin)
    self.headerwin.scrollok(0)
    self.headerwin.clearok(0)

    self.totalwin = curses.newwin(1, self.mainwinw + 1,
                                  self.mainwinh + 1, self.mainwinx)
    self.totalpan = curses.panel.new_panel(self.totalwin)
    self.totalwin.scrollok(0)
    self.totalwin.clearok(0)

    self.statuswinh = self.scrh - 4 - self.mainwinh
    self.statuswin = curses.newwin(self.statuswinh, self.mainwinw + 1,
                                   self.mainwinh + 3, self.mainwinx)
    self.statuspan = curses.panel.new_panel(self.statuswin)
    self.statuswin.scrollok(0)
    self.statuswin.clearok(1)

    try:
        self.scrwin.border(ord('|'), ord('|'), ord('-'), ord('-'),
                           ord(' '), ord(' '), ord(' '), ord(' '))
    except:
        pass

    rcols = (_("Size"), _("Download"), _("Upload"))
    rwids = (9, 11, 11)
    rwid = sum(rwids)
    start = self.mainwinw - rwid
    self.headerwin.addnstr(0, 2, '#', start, curses.A_BOLD)
    self.headerwin.addnstr(0, 4, _("Filename"), start, curses.A_BOLD)
    for s, w in zip(rcols, rwids):
        st = start + max(w - len(s), 0)
        self.headerwin.addnstr(0, st, s[:w], len(s[:w]), curses.A_BOLD)
        start += w

    self.totalwin.addnstr(0, self.mainwinw - 29, _("Totals:"),
                          7, curses.A_BOLD)

    self._display_messages()

    curses.panel.update_panels()
    curses.doupdate()

    self.changeflag.clear()
def parse_options(defaults, newvalues, encoding):
    """Given the type provided by the default value, this tries to
       cast/convert the corresponding newvalue to the type of the default
       value.  By calling eval() on it, in some cases!

       Entertainingly, newvalue sometimes holds strings and, apparently,
       sometimes holds values which have already been cast appropriately.

       This function is like a boat made of shit, floating on a river of
       shit.

       @param defaults: dict of key-value pairs where value is the default.
       @param newvalues: dict of key-value pairs which override the default.
       """
    assert type(defaults) == dict
    assert type(newvalues) == dict
    for key, value in newvalues.iteritems():
        if not defaults.has_key(key):
            raise UsageException(_("unknown option ") + format_key(key))
        try:
            t = type(defaults[key])
            if t is bool:
                if value in ('True', '1', True):
                    value = True
                else:
                    value = False
                newvalues[key] = value
            elif t in (StringType, NoneType):
                # force ASCII
                newvalues[key] = value.decode('ascii').encode('ascii')
            elif t in (IntType, LongType):
                if value == 'False':
                    newvalues[key] = 0
                elif value == 'True':
                    newvalues[key] = 1
                else:
                    newvalues[key] = int(value)
            elif t is FloatType:
                newvalues[key] = float(value)
            elif t in (ListType, TupleType, DictType):
                if type(value) == StringType:
                    try:
                        n = eval(value)
                        assert type(n) == t
                        newvalues[key] = n
                    except:
                        newvalues[key] = t()
            elif t is UnicodeType:
                if type(value) == StringType:
                    try:
                        newvalues[key] = value.decode(encoding)
                    except:
                        newvalues[key] = value.decode('ascii')
            else:
                raise TypeError, str(t)
        except ValueError, e:
            raise UsageException(_("wrong format of %s - %s") %
                                 (format_key(key), str_exc(e)))
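
# Illustrative session (a sketch of the coercions above, with hypothetical
# option names; note that list/tuple/dict values really are eval()ed):
#
#     >>> defaults = {'verbose': False, 'max_uploads': 6, 'tracker_list': []}
#     >>> newvalues = {'verbose': '1', 'max_uploads': '10',
#     ...              'tracker_list': "['http://a/announce']"}
#     >>> parse_options(defaults, newvalues, 'utf-8')
#     >>> newvalues['verbose'], newvalues['max_uploads']
#     (True, 10)
#     >>> newvalues['tracker_list']
#     ['http://a/announce']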
def fmttime(n):
    if n <= 0:
        return None
    n = int(n)
    m, s = divmod(n, 60)
    h, m = divmod(m, 60)
    if h > 1000000:
        return _("connecting to peers")
    return _("ETA in %d:%02d:%02d") % (h, m, s)
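
# Illustrative values (a sketch):
#
#     >>> fmttime(0)            # non-positive means no estimate
#     >>> fmttime(3750)
#     'ETA in 1:02:30'
#     >>> fmttime(10 ** 10)     # absurdly large estimate: no peers yet
#     'connecting to peers'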
def async_parsedir(directory, parsed, files, blocked,
                   include_metainfo=True):
    """Recurses breadth-first starting from the passed 'directory'
       looking for .torrent files.  async_parsedir differs from parsedir
       in three ways: it is non-blocking, it returns a deferred, and it
       reports all errors to the 'BTL.parsedir' logger, meaning it does
       not use an errfunc.

       The directory, parsed, files, and blocked arguments are passed
       from the previous iteration of parsedir.

       @param directory: root of the breadth-first search for .torrent files.
       @param parsed: dict mapping infohash to (path, ConvertedMetainfo).
       @param files: dict mapping path -> [(modification time, size), infohash]
       @param blocked: dict used as set.  keys are paths of files that
           were not parsed on a prior call to parsedir for some reason.
           Valid reasons are that the .torrent file is unparseable or that
           a torrent with a matching infohash is already in the parsed set.
       @param include_metainfo: deprecated?
       @return: The tuple (new parsed, new files, new blocked, added,
           removed) where 'new parsed', 'new files', and 'new blocked' are
           updated versions of 'parsed', 'files', and 'blocked'
           respectively.  'added' and 'removed' contain the changes made
           to the first three members of the tuple.  'added' and 'removed'
           are dicts mapping from infohash on to the same torrent-specific
           info dict that is in or was in parsed.
       """
    log.info('async_parsedir %s' % directory)
    dirs_to_check = [directory]
    new_files = {}          # maps path -> [(modification time, size), infohash]
    new_blocked = {}        # used as a set.
    while dirs_to_check:    # first, recurse directories and gather torrents
        directory = dirs_to_check.pop()
        if NOISY:
            log.info("parsing directory %s" % directory)
        try:
            df = dtt(os.listdir, directory)
            yield df
            dir_contents = df.getResult()
        except (IOError, OSError), e:
            log.error(_("Could not read directory ") + directory)
            continue
        for f in dir_contents:
            if f.endswith('.torrent'):
                p = os.path.join(directory, f)
                try:
                    df = dtt(os.path.getmtime, p)
                    yield df
                    tmt = df.getResult()
                    df = dtt(os.path.getsize, p)
                    yield df
                    sz = df.getResult()
                    new_files[p] = [(tmt, sz), 0]
                except (IOError, OSError), e:
                    log.error(_("Could not stat ") + p + " : " +
                              unicode(e.args[0]))
def append(self, k, v):
    """Performs an insertion with the hint that it probably should
       go at the end.  Raises KeyError if the key is already in the map.

       >>> from PMap import PIndexedMap
       >>> m = PIndexedMap()
       >>> m.append(5, 'foo')
       >>> m
       {5: 'foo'}
       >>> m.append(10, 'bar')
       >>> m
       {5: 'foo', 10: 'bar'}
       >>> m.append(3, 'coo')   # out-of-order.
       >>> m
       {3: 'coo', 5: 'foo', 10: 'bar'}
       >>> m.get_key_by_value('bar')
       10
       >>> try:
       ...     m.append(10, 'blah')   # append key already in map.
       ... except KeyError:
       ...     print 'ok'
       ...
       ok
       >>> m
       {3: 'coo', 5: 'foo', 10: 'bar'}
       >>> try:
       ...     m.append(10, 'coo')    # append value already in map.
       ... except ValueError:
       ...     print 'ok'
       ...
       ok
       """
    # if value is already in the map then throw an error.
    try:
        if self._value_index[v] != k:
            raise ValueError(_("Value is already in the map.  "
                               "Both values and keys must be unique."))
    except KeyError:
        # value was not in the cross index.
        pass
    if self._index.has_key(k):
        raise KeyError(_("Key is already in the map.  Both values and "
                         "keys must be unique."))
    if len(self._olist) == 0 or k > self._olist[len(self._olist) - 1].k:
        self._olist.append(PIndexedMap.Item(k, v))
    else:
        insort_left(self._olist, PIndexedMap.Item(k, v))
    self._value_index[v] = k
    self._index[k] = v
def shutdown():
    self.logger.critical(_("shutting down"))
    if self.multitorrent:
        for t in self.multitorrent.get_torrents():
            self.logger.info(_('dropped "%s"') %
                             self.torrent_cache[t.infohash][0])
        df = self.multitorrent.shutdown()
        set_flag = lambda *a: self.rawserver.stop()
        df.addCallbacks(set_flag, set_flag)
    else:
        self.rawserver.stop()
def __init__(self, parent, path='', setfunc=None, editable=True,
             dialog_title=_("Choose a file..."),
             button_label=_("&Browse..."),
             wildcard=_("All files (*.*)|*.*"),
             dialog_style=wx.OPEN):
    ChooseDirectorySizer.__init__(self, parent, path=path, setfunc=setfunc,
                                  editable=editable,
                                  dialog_title=dialog_title,
                                  button_label=button_label)
    self.wildcard = wildcard
    self.dialog_style = dialog_style
def __init__(self, parent, file_list, announce, piece_length, title,
             comment, config):
    BTDialog.__init__(self, parent=parent, size=(400, -1))
    self.parent = parent
    self.SetTitle(_("Building torrents..."))
    self.file_list = file_list
    self.announce = deunicode(announce)
    self.piece_length = piece_length
    self.title = deunicode(title)
    self.comment = deunicode(comment)
    self.config = config
    self.flag = Event()   # ???

    self.vbox = VSizer()
    self.label = wx.StaticText(self, label=_("Checking file sizes..."))
    #self.label.set_line_wrap(True)
    self.vbox.AddFirst(self.label, flag=wx.ALIGN_LEFT)

    self.progressbar = wx.Gauge(self, range=1000, size=(400, 25),
                                style=wx.GA_SMOOTH)
    self.vbox.Add(self.progressbar, flag=wx.GROW)

    self.vbox.Add(wx.StaticLine(self, style=wx.LI_HORIZONTAL), flag=wx.GROW)

    self.action_area = wx.BoxSizer(wx.HORIZONTAL)

    self.cancelbutton = wx.Button(self, label=_("&Abort"))
    self.cancelbutton.Bind(wx.EVT_BUTTON, self.cancel)
    self.action_area.Add(self.cancelbutton,
                         flag=wx.LEFT | wx.RIGHT | wx.BOTTOM,
                         border=SPACING)

    self.done_button = wx.Button(self, label=_("&Ok"))
    self.done_button.Bind(wx.EVT_BUTTON, self.cancel)
    self.action_area.Add(self.done_button,
                         flag=wx.LEFT | wx.RIGHT | wx.BOTTOM,
                         border=SPACING)
    self.action_area.Show(self.done_button, False)

    self.seed_button = wx.Button(self, label=_("&Start seeding"))
    self.seed_button.Bind(wx.EVT_BUTTON, self.seed)
    self.action_area.Add(self.seed_button,
                         flag=wx.RIGHT | wx.BOTTOM, border=SPACING)
    self.action_area.Show(self.seed_button, False)

    self.Bind(wx.EVT_CLOSE, self.cancel)

    self.vbox.Add(self.action_area, flag=wx.ALIGN_RIGHT, border=0)

    self.SetSizerAndFit(self.vbox)
    self.Show()
def create_torrent(self, metainfo, save_incomplete_as, save_as,
                   hidden=False, is_auto_update=False, feedback=None):
    if self.is_single_torrent and len(self.torrents) > 0:
        raise TooManyTorrents(_("MultiTorrent is set to download only "
                                "a single torrent, but tried to create "
                                "more than one."))

    #save_as, junk = encode_for_filesystem(save_as)
    #save_incomplete_as, junk = encode_for_filesystem(save_incomplete_as)
    infohash = metainfo.infohash
    if self.torrent_known(infohash):
        if self.torrent_running(infohash):
            msg = _("This torrent (or one with the same contents) is "
                    "already running.")
            raise TorrentAlreadyRunning(msg)
        else:
            raise TorrentAlreadyInQueue(_("This torrent (or one with "
                                          "the same contents) is "
                                          "already waiting to run."))
    self._dump_metainfo(metainfo)

    #BUG.  Use _read_torrent_config for 5.0?  --Dave
    config = configfile.read_torrent_config(
        self.config, self.data_dir, infohash,
        lambda s: self.global_error(logging.ERROR, s))

    t = Torrent(metainfo, save_incomplete_as, save_as, self.config,
                self.data_dir, self.rawserver, self.choker,
                self.singleport_listener, self.up_ratelimiter,
                self.down_ratelimiter, self.total_downmeasure,
                self.filepool, self.dht, self, self.log_root,
                hidden=hidden, is_auto_update=is_auto_update)
    if feedback:
        t.add_feedback(feedback)

    retdf = Deferred()

    def torrent_started(*args):
        if config:
            t.update_config(config)
        t._dump_torrent_config()
        if self.resume_from_torrent_config:
            self._dump_torrents()
        t.metainfo.show_encoding_errors(self.logger.log)
        retdf.callback(t)

    df = self._init_torrent(t, use_policy=False)
    df.addCallback(torrent_started)

    return retdf
def track(args):
    assert type(args) == list and \
           len([x for x in args if type(x) == str]) == len(args)

    config = {}
    defaults = get_defaults('bittorrent-tracker')   # hard-coded defaults.
    try:
        config, files = parse_configuration_and_args(defaults,
                                                     'bittorrent-tracker',
                                                     args, 0, 0)
    except ValueError, e:
        print _("error: ") + str_exc(e)
        print _("run with -? for parameter explanations")
        return
def value(self):
    """@return: the value of the key-value pair currently referenced
       by this iterator.
       """
    if not self._si:
        raise RuntimeError(_("invalid iterator"))
    if self._si == BEGIN:
        raise IndexError(_("Cannot dereference iterator until after "
                           "first call to next."))
    elif mmap_iiter_at_end(self._mmap._smmap, self._si):
        raise IndexError()

    return iiter_value(self._si)
def parsedir(directory, parsed, files, blocked, errfunc,
             include_metainfo=True):
    """Recurses breadth-first starting from the passed 'directory'
       looking for .torrent files.  THIS IS BLOCKING.  Run this in a
       thread if you don't want it to block the program.  Or better yet,
       use async_parsedir.

       The directory, parsed, files, and blocked arguments are passed
       from the previous iteration of parsedir.

       @param directory: root of the breadth-first search for .torrent files.
       @param parsed: dict mapping infohash to (path, ConvertedMetainfo).
       @param files: dict mapping path -> [(modification time, size), infohash]
       @param blocked: dict used as set.  keys are paths of files that
           were not parsed on a prior call to parsedir for some reason.
           Valid reasons are that the .torrent file is unparseable or that
           a torrent with a matching infohash is already in the parsed set.
       @param errfunc: error-reporting callback.
       @param include_metainfo: deprecated?
       @return: The tuple (new parsed, new files, new blocked, added,
           removed) where 'new parsed', 'new files', and 'new blocked' are
           updated versions of 'parsed', 'files', and 'blocked'
           respectively.  'added' and 'removed' contain the changes made
           to the first three members of the tuple.  'added' and 'removed'
           are dicts mapping from infohash on to the same torrent-specific
           info dict that is in or was in parsed.
       """
    if NOISY:
        errfunc('checking dir')
    dirs_to_check = [directory]
    new_files = {}          # maps path -> [(modification time, size), infohash]
    new_blocked = {}        # used as a set.
    while dirs_to_check:    # first, recurse directories and gather torrents
        directory = dirs_to_check.pop()
        errfunc("parsing directory %s" % directory)
        try:
            dir_contents = os.listdir(directory)
        except (IOError, OSError), e:
            errfunc(_("Could not read directory ") + directory)
            continue
        for f in dir_contents:
            if f.endswith('.torrent'):
                p = os.path.join(directory, f)
                try:
                    new_files[p] = [(os.path.getmtime(p),
                                     os.path.getsize(p)), 0]
                except (IOError, OSError), e:
                    errfunc(_("Could not stat ") + p + " : " +
                            unicode(e.args[0]))
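
# Usage sketch (hypothetical paths; shows how the return tuple feeds the
# next call, per the docstring above):
#
#     parsed, files, blocked = {}, {}, {}
#     def errfunc(msg):
#         print msg
#     parsed, files, blocked, added, removed = \
#         parsedir(u'/torrents', parsed, files, blocked, errfunc)
#     for infohash, (path, metainfo) in added.items():
#         print 'new torrent:', path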
def erase(self, iter):
    """Remove item pointed to by the iterator.  Iterator is immediately
       invalidated after the deletion completes."""
    if not iter._si:
        raise RuntimeError(_("invalid iterator"))
    if iter._si == BEGIN:
        raise IndexError(_("Iterator does not point at key-value pair."))
    if self is not iter._map:
        raise IndexError(_("Iterator points into a different "
                           "CIndexedMap."))
    if map_iter_at_end(self._smap, iter._si):
        raise IndexError(_("Cannot erase end() iterator."))
    value = iter.value()
    CMap.erase(self, iter)
    del self._value_index[value]
def create(self):
    filename = self.socket_filename
    if os.path.exists(filename):
        try:
            self.send_command('no-op')
        except BTFailure:
            pass
        else:
            raise BTFailure(_("Could not create control socket: "
                              "already in use"))
        try:
            os.unlink(filename)
        except OSError, e:
            raise BTFailure(_("Could not remove old control socket "
                              "filename:") + str_exc(e))
def shutdown():
    self.logger.critical(_("shutting down"))
    if self.multitorrent:
        if len(self.multitorrent.get_torrents()) > 0:
            for t in self.multitorrent.get_torrents():
                self.logger.info(_('dropped "%s"') %
                                 self.torrent_cache[t.infohash][0])

        def after_mt(r):
            self.logger.critical("multitorrent shutdown completed. "
                                 "Calling rawserver.stop")
            self.rawserver.stop()
        self.logger.critical("calling multitorrent shutdown")
        df = self.multitorrent.shutdown()
        #set_flag = lambda *a : self.rawserver.stop()
        df.addCallbacks(after_mt, after_mt)
    else:
        self.rawserver.stop()
def display(self, data):
    try:
        if self.changeflag.isSet():
            return

        inchar = self.mainwin.getch()
        if inchar == 12:   # ^L
            self._remake_window()

        self.mainwin.erase()
        if data:
            self._display_data(data)
        else:
            self.mainwin.addnstr(1, self.mainwinw // 2 - 5,
                                 _("no torrents"), 12, curses.A_BOLD)

        totalup = 0
        totaldn = 0
        for (name, status, progress, peers, seeds, seedsmsg,  # dist,
             uprate, dnrate, upamt, dnamt, size, t, msg) in data:
            totalup += uprate
            totaldn += dnrate

        totalup = "%s/s" % fmtsize(totalup)
        totaldn = "%s/s" % fmtsize(totaldn)

        self.totalwin.erase()
        self.totalwin.addnstr(0, self.mainwinw - 27, _("Totals:"),
                              7, curses.A_BOLD)
        self.totalwin.addnstr(0, self.mainwinw - 20 + (10 - len(totaldn)),
                              totaldn, 10, curses.A_BOLD)
        self.totalwin.addnstr(0, self.mainwinw - 10 + (10 - len(totalup)),
                              totalup, 10, curses.A_BOLD)

        curses.panel.update_panels()
        curses.doupdate()
    except:
        pass

    return inchar in (ord("q"), ord("Q"))
def __cmp__(self, other):
    if not self._si or not other._si:
        raise RuntimeError(_("invalid iterator"))
    if self._si == BEGIN and other._si == BEGIN:
        return 0
    if self._si == BEGIN and other._si != BEGIN:
        return -1
    elif self._si != BEGIN and other._si == BEGIN:
        return 1
    return iiter_cmp(self._mmap._smmap, self._si, other._si)
def at_begin(self):
    """equivalent to self == m.begin() where m is a CMultiMap.

       >>> from CMultiMap import CMultiMap
       >>> m = CMultiMap()
       >>> i = m.begin()
       >>> i == m.begin()
       True
       >>> i.at_begin()
       True
       >>> i == m.end()    # no elements so begin() == end()
       True
       >>> i.at_end()
       True
       >>> m[6] = 'foo'    # insertion does not invalidate iterators.
       >>> i = m.begin()
       >>> i == m.end()
       False
       >>> i.value()
       'foo'
       >>> try:            # test at_begin when not at beginning.
       ...     i.next()
       ... except StopIteration:
       ...     print 'ok'
       ok
       >>> i.at_begin()
       False
       """
    if not self._si:
        raise RuntimeError(_("invalid iterator"))
    if self._si == BEGIN:   # BEGIN is one before begin().  Yuck!!
        return False
    return mmap_iiter_at_begin(self._mmap._smmap, self._si)
def _restore_state(self, init_torrents):
    def decode_line(line):
        hashtext = line[:40]
        try:
            infohash = InfoHashType(hashtext.decode('hex'))
        except:
            raise BTFailure(_("Invalid state file contents"))
        if len(infohash) != 20:
            raise BTFailure(_("Invalid state file contents"))
        if infohash in self.torrents:
            raise BTFailure(_("Invalid state file (duplicate entry)"))

        try:
            metainfo = self._read_metainfo(infohash)
        except OSError, e:
            try:
                f.close()
            except:
                pass
            self.logger.error((_("Error reading metainfo file \"%s\".") %
                               hashtext) + " (" + str_exc(e) + "), " +
                              _("cannot restore state completely"))
            return None
        except Exception, e:
            self.logger.error((_("Corrupt data in metainfo \"%s\", "
                                 "cannot restore torrent.") % hashtext) +
                              '(' + str_exc(e) + ')')
            return None
def formatDefinitions(options, COLS):
    s = u''
    indent = u" " * 10
    width = COLS - 11
    if width < 15:
        width = COLS - 2
        indent = " "

    for option in options:
        (longname, default, doc) = option
        if doc == '':
            continue
        s += u'--' + longname
        is_boolean = type(default) is bool
        if is_boolean:
            s += u', --no_' + longname
        else:
            s += u' <arg>'
        s += u'\n'
        if default is not None:
            doc += _(u" (defaults to ") + repr(default) + u')'
        i = 0
        for word in doc.split():
            if i == 0:
                s += indent + word
                i = len(word)
            elif i + len(word) >= width:
                s += u'\n' + indent + word
                i = len(word)
            else:
                s += u' ' + word
                i += len(word) + 1
        s += u'\n\n'
    return s
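
# Illustrative output (a sketch, computed for a 40-column terminal, so
# the wrap width is 29 and the indent is 10 spaces):
#
#     >>> opts = [('verbose', False,
#     ...          'display verbose information in user interface')]
#     >>> print formatDefinitions(opts, 40)
#     --verbose, --no_verbose
#               display verbose information
#               in user interface (defaults
#               to False)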
def _batch_read(self, pos, amount):
    dfs = []
    r = []

    # queue all the reads
    for filename, pos, end in self._intervals(pos, amount):
        df = self._file_op(filename, pos, end - pos, write=False)
        dfs.append(df)

    # yield on all the reads in order - they complete in any order
    exc = None
    for df in dfs:
        yield df
        try:
            r.append(df.getResult())
        except:
            exc = exc or sys.exc_info()
    if exc:
        raise exc[0], exc[1], exc[2]

    r = ''.join(r)

    if len(r) != amount:
        raise BTFailure(_("Short read (%d of %d) - "
                          "something truncated files?") %
                        (len(r), amount))

    yield r
def add_files(self, files, torrent):
    for filename in files:
        if filename in self.file_to_torrent:
            raise BTFailure(_("File %s belongs to another running torrent")
                            % filename)
    for filename in files:
        self.file_to_torrent[filename] = torrent
def _find_port(self, listen_fail_ok=True):
    """Run BitTorrent on the first available port found starting from
       minport in the range [minport, maxport]."""

    exc_info = None

    self.config['minport'] = max(1024, self.config['minport'])
    self.config['maxport'] = max(self.config['minport'],
                                 self.config['maxport'])

    e = (_("maxport less than minport - no ports to check") +
         (": %s %s" % (self.config['minport'], self.config['maxport'])))

    for port in xrange(self.config['minport'], self.config['maxport'] + 1):
        try:
            self.singleport_listener.open_port(port, self.config)
            if self.config['start_trackerless_client']:
                self.dht = UTKhashmir(self.config['bind'],
                                      self.singleport_listener.get_port(),
                                      self.data_dir,
                                      self.rawserver,
                                      int(self.config['max_upload_rate'] *
                                          0.01),
                                      rlcount=self.up_ratelimiter.increase_offset,
                                      config=self.config)
            break
        except socket.error, e:
            exc_info = sys.exc_info()
def remove(self, infohash):
    df = self.multitorrent.remove_torrent(infohash)
    df.addCallback(lambda *a: self.was_stopped(infohash))
    df.addErrback(lambda e: self.logger.error(_("Remove failed: "),
                                              exc_info=e))
# defaultargs.py is even more annoying.  --Dave
ddir = os.path.join(platform.get_dot_dir(), "launchmany-console")
ddir = decode_from_filesystem(ddir)
modify_default(defaults, 'data_dir', ddir)
config, args = configfile.parse_configuration_and_args(defaults,
                                                       uiname,
                                                       sys.argv[1:], 0, 1)

# returned from here config['save_in'] is /home/dave/Desktop/...
if args:
    torrent_dir = args[0]
    config['torrent_dir'] = decode_from_filesystem(torrent_dir)
else:
    torrent_dir = config['torrent_dir']
    torrent_dir, bad = encode_for_filesystem(torrent_dir)
    if bad:
        raise BTFailure(_("Warning: ") + config['torrent_dir'] +
                        _(" is not a directory"))

if not os.path.isdir(torrent_dir):
    raise BTFailure(_("Warning: ") + torrent_dir +
                    _(" is not a directory"))

# the default behavior is to save_in files to the platform
# get_save_dir.  For launchmany, if no command-line argument
# changed the save directory then use the torrent directory.
#if config['save_in'] == platform.get_save_dir():
#    config['save_in'] = config['torrent_dir']
if '--save_in' in sys.argv:
    print "Don't use --save_in for launchmany-console.  Saving files " \
          "from many torrents in the same directory can result in " \
          "filename collisions."
    sys.exit(1)
def __init__(self, config, display, configfile_key):
    """Starts torrents for all .torrent files in a directory tree.

       All errors are logged using Python logging to 'configfile_key'
       logger.

       @param config: Preferences object storing config.
       @param display: output function for stats.
       """

    # 4.4.x version of LaunchMany output exceptions to a displayer.
    # This version only outputs stats to the displayer.  We do not use
    # the logger to output stats so that a caller-provided object
    # can provide stats formatting as opposed to using the
    # logger Formatter, which is specific to exceptions, warnings, and
    # info messages.
    self.logger = logging.getLogger(configfile_key)
    try:
        self.multitorrent = None
        self.rawserver = None
        self.config = config
        self.configfile_key = configfile_key
        self.display = display

        self.torrent_dir = efs2(config['torrent_dir'])

        # Ex: torrent_cache = infohash -> (path, metainfo)
        self.torrent_cache = {}

        # maps path -> [(modification time, size), infohash]
        self.file_cache = {}

        # used as set containing paths of files that do not have separate
        # entries in torrent_cache either because torrent_cache already
        # contains the torrent or because the torrent file is corrupt.
        self.blocked_files = {}

        #self.torrent_list = []
        #self.downloads = {}

        self.hashcheck_queue = []
        #self.hashcheck_store = {}
        self.hashcheck_current = None

        self.core_doneflag = DeferredEvent()
        self.rawserver = RawServer(self.config)

        try:
            # set up shut-down procedure before we begin doing things that
            # can throw exceptions.
            def shutdown():
                self.logger.critical(_("shutting down"))
                if self.multitorrent:
                    if len(self.multitorrent.get_torrents()) > 0:
                        for t in self.multitorrent.get_torrents():
                            self.logger.info(
                                _('dropped "%s"') %
                                self.torrent_cache[t.infohash][0])

                    def after_mt(r):
                        self.logger.critical("multitorrent shutdown "
                                             "completed. Calling "
                                             "rawserver.stop")
                        self.rawserver.stop()
                    self.logger.critical("calling multitorrent shutdown")
                    df = self.multitorrent.shutdown()
                    #set_flag = lambda *a : self.rawserver.stop()
                    df.addCallbacks(after_mt, after_mt)
                else:
                    self.rawserver.stop()

                ### PROFILER POSTPROCESSING.
                #self.logger.critical( "Disabling profiles" )
                #prof.disable()
                #self.logger.critical( "Running profiler post-processing" )
                #stats = Stats(prof.getstats())
                #stats.sort("inlinetime")
                #self.logger.info( "Calling stats.pprint")
                #stats.pprint()
                #self.logger.info( "After stats.pprint")
                ### PROFILER POSTPROCESSING

            # It is safe to addCallback here, because there is only one
            # thread, but even if the code were multi-threaded,
            # core_doneflag has not been passed to anyone.  There is no
            # chance of a race condition between the DeferredEvent's
            # callback and addCallback.
            self.core_doneflag.addCallback(
                lambda r: self.rawserver.external_add_task(0, shutdown))

            self.rawserver.install_sigint_handler(self.core_doneflag)

            data_dir = config['data_dir']
            self.multitorrent = MultiTorrent(
                config, self.rawserver, data_dir,
                resume_from_torrent_config=False)

            self.rawserver.add_task(0, self.scan)
            self.rawserver.add_task(0.5, self.periodic_check_hashcheck_queue)
            self.rawserver.add_task(self.config['display_interval'],
                                    self.periodic_stats)

            try:
                import signal
                def handler(signum, frame):
                    self.rawserver.external_add_task(0, self.read_config)
                if hasattr(signal, 'SIGHUP'):
                    signal.signal(signal.SIGHUP, handler)
            except Exception, e:
                self.logger.error(_("Could not set signal handler: ") +
                                  str_exc(e))
                self.rawserver.add_task(0, self.core_doneflag.set)

        except UserFailure, e:
            self.logger.error(str_exc(e))
            self.rawserver.add_task(0, self.core_doneflag.set)
        except:
            #data = StringIO()
            #print_exc(file = data)
            #self.logger.error(data.getvalue())
            self.logger.exception("Exception raised while initializing "
                                  "LaunchMany")
            self.rawserver.add_task(0, self.core_doneflag.set)

        # always make sure events get processed even if only for
        # shutting down.
        self.rawserver.listen_forever()
        self.logger.info("After rawserver.listen_forever")
class StatusLight(object):

    initial_state = 'stopped'

    states = {
        # state : (stock icon name, label, tool tip),
        'stopped'   : ('stopped',
                       _("Paused"),
                       _("Paused")),
        'empty'     : ('stopped',
                       _("No torrents"),
                       _("No torrents")),
        'starting'  : ('starting',
                       _("Checking for firewall..."),  #_("Starting up..."),
                       _("Starting download")),
        'pre-natted': ('pre-natted',
                       _("Checking for firewall..."),
                       _("Online, checking for firewall")),
        'running'   : ('running',
                       _("Online, ports open"),
                       _("Online, running normally")),
        'natted'    : ('natted',
                       _("Online, maybe firewalled"),
                       _("Online, but downloads may be slow due to "
                         "firewall/NAT")),
        'broken'    : ('broken',
                       _("No network connection"),
                       _("Check network connection")),
        }

    messages = {
        # message : default new state,
        'stop'             : 'stopped',
        'empty'            : 'empty',
        'start'            : 'starting',
        'seen_peers'       : 'pre-natted',
        'seen_remote_peers': 'running',
        'broken'           : 'broken',
        }

    transitions = {
        # state : { message : custom new state, },
        'pre-natted': {'start'     : 'pre-natted',
                       'seen_peers': 'pre-natted'},
        'running'   : {'start'     : 'running',
                       'seen_peers': 'running'},
        'natted'    : {'start'     : 'natted',
                       'seen_peers': 'natted'},
        'broken'    : {'start'     : 'broken'},
        #TODO: add broken transitions
        }

    time_to_nat = 60 * 5   # 5 minutes

    def __init__(self):
        self.mystate = self.initial_state
        self.start_time = None

    def send_message(self, message):
        if message not in self.messages.keys():
            #print 'bad message', message
            return
        new_state = self.messages[message]
        if self.transitions.has_key(self.mystate):
            if self.transitions[self.mystate].has_key(message):
                new_state = self.transitions[self.mystate][message]

        # special pre-natted timeout logic
        if new_state == 'pre-natted':
            if (self.mystate == 'pre-natted' and
                bttime() - self.start_time > self.time_to_nat):
                # go to natted state after a while
                new_state = 'natted'
            elif self.mystate != 'pre-natted':
                # start pre-natted timer
                self.start_time = bttime()

        if new_state != self.mystate:
            #print 'changing state from', self.mystate, 'to', new_state
            self.mystate = new_state
            self.change_state()

    def change_state(self):
        pass

    def get_tip(self):
        return self.states[self.mystate][2]

    def get_label(self):
        return self.states[self.mystate][1]
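
# Usage sketch of the state machine above (change_state is the hook a
# subclass overrides to repaint the icon):
#
#     >>> light = StatusLight()
#     >>> light.send_message('start')
#     >>> light.mystate
#     'starting'
#     >>> light.send_message('seen_peers')        # local peers only
#     >>> light.mystate
#     'pre-natted'
#     >>> light.send_message('seen_remote_peers')
#     >>> light.mystate
#     'running'
#     >>> light.send_message('seen_peers')        # custom transition: stays
#     >>> light.mystate
#     'running'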
def draw(self, dc, size):
    s_rect = wx.Rect(0, 0, size.width, size.height)

    elements = list(self.history.upload_data) + \
               list(self.history.download_data)
    max_data = max(elements + [self.MIN_MAX_DATA])

    interval = self.history.interval / 1000
    seconds = self.history.max_len * interval
    time_label_text = "%d" % seconds + _(" seconds, ")
    time_label_text += str(interval) + _(" second interval")

    dr_label_text = _("Download rate")
    ur_label_text = _("Upload rate")

    text_color = wx.NamedColour("light gray")
    border_color = wx.NamedColour("gray")
    dr_color = wx.NamedColor("green")
    ur_color = wx.NamedColor("slate blue")

    size = 8
    if sys.platform == "darwin":
        size = 10
    dc.SetFont(wx.Font(size, wx.DEFAULT, wx.NORMAL, wx.NORMAL))

    dc.SetBackground(wx.Brush(self.GetBackgroundColour()))
    dc.Clear()

    if (self.max_label_width == None):
        self.max_label_width = dc.GetTextExtent(unicode(Rate(1000000.0)))[0]
    self.max_label_width = max(self.max_label_width,
                               dc.GetTextExtent(unicode(Rate(max_data)))[0])
    top_label_height = dc.GetTextExtent(unicode(Rate(max_data)))[1]
    bottom_label_height = dc.GetTextExtent(unicode(Rate(0.0)))[1]

    time_label_width = dc.GetTextExtent(unicode(time_label_text))[0]
    time_label_height = dc.GetTextExtent(unicode(time_label_text))[1]

    dr_label_width = dc.GetTextExtent(unicode(dr_label_text))[0]
    dr_label_height = dc.GetTextExtent(unicode(dr_label_text))[1]
    ur_label_width = dc.GetTextExtent(unicode(ur_label_text))[0]
    ur_label_height = dc.GetTextExtent(unicode(ur_label_text))[1]

    label_spacer = 4
    b_spacer = 15

    legend_box_size = 10
    legend_guts_height = max((legend_box_size,
                              ur_label_height,
                              dr_label_height))
    legend_height = legend_guts_height + b_spacer

    x1 = b_spacer
    y1 = b_spacer
    x2 = max(x1, s_rect.GetRight() - (label_spacer +
                                      self.max_label_width +
                                      label_spacer))
    y2 = max(y1 + top_label_height + SPACING + bottom_label_height,
             s_rect.GetBottom() - (label_spacer +
                                   time_label_height +
                                   label_spacer +
                                   legend_height))
    b_rect = wx.RectPP(wx.Point(x1, y1), wx.Point(x2, y2))

    x1 = b_spacer + b_spacer
    y1 = max((b_rect.GetBottom() + label_spacer + time_label_height +
              label_spacer),
             s_rect.GetBottom() - (legend_box_size + b_spacer))
    x2 = x1 + legend_box_size
    y2 = y1 + legend_box_size
    db_rect = wx.RectPP(wx.Point(x1, y1), wx.Point(x2, y2))

    x1 = db_rect.GetRight() + label_spacer + dr_label_width + b_spacer
    y1 = db_rect.y
    x2 = x1 + legend_box_size
    y2 = y1 + legend_box_size
    ub_rect = wx.RectPP(wx.Point(x1, y1), wx.Point(x2, y2))

    x1 = min(b_rect.x + 1, b_rect.GetRight())
    y1 = min(b_rect.y + 1, b_rect.GetBottom())
    x2 = max(x1, b_rect.GetRight() - 1)
    y2 = max(y1, b_rect.GetBottom() - 1)
    i_rect = wx.RectPP(wx.Point(x1, y1), wx.Point(x2, y2))

    bw_label_x = b_rect.GetRight() + label_spacer
    time_label_x = max(b_rect.x,
                       (b_rect.GetRight() / 2) - (time_label_width / 2))

    dc.SetTextForeground(text_color)

    dc.DrawText(unicode(Rate(max_data)), bw_label_x, b_rect.y)
    dc.DrawText(unicode(Rate(0.0)), bw_label_x,
                b_rect.GetBottom() - bottom_label_height)
    dc.DrawText(unicode(time_label_text), time_label_x,
                b_rect.GetBottom() + label_spacer)

    dc.DrawText(unicode(dr_label_text),
                db_rect.GetRight() + label_spacer,
                db_rect.y + (legend_box_size / 2) - (dr_label_height / 2))
    dc.DrawText(unicode(ur_label_text),
                ub_rect.GetRight() + label_spacer,
                ub_rect.y + (legend_box_size / 2) - (ur_label_height / 2))

    pen = wx.Pen(border_color, 1, wx.SOLID)
    dc.SetPen(pen)

    brush = wx.Brush(dr_color)
    dc.SetBrush(brush)
    dc.DrawRectangle(db_rect.x, db_rect.y,
                     db_rect.GetWidth(), db_rect.GetHeight())

    brush = wx.Brush(ur_color)
    dc.SetBrush(brush)
    dc.DrawRectangle(ub_rect.x, ub_rect.y,
                     ub_rect.GetWidth(), ub_rect.GetHeight())

    dc.DrawLine(b_rect.x, b_rect.y, b_rect.GetRight(), b_rect.y)
    dc.DrawLine(b_rect.x, b_rect.y, b_rect.x, b_rect.GetBottom())
    dc.DrawLine(b_rect.x, b_rect.GetBottom(),
                b_rect.GetRight(), b_rect.GetBottom())
    dc.DrawLine(b_rect.GetRight(), b_rect.y,
                b_rect.GetRight(), b_rect.GetBottom())

    pen = wx.Pen(border_color, 1, wx.DOT)
    dc.SetPen(pen)
    dc.DrawLine(i_rect.x, i_rect.y + (i_rect.height * 0.75),
                i_rect.GetRight(), i_rect.y + (i_rect.height * 0.75))
    dc.DrawLine(i_rect.x, i_rect.y + (i_rect.height * 0.5),
                i_rect.GetRight(), i_rect.y + (i_rect.height * 0.5))
    dc.DrawLine(i_rect.x, i_rect.y + (i_rect.height * 0.25),
                i_rect.GetRight(), i_rect.y + (i_rect.height * 0.25))

    pen = wx.Pen(ur_color, 1, wx.SHORT_DASH)
    dc.SetPen(pen)
    self.draw_max_line(dc, self.history.max_upload_rate,
                       max_data, i_rect, offset=3)

    pen = wx.Pen(ur_color, 1, wx.SOLID)
    dc.SetPen(pen)
    self.draw_graph(dc, self.history.max_len, self.history.upload_data,
                    max_data, i_rect)

    pen = wx.Pen(dr_color, 1, wx.SHORT_DASH)
    dc.SetPen(pen)
    self.draw_max_line(dc, self.history.max_download_rate,
                       max_data, i_rect)

    pen = wx.Pen(dr_color, 1, wx.SOLID)
    dc.SetPen(pen)
    self.draw_graph(dc, self.history.max_len, self.history.download_data,
                    max_data, i_rect)

    ## variance line
    if wx.the_app.config['show_variance_line']:
        pen = wx.Pen(wx.NamedColor("yellow"), 1, wx.SHORT_DASH)
        dc.SetPen(pen)
        self.draw_max_line(dc, self.history.variance,
                           self.history.max_variance, i_rect)
def listen_once(self, period=1e9):
    rawserver_logger.warning(_("listen_once() might not return until "
                               "there is activity, and might not process "
                               "the event you want. Use listen_forever()."))
    reactor.iterate(period)
            ):  # a list of files, their attributes and info hashes
                raise ValueError
            dirkeys = {}
            for y in cinfo.itervalues():
                # each entry should have a corresponding info_hash
                if not y[1]:
                    continue
                if not x['allowed'].has_key(y[1]):
                    raise ValueError
                if dirkeys.has_key(y[1]):
                    # and each should have a unique info_hash
                    raise ValueError
                dirkeys[y[1]] = 1

alas = _("your file may exist elsewhere in the universe\n"
         "but alas, not here\n")

def isotime():
    #return strftime('%Y-%m-%d %H:%M UTC', gmtime(secs))
    return datetime.datetime.utcnow().isoformat()

http_via_filter = re.compile(' for ([0-9.]+)\Z')

def _get_forwarded_ip(headers):
    if headers.has_key('x_forwarded_for'):
        header = headers['x_forwarded_for']
        try:
            x, y = header.split(',')
class HTTPConnector(object):

    def __init__(self, handler, connection):
        self.handler = handler
        self.connection = connection
        self.buf = ''
        self.closed = False
        self.done = False
        self.donereading = False
        self.next_func = self.read_type

    def get_ip(self):
        return self.connection.ip

    def data_came_in(self, data):
        if self.donereading or self.next_func is None:
            return True
        self.buf += data
        while True:
            try:
                i = self.buf.index('\n')
            except ValueError:
                return True
            val = self.buf[:i]
            self.buf = self.buf[i + 1:]
            self.next_func = self.next_func(val)
            if self.donereading:
                return True
            if self.next_func is None or self.closed:
                return False

    def read_type(self, data):
        self.header = data.strip()
        words = data.split()
        if len(words) == 3:
            self.command, self.path, garbage = words
            self.pre1 = False
        elif len(words) == 2:
            self.command, self.path = words
            self.pre1 = True
            if self.command != 'GET':
                return None
        else:
            return None
        if self.command not in ('HEAD', 'GET'):
            return None
        self.headers = {}
        return self.read_header

    def read_header(self, data):
        data = data.strip()
        if data == '':
            self.donereading = True
            # check for Accept-Encoding: header, pick an encoding
            if self.headers.has_key('accept-encoding'):
                ae = self.headers['accept-encoding']
                if DEBUG:
                    print "Got Accept-Encoding: " + ae + "\n"
            else:
                # identity assumed if no header
                ae = 'identity'
            # this eventually needs to support multiple acceptable types,
            # q-values and all that fancy HTTP crap
            # for now assume we're only communicating with our own client
            if ae.find('gzip') != -1:
                self.encoding = 'gzip'
            else:
                # default to identity.
                self.encoding = 'identity'
            r = self.handler.getfunc(self, self.path, self.headers)
            if r is not None:
                self.answer(r)
            return None
        try:
            i = data.index(':')
        except ValueError:
            return None
        self.headers[data[:i].strip().lower()] = data[i + 1:].strip()
        if DEBUG:
            print data[:i].strip() + ": " + data[i + 1:].strip()
        return self.read_header

    def answer(self, (responsecode, responsestring, headers, data)):
        if self.closed:
            return
        if self.encoding == 'gzip':
            # transform data using gzip compression
            # this is nasty but i'm unsure of a better way at the moment
            compressed = StringIO()
            gz = GzipFile(fileobj=compressed, mode='wb', compresslevel=9)
            gz.write(data)
            gz.close()
            compressed.seek(0, 0)
            cdata = compressed.read()
            compressed.close()
            if len(cdata) >= len(data):
                self.encoding = 'identity'
            else:
                if DEBUG:
                    print _("Compressed: %i Uncompressed: %i\n") % (
                        len(cdata), len(data))
                data = cdata
                headers['Content-Encoding'] = 'gzip'

        # i'm abusing the identd field here, but this should be ok
        if self.encoding == 'identity':
            ident = '-'
        else:
            ident = self.encoding
        username = '-'
        referer = self.headers.get('referer', '-')
        useragent = self.headers.get('user-agent', '-')
        timestamp = datetime.datetime.utcnow().isoformat()
        if DEBUG:
            print '%s %s %s [%s] "%s" %i %i "%s" "%s"' % (
                self.connection.ip, ident, username, timestamp,
                self.header, responsecode, len(data), referer, useragent)
        t = time.time()
        if t - self.handler.lastflush > self.handler.minflush:
            self.handler.lastflush = t
            stdout.flush()

        self.done = True
        r = StringIO()
        r.write('HTTP/1.0 ' + str(responsecode) + ' ' +
                responsestring + '\r\n')
        if not self.pre1:
            headers['Content-Length'] = len(data)
            for key, value in headers.items():
                r.write(key + ': ' + str(value) + '\r\n')
            r.write('\r\n')
        if self.command != 'HEAD':
            r.write(data)
        self.connection.write(r.getvalue())
        if self.connection.is_flushed():
            self.connection.shutdown(1)
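
# Sketch of the getfunc contract implied by answer() above: the handler
# is called as handler.getfunc(connector, path, headers) and returns
# either None or a (responsecode, responsestring, headers, data) tuple.
# MinimalHandler is hypothetical; lastflush/minflush are the attributes
# answer() reads when deciding whether to flush the log:
#
#     class MinimalHandler(object):
#         lastflush = 0
#         minflush = 60
#         def getfunc(self, connector, path, headers):
#             if path != '/':
#                 return (404, 'Not Found',
#                         {'Content-Type': 'text/plain'}, 'not here')
#             return (200, 'OK', {'Content-Type': 'text/plain'}, 'hello')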
def update_key(self, iter, key):
    """Modifies the key of the item referenced by iter.  If the key
       change is small enough that no reordering occurs then this takes
       amortized O(1) time.  If a reordering occurs then this takes
       O(log n).

       WARNING!!! The passed iterator MUST be assumed to be invalid
       upon return.  Any further operation on the passed iterator other
       than deallocation results in a RuntimeError exception.

       Typical use:
         >>> from CMultiMap import CMultiMap
         >>> m = CMultiMap()
         >>> m[10] = 'foo'
         >>> m[8] = 'bar'
         >>> i = m.find(10)
         >>> m.update_key(i, 7)   # i is assumed to be invalid upon return.
         >>> del i
         >>> [(int(x[0]), x[1]) for x in m.items()]  # reordering occurred.
         [(7, 'foo'), (8, 'bar')]
         >>> i = m.find(8)
         >>> m.update_key(i, 9)   # no reordering.
         >>> del i
         >>> [(int(x[0]), x[1]) for x in m.items()]
         [(7, 'foo'), (9, 'bar')]

       Edge cases:
         >>> i = m.find(7)
         >>> i.value()
         'foo'
         >>> m.update_key(i, 9)   # update to key already in the mmap.
         >>> m[7]
         []
         >>> m[9]
         ['foo', 'bar']
         >>> i = m.iterkeys()
         >>> try:                 # updating an iter pointing at BEGIN.
         ...     m.update_key(i, 10)
         ... except IndexError:
         ...     print 'ok'
         ...
         ok
         >>> i = m.end()
         >>> try:                 # updating an iter pointing at end().
         ...     m.update_key(i, 10)
         ... except IndexError:
         ...     print 'ok'
         ...
         ok
       """
    assert isinstance(iter, CMultiMap._AbstractIterator)
    if not iter._si:
        raise RuntimeError(_("invalid iterator"))
    if iter._si == BEGIN:
        raise IndexError(_("Iterator does not point at key-value pair"))
    if self is not iter._mmap:
        raise IndexError(_("Iterator points into a different CMultiMap."))
    if mmap_iiter_at_end(self._smmap, iter._si):
        raise IndexError(_("Cannot update end() iterator."))
    mmap_iiter_update_key(self._smmap, iter._si, key)
def __init__(self, parent, bitmap, browse, history, *a, **kw):
    BTDialog.__init__(self, parent, *a, **kw)
    itemDialog1 = self
    self.browse_func = browse

    itemFlexGridSizer2 = wx.FlexGridSizer(3, 1, 3, 0)
    itemFlexGridSizer2.AddGrowableCol(0)
    itemDialog1.SetSizer(itemFlexGridSizer2)

    itemFlexGridSizer3 = wx.FlexGridSizer(2, 2, 21, 0)
    itemFlexGridSizer3.AddGrowableCol(1)
    itemFlexGridSizer2.Add(itemFlexGridSizer3, 0,
                           wx.ALIGN_CENTER_HORIZONTAL |
                           wx.ALIGN_CENTER_VERTICAL | wx.ALL, 5)

    itemStaticBitmap4Bitmap = bitmap
    #itemStaticBitmap4 = wx.StaticBitmap(itemDialog1, wx.ID_STATIC,
    #                                    itemStaticBitmap4Bitmap)
    itemStaticBitmap4 = ElectroStaticBitmap(itemDialog1,
                                            itemStaticBitmap4Bitmap)
    itemFlexGridSizer3.Add(itemStaticBitmap4, 0,
                           wx.ALIGN_CENTER_HORIZONTAL |
                           wx.ALIGN_CENTER_VERTICAL | wx.ALL, 5)

    itemStaticText5 = wx.StaticText(
        itemDialog1, wx.ID_STATIC,
        _("Enter the URL or path to a torrent file on the Internet, "
          "your computer, or your network that you want to add."),
        wx.DefaultPosition, wx.DefaultSize, 0)
    if text_wrappable:
        itemStaticText5.Wrap(286)
    itemFlexGridSizer3.Add(itemStaticText5, 0,
                           wx.ALIGN_CENTER_HORIZONTAL |
                           wx.ALIGN_CENTER_VERTICAL | wx.ALL |
                           wx.ADJUST_MINSIZE, 7)

    itemStaticText6 = wx.StaticText(itemDialog1, wx.ID_STATIC, _("Open:"),
                                    wx.DefaultPosition, wx.DefaultSize, 0)
    itemFlexGridSizer3.Add(itemStaticText6, 0,
                           wx.ALIGN_CENTER_HORIZONTAL |
                           wx.ALIGN_CENTER_VERTICAL | wx.ALL |
                           wx.ADJUST_MINSIZE, 5)

    choiceboxStrings = history
    self.choicebox = wx.ComboBox(itemDialog1, ID_COMBOBOX,
                                 choices=choiceboxStrings,
                                 size=(267, -1),
                                 style=wx.CB_DROPDOWN | wx.TE_PROCESS_ENTER)
    self.choicebox.Bind(wx.EVT_TEXT, self.OnText)
    self.choicebox.Bind(wx.EVT_COMBOBOX, self.OnComboBox)
    self.choicebox.Bind(wx.EVT_TEXT_ENTER, self.OnTextEnter)
    itemFlexGridSizer3.Add(self.choicebox, 1,
                           wx.ALIGN_CENTER_HORIZONTAL | wx.GROW | wx.ALL, 5)

    itemBoxSizer8 = wx.BoxSizer(wx.HORIZONTAL)
    itemFlexGridSizer2.Add(itemBoxSizer8, 0,
                           wx.ALIGN_RIGHT | wx.ALIGN_BOTTOM |
                           wx.TOP | wx.BOTTOM, 1)

    itemFlexGridSizer9 = wx.FlexGridSizer(2, 3, 0, 2)
    itemFlexGridSizer9.AddGrowableRow(0)
    itemBoxSizer8.Add(itemFlexGridSizer9, 0,
                      wx.ALIGN_CENTER_VERTICAL | wx.ALL, 7)

    itemBoxSizer10 = wx.BoxSizer(wx.HORIZONTAL)
    itemFlexGridSizer9.Add(itemBoxSizer10, 0,
                           wx.ALIGN_CENTER_HORIZONTAL |
                           wx.ALIGN_CENTER_VERTICAL | wx.ALL, 2)
    self.okbutton = wx.Button(itemDialog1, wx.ID_OK)
    itemBoxSizer10.Add(self.okbutton, 0, wx.GROW | wx.ALL | wx.SHAPED, 0)

    itemBoxSizer12 = wx.BoxSizer(wx.HORIZONTAL)
    itemFlexGridSizer9.Add(itemBoxSizer12, 0,
                           wx.ALIGN_CENTER_HORIZONTAL |
                           wx.ALIGN_CENTER_VERTICAL | wx.ALL, 2)
    itemButton13 = wx.Button(itemDialog1, wx.ID_CANCEL)
    itemBoxSizer12.Add(itemButton13, 0, wx.GROW | wx.ALL | wx.SHAPED, 0)

    itemBoxSizer14 = wx.BoxSizer(wx.HORIZONTAL)
    itemFlexGridSizer9.Add(itemBoxSizer14, 0,
                           wx.ALIGN_CENTER_HORIZONTAL |
                           wx.ALIGN_CENTER_VERTICAL | wx.ALL, 2)
    itemButton15 = wx.Button(itemDialog1, ID_BROWSE, _("&Browse"),
                             wx.DefaultPosition, wx.DefaultSize, 0)
    itemButton15.Bind(wx.EVT_BUTTON, self.browse)
    itemBoxSizer14.Add(itemButton15, 0, wx.GROW | wx.ALL | wx.SHAPED, 0)

    self.okbutton.Disable()
    self.Fit()
                to_add.append(p)
                continue
            h = oldval[1]
            if oldval[0] == v[0]:   # file is unchanged from last parse
                if h:
                    if p in blocked:
                        # parseable + blocked means duplicate
                        to_add.append(p)   # other duplicate may have gone away
                    else:
                        new_parsed[h] = parsed[h]
                        new_files[p] = oldval
                else:
                    new_blocked[p] = None   # same broken unparseable file
                continue
            if p not in blocked and h in parsed:   # modified; remove+add
                if NOISY:
                    errfunc(_("removing %s (will re-add)") % p)
                removed[h] = parsed[h]
            to_add.append(p)

        to_add.sort()
        for p in to_add:    # then, parse new and changed torrents
            new_file = new_files[p]
            v = new_file[0]     # new_file[0] is the file's (mod time, sz).
            infohash = new_file[1]
            if infohash in new_parsed:
                # duplicate, i.e., have same infohash.
                if p not in blocked or files[p][0] != v:
                    errfunc(_("**warning** %s is a duplicate torrent for %s")
                            % (p, new_parsed[infohash][0]))
                new_blocked[p] = None
                continue
def _send(self, s, d):
    try:
        self.transport.sendto(s, 0, self.addr)
    except socket.error:
        d.errback((KRPC_SOCKET_ERROR, _("socket error")))
# Exceptions are now reported via loggers.
#def global_error(self, level, text):
#    self.output.message(text)

# Exceptions are now reported via loggers.
#def exchandler(self, s):
#    self.output.exception(s)

def read_config(self):
    try:
        newvalues = configfile.get_config(self.config, self.configfile_key)
    except Exception, e:
        self.logger.error(_("Error reading config: ") + str_exc(e))
        return
    self.logger.info(_("Rereading config file"))
    self.config.update(newvalues)
    # The set_option call can potentially trigger something that kills
    # the torrent (when writing this the only possibility is a change in
    # max_files_open causing an IOError while closing files), and so
    # the self.failed() callback can run during this loop.
    for option, value in newvalues.iteritems():
        self.multitorrent.set_option(option, value)
    for torrent in self.downloads.values():
        if torrent is not None:
            for option, value in newvalues.iteritems():
                torrent.set_option(option, value)

# rest are callbacks from torrent instances
def started(self, torrent):
    path, metainfo = self.torrent_cache[torrent.infohash]
        elif msg[TYP] == ERR:
            # if error
            # lookup tid
            if self.tids.has_key(msg[TID]):
                df = self.tids[msg[TID]]
                # callback
                df.errback(msg[ERR])
                del(self.tids[msg[TID]])
            else:
                # day late and dollar short
                pass
        else:
            # unknown message type
            df = self.tids[msg[TID]]
            # callback
            df.errback((KRPC_ERROR_RECEIVED_UNKNOWN,
                        _("received unknown message type")))
            del(self.tids[msg[TID]])

    def sendRequest(self, method, args):
        # make message
        # send it
        msg = {TID : chr(self.mtid), TYP : REQ, REQ : method, ARG : args}
        self.mtid = (self.mtid + 1) % 256
        s = bencode(msg)
        d = Deferred()
        self.tids[msg[TID]] = d
        self.call_later(KRPC_TIMEOUT, self.timeOut, msg[TID])
        self.call_later(0, self._send, s, d)
        return d

    def timeOut(self, id):
def get_defaults(ui):
    assert ui in ("bittorrent", "bittorrent-curses", "bittorrent-console",
                  "maketorrent", "maketorrent-console",
                  "launchmany-curses", "launchmany-console",
                  "bittorrent-tracker")
    r = []

    if ui == "bittorrent-tracker":
        r.extend(tracker_options)
    elif ui.startswith('bittorrent') or ui.startswith('launchmany'):
        r.extend(common_options)

    if ui == 'bittorrent':
        r.extend([
            ('publish', '',
             _("path to the file that you are publishing (seeding).")),
            ('verbose', False,
             _("display verbose information in user interface")),
            ('debug', False,
             _("provide debugging tools in user interface")),
            ('pause', False,
             _("start downloader in paused state")),
            ('open_from', u'',
             'local directory to look in for .torrent files to open'),
            ('start_minimized', False,
             _("Start %s minimized") % app_name),
            ('force_start_minimized', False,
             _("Start %s minimized (but do not save that preference)") %
             app_name),
            ('confirm_quit', CONFIRM_QUIT_DEFAULT,
             _("Confirm before quitting %s") % app_name),
            ('new_version', '',
             _("override the version provided by the http version check "
               "and enable version check debugging mode")),
            ('current_version', '',
             _("override the current version used in the version check "
               "and enable version check debugging mode")),

            # remember GUI state
            ('geometry', '',
             _("specify window size and position, in the format: "
               "WIDTHxHEIGHT+XOFFSET+YOFFSET")),
            ('start_maximized', False,
             _("Start %s maximized") % app_name),
            ('column_widths', {},
             _("Widths of columns in torrent list in main window")),
            ('column_order', ['name', 'progress', 'eta', 'drate', 'urate',
                              'peers', 'priority', 'state'],
             _("Order of columns in torrent list in main window")),
            ('enabled_columns', ['name', 'progress', 'eta', 'drate',
                                 'priority'],
             _("Enabled columns in torrent list in main window")),
            ('sort_column', 'name',
             _("Default sort column in torrent list in main window")),
            ('sort_ascending', True,
             _("Default sort order in torrent list in main window")),
            ('toolbar_text', True,
             _("Whether to show text on the toolbar or not")),
            ('toolbar_size', 24,
             _("Size in pixels of toolbar icons")),
            ('show_details', False,
             _("Show details panel on startup")),
            ('settings_tab', 0,
             _("Which tab in the settings window to show by default")),
            ('details_tab', 0,
             _("Which tab in the details panel to show by default")),
            ('splitter_height', 300,
             _("Height of the details splitter when it is enabled")),
            ('ask_for_save', True,
             _("whether or not to ask for a location to save downloaded "
               "files in")),
            ('max_upload_rate', 40960,   # 40KB/s up
             _("maximum B/s to upload at")),
            ])

        if os.name == 'nt':
            r.extend([
                ('launch_on_startup', True,
                 _("Launch %s when Windows starts") % app_name),
                ('minimize_to_tray', True,
                 _("Minimize to the system tray")),
                ('close_to_tray', True,
                 _("Close to the system tray")),
                ('enforce_association', True,
                 _("Enforce .torrent file associations on startup")),
                ])

        progress_bar = [
            'progressbar_style', 3,
            _("The style of progressbar to show. 0 means no progress "
              "bar. 1 is an ordinary progress bar. 2 is a progress "
              "bar that shows transferring, available and missing "
              "percentages as well. 3 is a piece bar which "
              "color-codes each piece in the torrent based on its "
              "availability.")]

        if sys.platform == "darwin":
            # listctrl placement of the progress bars does not work on Carbon
            progress_bar[1] = 0
        r.extend([progress_bar, ])

    if ui in ('bittorrent', 'maketorrent'):
        r.append(('theme', 'default', _("Icon theme to use")))

    if ui.startswith('bittorrent') and ui != "bittorrent-tracker":
        r.extend([
            ('max_uploads', -1,
             _("the maximum number of uploads to allow at once. -1 means a "
               "(hopefully) reasonable number based on --max_upload_rate. "
               "The automatic values are only sensible when running one "
               "torrent at a time.")),
            ('save_in', u'',
             _("local directory where the torrent contents will be saved. "
               "The file (single-file torrents) or directory (batch "
               "torrents) will be created under this directory using the "
               "default name specified in the .torrent file. See also "
               "--save_as.")),
            ('save_incomplete_in', u'',
             _("local directory where the incomplete torrent downloads "
               "will be stored until completion. Upon completion, "
               "downloads will be moved to the directory specified by "
               "--save_in.")),
            ])
        r.extend(bandwidth_management_options)

    if ui.startswith('launchmany'):
        r.extend([
            ('max_uploads', 6,
             _("the maximum number of uploads to allow at once. -1 means a "
               "(hopefully) reasonable number based on --max_upload_rate. "
               "The automatic values are only sensible when running one "
               "torrent at a time.")),
            ('save_in', u'',
             _("local directory where the torrents will be saved, using a "
               "name determined by --saveas_style. If this is left empty "
               "each torrent will be saved under the directory of the "
               "corresponding .torrent file")),
            ('save_incomplete_in', u'',
             _("local directory where the incomplete torrent downloads "
               "will be stored until completion. Upon completion, "
               "downloads will be moved to the directory specified by "
               "--save_in.")),
            ('parse_dir_interval', 60,
             _("how often to rescan the torrent directory, in seconds")),
            ('launch_delay', 0,
             _("wait this many seconds after noticing a torrent before "
               "starting it, to avoid race with tracker")),
            ('saveas_style', 4,
             _("How to name torrent downloads: "
               "1: use name OF torrent file (minus .torrent); "
               "2: use name encoded IN torrent file; "
               "3: create a directory with name OF torrent file "
               "(minus .torrent) and save in that directory using name "
               "encoded IN torrent file; "
               "4: if name OF torrent file (minus .torrent) and name "
               "encoded IN torrent file are identical, use that "
               "name (style 1/2), otherwise create an intermediate "
               "directory as in style 3; "
               "CAUTION: options 1 and 2 have the ability to "
               "overwrite files without warning and may present "
               "security issues.")),
            ('display_path', ui == 'launchmany-console' and True or False,
             _("whether to display the full path or the torrent contents "
               "for each torrent")),
            ])

    if ui.startswith('launchmany') or ui == 'maketorrent':
        r.append(
            ('torrent_dir', u'',
             _("directory to look for .torrent files (semi-recursive)")), )

    if ui.startswith('maketorrent'):
        r.append(('content_type', '',
                  _("file's default mime type.")))

    # HEREDAVE batch torrents must be handled differently.
    if ui in ('bittorrent-curses', 'bittorrent-console'):
        r.extend([
            ('save_as', u'',
             _("file name (for single-file torrents) or directory name "
               "(for batch torrents) to save the torrent as, overriding "
               "the default name in the torrent. "
               "See also --save_in")),
            ('spew', False,
             _("whether to display diagnostic info to stdout")),
            ])

    if ui == 'bittorrent-console':
        r.extend([
            ('display_interval', 5,
             _("seconds between updates of displayed information")),
            ])
    elif ui.startswith('launchmany-console'):
        r.extend([
            ('display_interval', 60,
             _("seconds between updates of displayed information")),
            ])
    elif ui.startswith('launchmany-curses'):
        r.extend([
            ('display_interval', 3,
             _("seconds between updates of displayed information")),
            ])

    if ui.startswith('maketorrent'):
        r.extend([
            ('title', '',
             _("optional human-readable title for entire .torrent")),
            ('comment', '',
             _("optional human-readable comment to put in .torrent")),
            ('piece_size_pow2', 0,
             _("which power of two to set the piece size to, "
               "0 means pick a good piece size")),
            ('tracker_name', '',
             _("default tracker name")),
            ('tracker_list', '', ''),
            ('use_tracker', True,
             _("if false then make a trackerless torrent, instead of "
               "announce URL, use reliable node in form of <ip>:<port> or "
               "an empty string to pull some nodes from your routing "
               "table")),
            ('verbose', False,
             _("display verbose information in user interface")),
            ('debug', False,
             _("provide debugging tools in user interface")),
            ])

    r.extend(basic_options)

    if (ui.startswith('bittorrent') or ui.startswith('launchmany')) \
           and ui != "bittorrent-tracker":
        r.extend(rare_options)

    return r
# Written by Bram Cohen app_name = "BitTorrent" from BTL.translation import _ import sys import locale from BitTorrent.defaultargs import get_defaults from BitTorrent import configfile from BitTorrent.makemetafile import make_meta_files from BitTorrent.parseargs import parseargs, printHelp from BitTorrent import BTFailure defaults = get_defaults('maketorrent-console') defaults.extend([ ('target', '', _("optional target file for the torrent")), ]) defconfig = dict([(name, value) for (name, value, doc) in defaults]) del name, value, doc def dc(v): print v def prog(amount): print '%.1f%% complete\r' % (amount * 100), if __name__ == '__main__':
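    # The body of this __main__ block is truncated in this copy. What follows
    # is a minimal sketch of the usual shape; the parseargs() and
    # make_meta_files() signatures are assumptions based on the imports
    # above, so verify them against parseargs.py and makemetafile.py.
    try:
        config, args = parseargs(sys.argv[1:], defaults, minargs=2)
        # Assumed convention: args[0] is the announce URL; the remaining
        # arguments are files or directories to build .torrent files from.
        make_meta_files(args[0], args[1:], progressfunc=prog, filefunc=dc)
    except BTFailure, e:
        print str(e)
        sys.exit(1)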
    def timeOut(self, id):
        # The transaction with this id never received a reply; fail its
        # pending deferred so callers can react to the unresponsive node.
        if self.tids.has_key(id):
            df = self.tids[id]
            del self.tids[id]
            df.errback((KRPC_ERROR_TIMEOUT, _("timeout")))
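    # Illustrative sketch only (not from this file): how a caller is expected
    # to pair a transaction id with its deferred and arm the timeout. The
    # _register name and the add_task-based scheduling are assumptions about
    # the surrounding class, not its actual API.
    def _register(self, id, df, delay):
        self.tids[id] = df
        self.rawserver.add_task(delay, self.timeOut, id)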
                self.singleport_listener.open_port(port, self.config)
                if self.config['start_trackerless_client']:
                    self.dht = UTKhashmir(
                        self.config['bind'],
                        self.singleport_listener.get_port(),
                        self.data_dir,
                        self.rawserver,
                        int(self.config['max_upload_rate'] * 0.01),
                        rlcount=self.up_ratelimiter.increase_offset,
                        config=self.config)
                break
            except socket.error, e:
                exc_info = sys.exc_info()
        else:
            # The for loop fell through without a break: no port in the
            # configured range could be opened.
            if not listen_fail_ok:
                raise BTFailure, (_("Could not open a listening port: %s.") %
                                  str_exc(e))
            self.global_error(logging.CRITICAL,
                              (_("Could not open a listening port: %s. ") %
                               str_exc(e)) +
                              (_("Check your port range settings (%s:%s-%s).") %
                               (self.config['bind'],
                                self.config['minport'],
                                self.config['maxport'])),
                              exc_info=exc_info)

    def shutdown(self):
        df = launch_coroutine(wrap_task(self.rawserver.add_task),
                              self._shutdown)
        df.addErrback(lambda f: self.logger.error('shutdown failed!',
                                                  exc_info=f.exc_info()))
        return df
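    # The port-opening code above relies on Python's for/else: the else
    # clause runs only when the loop exhausts every port without hitting
    # `break`. A minimal stand-alone sketch of the pattern (open_port,
    # minport and maxport are placeholders):
    #
    #     for port in xrange(minport, maxport + 1):
    #         try:
    #             open_port(port)
    #             break
    #         except socket.error:
    #             pass
    #     else:
    #         raise BTFailure(_("Could not open a listening port."))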
    def __init__(self, config, rawserver):
        self.config = config
        self.response_size = config['response_size']
        self.max_give = config['max_give']
        self.dfile = efs2(config['dfile'])
        self.natcheck = config['nat_check']
        favicon = config['favicon']
        self.favicon = None
        if favicon:
            try:
                # open in binary mode; favicons are image data.
                h = open(favicon, 'rb')
                self.favicon = h.read()
                h.close()
            except:
                errorfunc(logging.WARNING,
                          _("specified favicon file -- %s -- does not exist.")
                          % favicon)
        self.rawserver = rawserver
        self.cached = {}
        # format: infohash: [[time1, l1, s1], [time2, l2, s2], [time3, l3, s3]]
        self.cached_t = {}  # format: infohash: [time, cache]
        self.times = {}
        self.state = {}
        self.seedcount = {}
        self.save_pending = False
        self.parse_pending = False

        self.only_local_override_ip = config['only_local_override_ip']
        if self.only_local_override_ip == 2:
            self.only_local_override_ip = not config['nat_check']

        if os.path.exists(self.dfile):
            try:
                h = open(self.dfile, 'rb')
                ds = h.read()
                h.close()
                try:
                    tempstate = cPickle.loads(ds)
                except:
                    tempstate = bdecode(ds)  # backwards-compatibility.
                if not tempstate.has_key('peers'):
                    tempstate = {'peers': tempstate}
                statefiletemplate(tempstate)
                self.state = tempstate
            except:
                errorfunc(logging.WARNING,
                          _("statefile %s corrupt; resetting") % self.dfile)
        self.downloads = self.state.setdefault('peers', {})
        self.completed = self.state.setdefault('completed', {})

        self.becache = {}
        # format: infohash: [[l1, s1], [l2, s2], [l3, s3]]
        for infohash, ds in self.downloads.iteritems():
            self.seedcount[infohash] = 0
            for x, y in ds.iteritems():
                if not y.get('nat', -1):
                    ip = y.get('given_ip')
                    if not (ip and self.allow_local_override(y['ip'], ip)):
                        ip = y['ip']
                    self.natcheckOK(infohash, x, ip, y['port'], y['left'])
                if not y['left']:
                    self.seedcount[infohash] += 1

        for infohash in self.downloads:
            self.times[infohash] = {}
            for peerid in self.downloads[infohash]:
                self.times[infohash][peerid] = 0

        self.reannounce_interval = config['reannounce_interval']
        self.save_dfile_interval = config['save_dfile_interval']
        self.show_names = config['show_names']
        rawserver.add_task(self.save_dfile_interval, self.save_dfile)
        self.prevtime = time()
        self.timeout_downloaders_interval = config[
            'timeout_downloaders_interval']
        rawserver.add_task(self.timeout_downloaders_interval,
                           self.expire_downloaders)
        self.logfile = None
        self.log = None
        if (config['logfile'] != '') and (config['logfile'] != '-'):
            try:
                self.logfile = config['logfile']
                self.log = open(self.logfile, 'a')
                sys.stdout = self.log
                print _("# Log Started: "), isotime()
            except:
                print _("**warning** could not redirect stdout to log file: "), sys.exc_info()[0]

        if config['hupmonitor']:
            def huphandler(signum, frame, self=self):
                try:
                    self.log.close()
                    self.log = open(self.logfile, 'a')
                    sys.stdout = self.log
                    print _("# Log reopened: "), isotime()
                except:
                    print _("***warning*** could not reopen logfile")
            signal.signal(signal.SIGHUP, huphandler)

        self.allow_get = config['allow_get']

        if config['allowed_dir'] != '':
            self.allowed_dir = config['allowed_dir']
            self.parse_dir_interval = config['parse_dir_interval']
            self.allowed = self.state.setdefault('allowed', {})
            self.allowed_dir_files = self.state.setdefault(
                'allowed_dir_files', {})
            self.allowed_dir_blocked = {}
            self.parse_allowed()
        else:
            try:
                del self.state['allowed']
            except:
                pass
            try:
                del self.state['allowed_dir_files']
            except:
                pass
            self.allowed = None

        self.uq_broken = unquote('+') != ' '
        self.keep_dead = config['keep_dead']
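        # Sketch of the state-file layout as inferred from this constructor
        # (keys and nesting are illustrative, not a spec):
        #
        #     state = {
        #         'peers':     {infohash: {peerid: {'ip': ..., 'port': ...,
        #                                           'left': ..., ...}}},
        #         'completed': {infohash: completed_count},
        #         # present only when allowed_dir is configured:
        #         'allowed': {...}, 'allowed_dir_files': {...},
        #     }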
def create_torrent(self, metainfo, save_incomplete_as, save_as, hidden=False, is_auto_update=False, feedback=None): if self.is_single_torrent and len(self.torrents) > 0: raise TooManyTorrents( _("MultiTorrent is set to download only " "a single torrent, but tried to create more than one.")) infohash = metainfo.infohash if self.torrent_known(infohash): if self.torrent_running(infohash): msg = _("This torrent (or one with the same contents) is " "already running.") raise TorrentAlreadyRunning(msg) else: raise TorrentAlreadyInQueue( _("This torrent (or one with " "the same contents) is " "already waiting to run.")) self._dump_metainfo(metainfo) #BUG. Use _read_torrent_config for 5.0? --Dave config = configfile.read_torrent_config( self.config, self.data_dir, infohash, lambda s: self.global_error(logging.ERROR, s)) t = Torrent(metainfo, save_incomplete_as, save_as, self.config, self.data_dir, self.rawserver, self.choker, self.singleport_listener, self.up_ratelimiter, self.down_ratelimiter, self.total_downmeasure, self.filepool, self.dht, self, self.log_root, hidden=hidden, is_auto_update=is_auto_update) if feedback: t.add_feedback(feedback) retdf = Deferred() def torrent_started(*args): if config: t.update_config(config) t._dump_torrent_config() if self.resume_from_torrent_config: self._dump_torrents() t.metainfo.show_encoding_errors(self.logger.log) retdf.callback(t) df = self._init_torrent(t, use_policy=False) df.addCallback(torrent_started) return retdf
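    # Illustrative usage (not from this file): create a torrent and react
    # once it has started. multi_torrent and metainfo are assumed to exist;
    # metainfo would be a ConvertedMetainfo built from a bdecoded .torrent.
    #
    #     def on_started(torrent):
    #         print 'started: %s' % torrent.metainfo.name
    #     df = multi_torrent.create_torrent(metainfo,
    #                                       save_incomplete_as=u'/tmp/inc/x',
    #                                       save_as=u'/tmp/done/x')
    #     df.addCallback(on_started)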
def remove_auto_updates_except(self, infohash): for t in self.torrents.values(): if t.is_auto_update and t.metainfo.infohash != infohash: self.logger.warning( _("Cleaning up old autoupdate %s") % t.metainfo.name) self.remove_torrent(t.metainfo.infohash, del_files=True)
def check_info(info, check_paths=True):
    if not isinstance(info, dict):
        raise BTFailure, _("bad metainfo - not a dictionary")
    pieces = info.get('pieces')
    if type(pieces) != str or len(pieces) % 20 != 0 or len(pieces) == 0:
        raise BTFailure, _("bad metainfo - bad pieces key")
    piecelength = info.get('piece length')
    if type(piecelength) not in ints or piecelength <= 0:
        raise BTFailure, _("bad metainfo - illegal piece length")
    name = info.get('name')
    if not isinstance(name, str):
        raise BTFailure, _("bad metainfo - bad name")
    #if not allowed_path_re.match(name):
    #    raise BTFailure, _("name %s disallowed for security reasons") % name
    if info.has_key('files') == info.has_key('length'):
        raise BTFailure, _("single/multiple file mix")
    if info.has_key('length'):
        length = info.get('length')
        if type(length) not in ints or length < 0:
            raise BTFailure, _("bad metainfo - bad length")
    else:
        files = info.get('files')
        if type(files) != list:
            raise BTFailure, _('bad metainfo - "files" is not a list of files')
        for f in files:
            if type(f) != dict:
                raise BTFailure, _("bad metainfo - file entry must be a dict")
            length = f.get('length')
            if type(length) not in ints or length < 0:
                raise BTFailure, _("bad metainfo - bad length")
            path = f.get('path')
            if type(path) != list or path == []:
                raise BTFailure, _("bad metainfo - bad path")
            for p in path:
                if type(p) != str:
                    raise BTFailure, _("bad metainfo - bad path dir")
                if check_paths and not allowed_path_re.match(p):
                    raise BTFailure, _(
                        "path %s disallowed for security reasons") % p
        # Sort the joined paths so that any duplicate or prefix relationship
        # shows up between adjacent entries.
        f = ['/'.join(x['path']) for x in files]
        f.sort()
        i = iter(f)
        try:
            name2 = i.next()
            while True:
                name1 = name2
                name2 = i.next()
                if name2.startswith(name1):
                    if name1 == name2:
                        raise BTFailure, _("bad metainfo - duplicate path")
                    elif name2[len(name1)] == '/':
                        raise BTFailure(_("bad metainfo - name used as both "
                                          "file and subdirectory name"))
        except StopIteration:
            pass
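def _check_info_example():
    # Quick illustration (hypothetical values): a minimal single-file info
    # dict that passes check_info. Twenty zero bytes stand in for one SHA-1
    # piece hash; real metainfo would carry actual digests.
    info = {
        'pieces': '\x00' * 20,
        'piece length': 262144,
        'name': 'example.bin',
        'length': 1000,
    }
    check_info(info)  # raises BTFailure if the dict were malformed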
# Written by Henry 'Pi' James, Loring Holden and Matt Chisholm app_name = "BitTorrent" from BTL.translation import _ import time from sys import * from os.path import * from hashlib import sha1 from BTL.bencode import * from BitTorrent import version NAME, EXT = splitext(basename(argv[0])) print _("%s %s - decode %s metainfo files") % (NAME, version, app_name) print if len(argv) == 1: print _("Usage: %s [TORRENTFILE [TORRENTFILE ... ] ]") % basename(argv[0]) print exit(2) # common exit code for syntax error labels = { 'metafile': _("metainfo file: %s"), 'infohash': _("info hash: %s"), 'filename': _("file name: %s"), 'filesize': _("file size:"), 'files': _("files:"), 'title': _("title: %s"), 'dirname': _("directory name: %s"),
    def __init__(self, metainfo):
        """metainfo is a dict. When it comes from a .torrent file, the raw
           contents must first be bdecoded before being passed to
           ConvertedMetainfo."""
        self.bad_torrent_wrongfield = False
        self.bad_torrent_unsolvable = False
        self.bad_torrent_noncharacter = False
        self.bad_conversion = False
        self.bad_windows = False
        self.bad_path = False
        self.reported_errors = False

        # All of the following values should be considered READONLY.
        # Modifications to the metainfo that should be written should
        # occur to the underlying metainfo dict directly.
        self.is_batch = False
        self.orig_files = None
        self.files_fs = None
        self.total_bytes = 0
        self.sizes = []
        self.comment = None
        self.title = None  # descriptive title text for whole torrent
        self.creation_date = None
        self.metainfo = metainfo
        self.encoding = None
        self.caches = None

        #EZ micropayments are used
        self.micropayments = False

        btformats.check_message(metainfo, check_paths=False)
        info = metainfo['info']
        self.is_private = info.has_key("private") and info['private']
        if 'encoding' in metainfo:
            self.encoding = metainfo['encoding']
        elif 'codepage' in metainfo:
            self.encoding = 'cp%s' % metainfo['codepage']
        if self.encoding is not None:
            try:
                for s in u'this is a test', \
                         u'these should also work in any encoding: 0123456789\0':
                    assert s.encode(self.encoding).decode(self.encoding) == s
            except:
                self.encoding = 'iso-8859-1'
                self.bad_torrent_unsolvable = True

        if info.has_key('length'):
            self.total_bytes = info['length']
            self.sizes.append(self.total_bytes)
            if info.has_key('content_type'):
                self.content_type = info['content_type']
            else:
                self.content_type = None  # hasattr or None. Which is better?
        else:
            self.is_batch = True
            r = []
            self.orig_files = []
            self.sizes = []
            self.content_types = []
            i = 0
            # info['files'] is a list of dicts containing keys:
            # 'length', 'path', and 'content_type'. The 'content_type'
            # key is optional.
            for f in info['files']:
                l = f['length']
                self.total_bytes += l
                self.sizes.append(l)
                self.content_types.append(f.get('content_type'))
                path = self._get_attr(f, 'path')
                if len(path[-1]) == 0:
                    # An empty final component marks a directory entry.
                    if l > 0:
                        raise BTFailure(_("Bad file path component: ")
                                        + path[-1])
                    # BitComet makes .torrent files with directories
                    # listed along with the files, which we don't support
                    # yet, in part because some idiot interpreted this as
                    # a bug in BitComet rather than a feature.
                    path.pop(-1)
                for x in path:
                    if not btformats.allowed_path_re.match(x):
                        raise BTFailure(_("Bad file path component: ") + x)

                self.orig_files.append('/'.join(path))
                k = []
                for u in path:
                    tf2 = self._to_fs_2(u)
                    k.append((tf2, u))
                r.append((k, i))
                i += 1
            # If two or more file/subdirectory names in the same directory
            # would map to the same name after encoding conversions + Windows
            # workarounds, change them. Files are changed as
            # 'a.b.c' -> 'a.b.0.c', 'a.b.1.c' etc, directories or files
            # without '.' as 'a' -> 'a.0', 'a.1' etc. If one of the multiple
            # original names was a "clean" conversion, that one is always
            # unchanged and the rest are adjusted.
            r.sort()
            self.files_fs = [None] * len(r)
            prev = [None]
            res = []
            stack = [{}]
            # Walk the sorted (converted-path, index) pairs, reusing the
            # prefix shared with the previous entry and renaming on collision.
            for x in r:
                j = 0
                x, i = x
                while x[j] == prev[j]:
                    j += 1
                del res[j:]
                del stack[j + 1:]
                name = x[j][0][1]
                if name in stack[-1]:
                    for name in generate_names(x[j][1], j != len(x) - 1):
                        name = self._to_fs(name)
                        if name not in stack[-1]:
                            break
                stack[-1][name] = None
                res.append(name)
                for j in xrange(j + 1, len(x)):
                    name = x[j][0][1]
                    stack.append({name: None})
                    res.append(name)
                self.files_fs[i] = os.path.join(*res)
                prev = x

        self.name = self._get_attr(info, 'name')
        self.name_fs = self._to_fs(self.name)
        self.piece_length = info['piece length']

        self.announce = metainfo.get('announce')
        self.announce_list = metainfo.get('announce-list')
        if 'announce-list' not in metainfo and 'announce' not in metainfo:
            self.is_trackerless = True
        else:
            self.is_trackerless = False

        #EZ
        if 'micropayments' in metainfo and metainfo['micropayments'] == True:
            print "found micropayments == true in metafile"
            self.micropayments = True

        self.nodes = metainfo.get('nodes', [('router.bittorrent.com', 6881)])
        self.title = metainfo.get('title')
        self.comment = metainfo.get('comment')
        self.creation_date = metainfo.get('creation date')
        self.locale = metainfo.get('locale')

        self.safe = metainfo.get('safe')

        self.url_list = metainfo.get('url-list', [])
        if not isinstance(self.url_list, list):
            self.url_list = [self.url_list, ]

        self.caches = metainfo.get('caches')

        self.hashes = [info['pieces'][x:x + 20]
                       for x in xrange(0, len(info['pieces']), 20)]
        self.infohash = InfoHashType(sha(bencode(info)).digest())
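    # Sketch of what generate_names(name, is_dir) -- used in the collision
    # loop above -- is expected to yield, based on the comment preceding it;
    # the real implementation lives elsewhere in this module and may differ:
    #
    #     def generate_names(name, is_dir):
    #         i = 0
    #         while True:
    #             if is_dir or '.' not in name:
    #                 yield '%s.%d' % (name, i)          # 'a' -> 'a.0', 'a.1'
    #             else:
    #                 base, ext = name.rsplit('.', 1)
    #                 yield '%s.%d.%s' % (base, i, ext)  # 'a.c' -> 'a.0.c'
    #             i += 1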
except:
    pass

import wx
import wx.grid
import wxPython
from BTL.translation import _
from BitTorrent.platform import image_root
import BTL.stackthreading as threading
from BTL.defer import ThreadedDeferred
import bisect

vs = wxPython.__version__
min_wxpython = "2.6"

def _version_tuple(v):
    # compare numerically; a plain string compare would wrongly reject
    # e.g. "2.10" as older than "2.6".
    return tuple(int(p) for p in v.split('.')[:2])

assert _version_tuple(vs) >= _version_tuple(min_wxpython), _(
    "wxPython version %s or newer required") % min_wxpython
assert 'unicode' in wx.PlatformInfo, _(
    "The Unicode versions of wx and wxPython are required")

# the original test peeked at character 4 of wx.__version__ (the third
# version component); parse it instead so multi-digit components compare
# correctly.
text_wrappable = int(wx.__version__.split('.')[2]) >= 2

profile = False
if profile:
    from BTL.profile import Profiler, Stats
    prof_file_name = 'ui.mainloop.prof'

def gui_wrap(_f, *args, **kwargs):
    wx.the_app.CallAfter(_f, *args, **kwargs)
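# Usage sketch: marshal a UI update onto the wx main loop from a worker
# thread (label here is a hypothetical wx.StaticText):
#
#     gui_wrap(label.SetLabel, _("Download complete"))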
if os.name == 'nt':
    # msvcrt's _getmaxstdio() is the per-process limit on open C streams:
    # -3 for stdin, stdout, and stderr
    # -15 for a buffer
    # (e.g. the common default of 512 streams yields 512 - 3 - 15 = 494)
    MAX_FILES_OPEN = ctypes.cdll.msvcrt._getmaxstdio() - 3 - 15
    CONFIRM_QUIT_DEFAULT = True
else:
    # msvcrt is Windows-only. The original source computed MAX_FILES_OPEN
    # unconditionally; this fallback assumes the same common 512-stream
    # default rather than crashing on other platforms.
    MAX_FILES_OPEN = 512 - 3 - 15
    CONFIRM_QUIT_DEFAULT = False

from BTL.language import languages
from BTL.platform import app_name

basic_options = [
    ('data_dir', u'',
     _("directory under which variable data such as fastresume information "
       "and GUI state is saved. Defaults to subdirectory 'data' of the "
       "bittorrent config directory.")),
    ('language', '',
     _("ISO Language code to use") + ': ' + ', '.join(languages)),
    ('use_factory_defaults', False,
     _("Starts the application in a debug mode. All settings revert to "
       "default except those provided as command-line options. Creates "
       "temporary directories for dot, data, incomplete torrents and "
       "complete torrents. Allows multiple clients on the same machine to "
       "communicate with each other.")),
    ]

common_options = [
    ('ip', '',
     _("ip to report to the tracker (has no effect unless you are on the "
       "same local network as the tracker)")),
    def update_key(self, iter, key):
        """ see CMultiMap.update_key.

            WARNING!! You MUST assume that the passed iterator is
            invalidated upon return.

            Typical use:
            >>> from CMultiMap import CIndexedMultiMap
            >>> m = CIndexedMultiMap()
            >>> m[10] = 'foo'
            >>> m[8] = 'bar'
            >>> i = m.find(10)
            >>> m.update_key(i,7)  # i is assumed to be invalid upon return.
            >>> del i
            >>> int(m.get_key_by_value('foo'))
            7
            >>> [(int(x[0]),x[1]) for x in m.items()]  # reordering occurred.
            [(7, 'foo'), (8, 'bar')]
            >>> i = m.find(8)
            >>> m.update_key(i,9)  # no reordering.
            >>> del i
            >>> [(int(x[0]),x[1]) for x in m.items()]
            [(7, 'foo'), (9, 'bar')]

            Edge cases:
            >>> i = m.find(7)
            >>> i.value()
            'foo'
            >>> m.update_key(i,9)
            >>> m[7]
            []
            >>> m[9]
            ['foo', 'bar']
            >>> int(m.get_key_by_value('foo'))
            9
            >>> i = m.iterkeys()
            >>> try:   # updating an iter pointing at BEGIN.
            ...     m.update_key(i,10)
            ... except IndexError:
            ...     print 'ok'
            ...
            ok
            >>> i = m.end()
            >>> try:   # updating an iter pointing at end().
            ...     m.update_key(i,10)
            ... except IndexError:
            ...     print 'ok'
            ...
            ok
        """
        if not iter._si:
            raise RuntimeError(_("invalid iterator"))
        if iter._si == BEGIN:
            raise IndexError(_("Iterator does not point at key-value pair"))
        if self is not iter._mmap:
            raise IndexError(_("Iterator points into a different "
                               "CIndexedMultiMap."))
        if mmap_iiter_at_end(self._smmap, iter._si):
            raise IndexError(_("Cannot update end() iterator."))
        # duplicate keys are fine in a multimap (see the m[9] edge case
        # above), so unlike the unique-key map this cannot raise KeyError;
        # the update may reorder the map and return a new internal position.
        si = mmap_iiter_update_key_iiter(self._smmap, iter._si, key)
        if si != iter._si:  # if mmap is reordered...
            value = iter.value()
            val_si = self._value_index[value]
            iiter_delete(val_si)
            self._value_index[value] = si
def item(self): """@return the key-value pair referenced by this iterator. """ if not self._si: raise RuntimeError(_("invalid iterator")) return self.key(), self.value()