def tounicode(raw):  # {{{
    """Coerce *raw* to a unicode string for safe display.

    Bytestrings are first decoded with the system's ``preferred_encoding``
    (falling back to ``safe_repr`` if that decode blows up, e.g. on a bad
    codec name); anything still a bytestring after that is decoded as
    UTF-8.  Both decodes use ``errors='replace'`` so undecodable bytes
    become replacement characters rather than raising.  Non-bytestring
    input is returned unchanged.
    """
    if isbytestring(raw):
        try:
            raw = raw.decode(preferred_encoding, 'replace')
        except Exception:  # was bare except; LookupError etc. still caught
            raw = safe_repr(raw)
    if isbytestring(raw):
        try:
            # BUG FIX: the original called raw.decode(...) but discarded the
            # result, so raw was returned still as a bytestring.  Assign it.
            raw = raw.decode('utf-8', 'replace')
        except Exception:
            raw = u'Undecodable bytestring'
    return raw
def analyse (exctyp, value, tb):
    """Build a verbose text report for the exception (exctyp, value, tb).

    For every inner frame of the traceback, re-tokenizes the source line
    and resolves each dotted identifier to its runtime value, then writes
    a 'Variables:' section followed by the standard formatted traceback.
    Returns the whole report as a string.

    NOTE(review): an identical ``analyse`` is defined again later in this
    file, so this definition is shadowed (dead code) at import time.
    """
    import tokenize, keyword
    trace = StringIO()
    nlines = 1
    frecs = inspect.getinnerframes (tb, nlines)
    trace.write ('Variables:\n')
    for frame, fname, lineno, funcname, _context, _cindex in frecs:
        trace.write (' File "%s", line %d, ' % (fname, lineno))
        args, varargs, varkw, lcls = inspect.getargvalues (frame)

        # Feed tokenize one source line at a time, starting at the frame's
        # line.  The mutable default ``lno=[lineno]`` is a deliberate Python 2
        # closure trick: it persists across calls and acts as a line counter.
        def readline (lno=[lineno], *args):
            if args:
                print args  # debug only: tokenize never passes extra args
            try:
                return linecache.getline (fname, lno[0])
            finally:
                lno[0] += 1

        # State machine over the token stream:
        #   name  - dotted identifier accumulated so far (shadows builtin all below)
        #   prev  - the object the identifier has resolved to so far
        #   scope - where the first component was found (per ``lookup``)
        #   all   - mapping of fully-resolved identifiers -> values
        all, prev, name, scope = {}, None, '', None
        for ttype, tstr, _stup, _etup, _line in tokenize.generate_tokens (readline):
            if ttype == tokenize.NAME and tstr not in keyword.kwlist:
                if name:
                    if name[-1] == '.':
                        # Continue a dotted chain: resolve attribute on prev.
                        try:
                            val = getattr (prev, tstr)
                        except AttributeError:
                            # XXX skip the rest of this identifier only
                            break
                        name += tstr
                else:
                    # Start of a new identifier: look it up in the frame.
                    assert not name and not scope
                    scope, val = lookup (tstr, frame, lcls)
                    name = tstr
                if val is not None:
                    prev = val
                #print ' found', scope, 'name', name, 'val', val, 'in', prev, 'for token', tstr
            elif tstr == '.':
                if prev:
                    name += '.'
            else:
                # Any other token ends the current identifier; record it.
                if name:
                    all[name] = prev
                prev, name, scope = None, '', None
                if ttype == tokenize.NEWLINE:
                    break
        # Function signature with argument values, repr'd safely.
        trace.write (funcname + inspect.formatargvalues (args, varargs, varkw, lcls, formatvalue=lambda v: '=' + safe_repr (v)) + '\n')
        if len (all):
            trace.write (' %s\n' % str (all))
    trace.write('\n')
    traceback.print_exception (exctyp, value, tb, None, trace)
    return trace.getvalue()
def analyse(exctyp, value, tb):
    """Build a verbose text report for the exception (exctyp, value, tb).

    For each inner traceback frame, re-tokenizes the source line, resolves
    every dotted identifier to its live value via ``lookup``/``getattr``,
    writes a 'Variables:' section plus the frame's call signature, and
    appends the standard formatted traceback.  Returns the report string.

    NOTE(review): this is a byte-for-byte duplicate (modulo spacing) of the
    ``analyse`` defined earlier in this file; being later, this is the one
    that wins at import time.  Consider deleting one of the two.
    """
    import tokenize, keyword
    trace = StringIO()
    nlines = 1
    frecs = inspect.getinnerframes(tb, nlines)
    trace.write('Variables:\n')
    for frame, fname, lineno, funcname, _context, _cindex in frecs:
        trace.write(' File "%s", line %d, ' % (fname, lineno))
        args, varargs, varkw, lcls = inspect.getargvalues(frame)

        # Line-at-a-time source reader for tokenize, starting at the frame's
        # line.  The mutable default ``lno=[lineno]`` deliberately persists
        # across calls, acting as a per-closure line counter (Python 2 idiom).
        def readline(lno=[lineno], *args):
            if args:
                print args  # debug only; tokenize passes no extra args
            try:
                return linecache.getline(fname, lno[0])
            finally:
                lno[0] += 1

        # Token-stream state machine (``all`` shadows the builtin here):
        #   name/prev/scope track the dotted identifier being resolved;
        #   all maps each fully-resolved identifier to its value.
        all, prev, name, scope = {}, None, '', None
        for ttype, tstr, _stup, _etup, _line in tokenize.generate_tokens(
                readline):
            if ttype == tokenize.NAME and tstr not in keyword.kwlist:
                if name:
                    if name[-1] == '.':
                        # Extend a dotted chain by attribute lookup on prev.
                        try:
                            val = getattr(prev, tstr)
                        except AttributeError:
                            # XXX skip the rest of this identifier only
                            break
                        name += tstr
                else:
                    # First component of a new identifier.
                    assert not name and not scope
                    scope, val = lookup(tstr, frame, lcls)
                    name = tstr
                if val is not None:
                    prev = val
                #print ' found', scope, 'name', name, 'val', val, 'in', prev, 'for token', tstr
            elif tstr == '.':
                if prev:
                    name += '.'
            else:
                # Any other token terminates the current identifier.
                if name:
                    all[name] = prev
                prev, name, scope = None, '', None
                if ttype == tokenize.NEWLINE:
                    break
        # Frame's signature with safely-repr'd argument values.
        trace.write(
            funcname + inspect.formatargvalues(args, varargs, varkw, lcls, formatvalue=lambda v: '=' + safe_repr(v)) + '\n')
        if len(all):
            trace.write(' %s\n' % str(all))
    trace.write('\n')
    traceback.print_exception(exctyp, value, tb, None, trace)
    return trace.getvalue()