def format_stack_entry(self, frame_lineno, lprefix=': '): """Return a string with information about a stack entry. The stack entry frame_lineno is a (frame, lineno) tuple. The return string contains the canonical filename, the function name or '<lambda>', the input arguments, the return value, and the line of code (if it exists). """ import linecache, reprlib frame, lineno = frame_lineno filename = self.canonic(frame.f_code.co_filename) s = '%s(%r)' % (filename, lineno) if frame.f_code.co_name: s += frame.f_code.co_name else: s += "<lambda>" if '__args__' in frame.f_locals: args = frame.f_locals['__args__'] else: args = None if args: s += reprlib.repr(args) else: s += '()' if '__return__' in frame.f_locals: rv = frame.f_locals['__return__'] s += '->' s += reprlib.repr(rv) line = linecache.getline(filename, lineno, frame.f_globals) if line: s += lprefix + line.strip() return s
def format_stack_entry(self, frame_lineno, lprefix=': '): import linecache import reprlib (frame, lineno) = frame_lineno filename = self.canonic(frame.f_code.co_filename) s = '%s(%r)' % (filename, lineno) if frame.f_code.co_name: s += frame.f_code.co_name else: s += '<lambda>' if '__args__' in frame.f_locals: args = frame.f_locals['__args__'] else: args = None if args: s += reprlib.repr(args) else: s += '()' if '__return__' in frame.f_locals: rv = frame.f_locals['__return__'] s += '->' s += reprlib.repr(rv) line = linecache.getline(filename, lineno, frame.f_globals) if line: s += lprefix + line.strip() return s
def format_stack_entry(self, frame_lineno, lprefix=": ", context=3):
    """Render one (frame, lineno) stack entry with color templates plus
    `context` surrounding source lines, in a pdb-compatible layout.

    Relies on attributes provided elsewhere (self.color_scheme_table,
    self.canonic, self.curframe, self.__format_line) and on the
    module-level py3compat / ulinecache helpers.
    """
    try:
        import reprlib  # Py 3
    except ImportError:
        import repr as reprlib  # Py 2
    ret = []
    # Build the %-templates once from the active color scheme.
    Colors = self.color_scheme_table.active_colors
    ColorsNormal = Colors.Normal
    tpl_link = u"%s%%s%s" % (Colors.filenameEm, ColorsNormal)
    tpl_call = u"%s%%s%s%%s%s" % (Colors.vName, Colors.valEm, ColorsNormal)
    tpl_line = u"%%s%s%%s %s%%s" % (Colors.lineno, ColorsNormal)
    tpl_line_em = u"%%s%s%%s %s%%s%s" % (Colors.linenoEm, Colors.line, ColorsNormal)
    frame, lineno = frame_lineno
    # The debugger stores the function's return value (if any) in the
    # frame locals under '__return__'.
    return_value = ""
    if "__return__" in frame.f_locals:
        rv = frame.f_locals["__return__"]
        # return_value += '->'
        return_value += reprlib.repr(rv) + "\n"
    ret.append(return_value)
    # s = filename + '(' + `lineno` + ')'
    filename = self.canonic(frame.f_code.co_filename)
    link = tpl_link % py3compat.cast_unicode(filename)
    if frame.f_code.co_name:
        func = frame.f_code.co_name
    else:
        func = "<lambda>"
    call = ""
    if func != "?":
        if "__args__" in frame.f_locals:
            args = reprlib.repr(frame.f_locals["__args__"])
        else:
            args = "()"
        call = tpl_call % (func, args)
    # The level info should be generated in the same format pdb uses, to
    # avoid breaking the pdbtrack functionality of python-mode in *emacs.
    if frame is self.curframe:
        ret.append("> ")
    else:
        ret.append(" ")
    ret.append(u"%s(%s)%s\n" % (link, lineno, call))
    # Show `context` source lines centered on lineno, clamped to the file.
    start = lineno - 1 - context // 2
    lines = ulinecache.getlines(filename)
    start = min(start, len(lines) - context)
    start = max(start, 0)
    lines = lines[start : start + context]
    for i, line in enumerate(lines):
        show_arrow = start + 1 + i == lineno
        # Emphasized template for the current frame / arrowed line.
        linetpl = (frame is self.curframe or show_arrow) and tpl_line_em or tpl_line
        ret.append(self.__format_line(linetpl, filename, start + 1 + i, line, arrow=show_arrow))
    return "".join(ret)
def _dorequest(self, rf, wf):
    """Read one pickled request from rf, dispatch it, and reply on wf.

    A request is a ``(methodname, args, id)`` triple.  Dotted names go to
    self._special(); leading-underscore names are rejected; anything else
    is dispatched to the named method.  A negative id with a (None, None)
    result suppresses the reply (a one-way call).

    Returns 0 on EOF (no more requests), 1 otherwise.
    """
    rp = pickle.Unpickler(rf)
    try:
        request = rp.load()
    except EOFError:
        return 0
    if self._verbose > 1:
        print("Got request: %s" % repr(request))
    # Fix: the original used the name `id` (shadowing the builtin) and, on a
    # malformed request, referenced it unbound in the error reply.
    req_id = None
    try:
        methodname, args, req_id = request
        if '.' in methodname:
            reply = (None, self._special(methodname, args), req_id)
        elif methodname[0] == '_':
            raise NameError("illegal method name %s" % repr(methodname))
        else:
            method = getattr(self, methodname)
            reply = (None, method(*args), req_id)
    except Exception:
        # Was a bare except: don't swallow KeyboardInterrupt/SystemExit.
        reply = (sys.exc_info()[:2], req_id)
    if req_id is not None and req_id < 0 and reply[:2] == (None, None):
        if self._verbose > 1:
            print("Suppress reply")
        return 1
    if self._verbose > 1:
        print("Send reply: %s" % repr(reply))
    wp = pickle.Pickler(wf)
    wp.dump(reply)
    return 1
def __repr__(self):
    """Programmer-oriented representation: the class name plus abbreviated
    previews of the first ten times and values."""
    name = type(self).__name__
    times_preview = reprlib.repr(list(itertools.islice(self._times, 0, 10)))
    values_preview = reprlib.repr(list(itertools.islice(self._values, 0, 10)))
    # Strip anything before the opening bracket of the list repr.
    times_preview = times_preview[times_preview.find('['):]
    values_preview = values_preview[values_preview.find('['):]
    return '{}({}, {})'.format(name, times_preview, values_preview)
def echo_cli( number, reps ):
    """Echo test client: connect, send `reps` numbered lines in random-sized
    chunks, collect the echoed replies, and return truthy on mismatch.

    Depends on module-level names defined elsewhere: log, echo.address,
    network.recv/drain, charrange, chardelay, draindelay, traceback.
    """
    log.normal( "Echo Client %3d connecting... PID [%5d]", number, os.getpid() )
    conn = socket.socket( socket.AF_INET, socket.SOCK_STREAM )
    conn.connect( echo.address )
    log.detail( "Echo Client %3d connected", number )
    sent = b''
    rcvd = b''
    try:
        # Send messages and collect replies 'til done (or incoming EOF).  Then, shut down
        # outgoing half of socket to drain server and shut down server.
        eof = False
        for r in range( reps ):
            msg = ("Client %3d, rep %d\r\n" % ( number, r )).encode()
            log.detail("Echo Client %3d send: %5d: %s", number, len( msg ), reprlib.repr( msg ))
            sent += msg
            # Dribble the message out in random-sized chunks (from charrange)
            # to exercise the server's incremental parser.
            while len( msg ) and not eof:
                out = min( len( msg ), random.randrange( *charrange ))
                conn.send( msg[:out] )
                msg = msg[out:]
                # Await inter-block chardelay if output remains, otherwise await final response
                # before dropping out to shutdown/drain/close.  If we drop out immediately and
                # send a socket.shutdown, it'll sometimes deliver a reset to the server end of
                # the socket, before delivering the last of the data.
                rpy = network.recv( conn, timeout=chardelay if len( msg ) else draindelay )
                if rpy is not None:
                    eof = not len( rpy )  # zero-length recv means the peer closed
                    log.detail( "Echo Client %3d recv: %5d: %s", number, len( rpy ),
                                "EOF" if eof else reprlib.repr( rpy ))
                    rcvd += rpy
            if eof:
                break
        log.normal( "Echo Client %3d done; %s", number,
                    "due to EOF" if eof else "normal termination" )
    except KeyboardInterrupt as exc:
        log.warning( "Echo Client %3d terminated: %r", number, exc )
    except Exception as exc:
        log.warning( "Echo Client %3d failed: %r\n%s", number, exc, traceback.format_exc() )
    finally:
        # One or more packets may be in flight; wait 'til we timeout/EOF.  This shuts down conn.
        rpy = network.drain( conn, timeout=draindelay )
        log.info( "Echo Client %3d drain %5d: %s", number,
                  len( rpy ) if rpy is not None else 0, reprlib.repr( rpy ))
        if rpy is not None:
            rcvd += rpy
    # Count the number of success/failures reported by the Echo client threads
    failed = not ( rcvd == sent )
    if failed:
        log.warning( "Echo Client %3d failed: %s != %s sent", number,
                     reprlib.repr( rcvd ), reprlib.repr( sent ))
    log.info( "Echo Client %3d exited", number )
    return failed
def __repr__(self):
    """Abbreviated representation of the series; although the islice spans
    the whole series, reprlib caps how many elements are actually shown."""
    name = type(self).__name__
    count = len(self)
    times = reprlib.repr(list(itertools.islice(self._times, 0, count)))
    values = reprlib.repr(list(itertools.islice(self._values, 0, count)))
    times = times[times.find('['):]
    values = values[values.find('['):]
    return "{}(times=({}, values={}))".format(name, times, values)
def __str__(self):
    """Human-readable summary: class name, element count, and abbreviated
    previews of the times and values (reprlib caps what is shown)."""
    name = type(self).__name__
    count = len(self)
    times = reprlib.repr(list(itertools.islice(self._times, 0, count)))
    values = reprlib.repr(list(itertools.islice(self._values, 0, count)))
    times = times[times.find('['):]
    values = values[values.find('['):]
    return "{} with {} elements: ({}, {})".format(name, len(self._times), times, values)
def __str__(self):
    """Shortened string form of the TimeSeries:
    [time1, time2, ...], [value1, value2, ...]"""
    times = reprlib.repr(list(itertools.islice(self._times, 0, 10)))
    values = reprlib.repr(list(itertools.islice(self._values, 0, 10)))
    return '{}, {}'.format(times[times.find('['):], values[values.find('['):])
def test():
    """Grab-bag demo of stdlib modules (itertools, re, locale, struct, ...).

    NOTE(review): reads './test.zip' and sets the 'en_US.utf8' locale, so it
    is environment-dependent; it prints everything rather than returning.
    """
    print(os.path.curdir + os.path.sep + sys.argv[0])
    for i in itertools.repeat(1, 3):
        print(i)
    for i, v in enumerate([1, 2, 3, 4]):
        print(i, v)
    questions = ['name', 'quest', 'favorite color']
    answers = ['lancelot', 'the holy grail', 'blue']
    for q, a in zip(questions, answers):
        print('What is your {0}? It is {1}.'.format(q, a))
    sys.stderr.write("NO Error exists")
    print(glob.glob("*.py"))
    a = re.findall(r'\bf[a-z]*', 'which foot or hand fell fastest')
    b = re.sub(r'(\b[a-z]+) \1', r'\1', 'cat in the the hat')
    print(a, b)
    random.choice(['apple', 'pear', 'banana'])
    print((datetime.date.today() - datetime.date(1979, 4, 19)).days)
    reprlib.repr(set('supercalifragilisticexpialidocious'))
    doc = """The wrap() method is just like fill() except that it returns a list of strings instead of one big string with newlines to separate the wrapped lines."""
    print(textwrap.fill(doc, width=40))
    locale.setlocale(locale.LC_ALL, 'en_US.utf8')  # locale -a
    conv = locale.localeconv()
    x = 1234567.8
    # Fix: locale.format() was deprecated since 3.7 and removed in 3.12;
    # locale.format_string() is the drop-in replacement for these calls.
    print(locale.format_string("%d", x, grouping=True))
    print(locale.format_string("%s%.*f", (conv['currency_symbol'], conv['frac_digits'], x), grouping=True))
    print()
    # Parse the first local-file header of a zip by hand ('<IIIHH' is the
    # 16-byte crc/size/name-length/extra-length slice of the header).
    with open("test.zip", "rb") as f:
        data = f.read()
    start = 0
    for i in range(1):
        start += 14
        fields = struct.unpack('<IIIHH', data[start:start + 16])
        crc32, comp_size, uncomp_size, filenamesize, extra_size = fields
        start += 16
        filename = data[start:start + filenamesize]
        start += filenamesize
        print(data[start:start + extra_size])
        print(filename, hex(crc32), comp_size, uncomp_size)
        start += extra_size + comp_size  # skip to the next header
    print(sys.platform)
    print(sys.getdefaultencoding())
    S = 'eggs'
    B = S.encode()
    print(bytes(S, encoding='ascii'))
    print(B.decode())
    print(str(B, encoding='ascii'))
    print(re.split(':', 'aa:ss:dd:ff'))
    print('哈哈哈'.encode('utf-8').decode('utf-8'))
def _format_args_and_kwargs(args, kwargs):
    """Format function arguments and keyword arguments as a call-site
    string, e.g. "(1, a=2)".  reprlib bounds each rendered value.

    Special case for a single parameter: ('hello',) is formatted as
    ('hello').
    """
    rendered = [reprlib.repr(a) for a in args]
    rendered += ['{}={}'.format(name, reprlib.repr(value))
                 for name, value in kwargs.items()]
    return '({})'.format(', '.join(rendered))
def _serve(self):
    """Accept one connection, verify it, then service pickled requests
    until _dorequest() reports EOF.

    Returns None; the accepted socket is closed only on refused
    verification (the makefile wrappers keep it alive otherwise).
    """
    if self._verbose:
        print("Wait for connection ...")
    conn, address = self._socket.accept()
    if self._verbose:
        print("Accepted connection from %s" % repr(address))
    if not self._verify(conn, address):
        print("*** Connection from %s refused" % repr(address))
        conn.close()
        return
    # Fix: pickle streams are binary; text-mode files ('r'/'w') raise
    # TypeError under Python 3, so open the socket files in binary mode.
    rf = conn.makefile('rb')
    wf = conn.makefile('wb')
    ok = 1
    while ok:
        wf.flush()
        if self._verbose > 1:
            print("Wait for next request ...")
        ok = self._dorequest(rf, wf)
def __repr__(self):
    """repr showing the key range, stride, and an abbreviated value list."""
    return '{}(key_min={}, key_max={}, key_stride={}, values={})'.format(
        type(self).__name__,
        self._key_min,
        self._key_max,
        self._key_stride,
        reprlib.repr(self._values),
    )
def write( self, address, value, **kwargs ):
    """ Writes the value; if the PLC is online, logs at a relatively aggressive level.

    Raises PlcOffline when self.online is false; otherwise delegates the
    actual write to self._write().
    """
    # Online writes log at the quieter 'detail' level; offline attempts are
    # surfaced at 'normal' (and then raise below).
    ( log.detail if self.online else log.normal )(
        "%s/%6d <%s %s" % (
            self.description, address,
            "x=" if not self.online else "==",
            reprlib.repr( value )))
    if not self.online:
        # NOTE(review): "%6dd" renders as the number followed by a literal
        # 'd' -- looks like a typo for "%6d"; confirm before changing the
        # user-visible message.
        raise PlcOffline( "Write to PLC %s/%6dd failed: Offline" % (
            self.description, address ))
    self._write( address, value, **kwargs )
def echo_server( conn, addr ):
    """Serve one echo client 'til EOF; then close the socket"""
    # source is a cpppo chainable iterator; received network data is
    # appended ("chained") to it as the state machine consumes symbols.
    source = cpppo.chainable()
    with echo_machine( "echo_%s" % addr[1] ) as echo_line:
        eof = False
        while not eof:
            data = cpppo.dotdict()
            # See if a line has been recognized, stopping at terminal state.  If this machine
            # is ended early due to an EOF, it should still terminate in a terminal state
            for mch, sta in echo_line.run( source=source, data=data ):
                if sta is not None:
                    continue
                # Non-transition; check for input, blocking if non-terminal and none left.  On
                # EOF, terminate early; this will raise a GeneratorExit.
                timeout = 0 if echo_line.terminal or source.peek() is not None else None
                msg = network.recv( conn, timeout=timeout )
                if msg is not None:
                    eof = not len( msg )  # zero-length recv: client closed
                    log.info( "%s recv: %5d: %s", echo_line.name_centered(), len( msg ),
                              "EOF" if eof else reprlib.repr( msg ))
                    source.chain( msg )
                    if eof:
                        break
            # Terminal state (or EOF).
            log.detail( "%s: byte %5d: data: %r", echo_line.name_centered(), source.sent, data )
            if echo_line.terminal:
                # A full line was recognized; echo it back verbatim.
                conn.send( data.echo )
    log.info( "%s done", echo_line.name_centered() )
def log_mutations(execute, sql, params, many, context):
    """Log every non-SELECT query executed, then run it via `execute`."""
    is_select = sql.startswith('SELECT ')
    if not is_select:
        log.debug('Query {sql} params={params} many={many!r}',
                  sql=sql, params=reprlib.repr(params), many=many)
    return execute(sql, params, many, context)
def __repr__(self):
    """Abbreviated repr: the class name plus the first element (if any),
    followed by an ellipsis."""
    name = type(self).__name__
    first = "" if len(self) == 0 else reprlib.repr(self[0])
    return '{}([{},...])'.format(name, first)
def dump(obj):
    """Print a structured report about *obj*: its type, docstring,
    data attributes (with abbreviated values) and callable methods.

    Relies on helpers defined elsewhere: SortedSet (third-party),
    print_table, full_sig, brief_doc.
    """
    print("Type")
    print("====")
    print(type(obj))
    print()
    print("Documentation")
    print("=============")
    # print(obj.__doc__)
    print(inspect.getdoc(obj))
    print()
    print("Attributes")
    print("==========")
    all_attr_names = SortedSet(dir(obj))
    # Callable attributes are treated as methods; the rest as data.
    method_names = SortedSet(
        filter(lambda attr_name: callable(getattr(obj, attr_name)), all_attr_names)
    )
    assert method_names <= all_attr_names
    attr_names = all_attr_names - method_names
    # reprlib bounds the length of each rendered attribute value.
    attr_names_and_values = [(name, reprlib.repr(getattr(obj, name))) for name in attr_names]
    print_table(attr_names_and_values, "Name", "Value")
    print()
    print("Methods")
    print("=======")
    methods = (getattr(obj, method_name) for method_name in method_names)
    method_names_and_doc = [(full_sig(method), brief_doc(method)) for method in methods]
    print_table(method_names_and_doc, "Name", "Description")
    print()
def tnet_server( conn, addr ):
    """Serve one tnet client 'til EOF; then close the socket"""
    source = cpppo.chainable()
    with tnet_machine( "tnet_%s" % addr[1] ) as tnet_mesg:
        eof = False
        while not eof:
            data = cpppo.dotdict()
            # Loop blocking for input, while we've consumed input from source since the last time.
            # If we hit this again without having used any input, we know we've hit a symbol
            # unacceptable to the state machine; stop
            for mch, sta in tnet_mesg.run( source=source, data=data ):
                if sta is not None:
                    continue
                # Non-transition; check for input, blocking if non-terminal and none left.  On
                # EOF, terminate early; this will raise a GeneratorExit.
                timeout = 0 if tnet_mesg.terminal or source.peek() is not None else None
                msg = network.recv( conn, timeout=timeout )  # blocking
                if msg is not None:
                    eof = not len( msg )  # zero-length recv: client closed
                    log.info( "%s: recv: %5d: %s", tnet_mesg.name_centered(), len( msg ),
                              "EOF" if eof else reprlib.repr( msg ))
                    source.chain( msg )
                    if eof:
                        break
            # Terminal state (or EOF).
            log.detail( "%s: byte %5d: data: %r", tnet_mesg.name_centered(), source.sent, data )
            if tnet_mesg.terminal:
                # A complete tnetstring was parsed; reply with its JSON form.
                res = json.dumps( data.tnet.type.input, indent=4, sort_keys=True )
                conn.send(( res + "\n\n" ).encode( "utf-8" ))
    log.info( "%s done", tnet_mesg.name_centered() )
def __repr__(self):
    """repr: class name, key range endpoints, stride, abbreviated values."""
    cls_name = self.__class__.__name__
    return '{}({}, {}, {}, {})'.format(
        cls_name,
        self.key_min(),
        self.key_max(),
        self._key_stride,
        reprlib.repr(self._values))
def __init__(self, data, metadict):
    """Build the light-curve from (time, amplitude, error) triples plus a
    metadata mapping; precompute an abbreviated preview string."""
    self._time = [row[0] for row in data]
    self._amplitude = [row[1] for row in data]
    self._error = [row[2] for row in data]
    self.metadata = metadict
    self.filename = None
    # Preview of (up to) the first ten samples of self.timeseries,
    # trimmed to start at the opening bracket.
    preview = reprlib.repr(list(itertools.islice(self.timeseries, 0, 10)))
    self.components = preview[preview.find('['):]
def log_on_error(execute, sql, params, many, context):
    """Run the query; if it raises, log it (params abbreviated) and re-raise."""
    try:
        return execute(sql, params, many, context)
    except Exception:
        log.debug('Query {sql} params={params} many={many!r}',
                  sql=sql, params=reprlib.repr(params), many=many)
        raise
def reprlib(in_str):
    """The reprlib module.  It provides a version of repr() customized for
    abbreviated displays of a large or deeply nested containers.

    NOTE(review): this demo function shadows the stdlib module name
    'reprlib'; the local import keeps it working regardless.
    """
    import reprlib as _reprlib
    return _reprlib.repr(set(in_str))
def __repr__(self):
    """
    Returns
    -------
    string
        A string representation of the sequence of time series times and
        values.  Truncates longer sequences using the reprlib library.
    """
    pairs = list(zip(self.times, self.values))
    return "TimeSeries" + reprlib.repr(pairs)
def _format_args(args):
    """Format a call's positional arguments, with reprlib bounding the
    output length.

    Special case for a single parameter: ('hello',) is formatted as
    ('hello') -- the trailing comma is dropped.
    """
    rendered = reprlib.repr(args)
    if len(args) == 1 and rendered.endswith(',)'):
        rendered = rendered[:-2] + ')'
    return rendered
def udpServe(key=b'this is my key value!',addr=('localhost',8080)) :
    # Demo of libsodium's ChaCha20-Poly1305 AEAD over UDP: encrypt, verify a
    # round-trip decrypt, then send ciphertext+nonce as one datagram.
    # NOTE(review): the plaintext is the hard-coded literal below (not a
    # parameter) and happens to equal the default key -- confirm intent.
    s = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
    # 8-byte nonce, as used by the original crypto_aead_chacha20poly1305_*
    # (non-IETF) construction.
    nonce = pysodium.randombytes(8)
    ciphertext = pysodium.crypto_aead_chacha20poly1305_encrypt(b'this is my key value!',None,nonce,key)
    # Immediate decrypt purely to demonstrate/verify the API round-trip.
    plaintext = pysodium.crypto_aead_chacha20poly1305_decrypt(ciphertext,None,nonce,key)
    print(plaintext)
    print(ciphertext)
    print(reprlib.repr(ciphertext))
    print(nonce)
    print(ciphertext+nonce)
    # Receiver is expected to split the trailing 8 bytes back off as the nonce.
    s.sendto(ciphertext+nonce,addr)
    s.close()
def _store( self, address, value ):
    """ Remember data value(s) received; by default, just store it/them in our _data table.  Any
    value stored while offline is lost (this will only occur under simulated PLCs, of course)!
    Logs at only high logging levels, due to large amounts of output (eg. due to polling). """
    log.detail( "%s/%6d %s> %s" % (
        self.description, address,
        "-x" if not self.online else "--",
        reprlib.repr( value )))
    if self.online:
        # A list/tuple is stored at consecutive addresses starting at 'address'.
        if isinstance( value, (list,tuple) ):
            # NOTE(review): xrange is Python 2 (or a compat shim defined
            # elsewhere in this file) -- confirm before running on Python 3.
            for offset in xrange( len( value )):
                self._data[address+offset] = value[offset]
        else:
            self._data[address] = value
def process( self, source, machine=None, path=None, data=None ):
    """Convert the collected data according to the type"""
    # The first symbol from source is the tnetstring type tag byte.
    tntype = next( source )
    ours = self.context( path )
    raw = ours + '...data.input'
    # The accumulated payload is an array; tostring() is the Python 2
    # spelling of tobytes().
    src = ( data[raw].tostring() if sys.version_info.major < 3 else data[raw].tobytes() )
    if tntype == b','[0]:
        # ',' -- raw byte-string payload, stored as-is
        log.info("%5d bytes data: %s", len( src ), reprlib.repr( src ))
        data[ours] = src
    elif tntype == b'$'[0]:
        # '$' -- string payload, decoded as UTF-8
        log.info("%5d string data: %s", len( src ), reprlib.repr( src ))
        data[ours] = src.decode( 'utf-8' )
    elif tntype == b'#'[0]:
        # '#' -- integer payload
        data[ours] = int( src )
        log.info("%5d int data: %s == %s", len( src ), reprlib.repr( src ),
                 reprlib.repr( data[ours] ))
    elif tntype == b'~'[0]:
        # '~' -- null; the payload must be empty
        assert 0 == len( src )
        data[ours] = None
    else:
        assert False, "Invalid tnetstring type: %s" % tntype
def _verify(self, conn, address):
    """Challenge/response check of a newly accepted connection.

    Sends a numeric challenge line, reads the client's numeric response
    (bounded to 100 bytes), and compares it via
    self._compare_challenge_response().  Returns 1 on success, 0 otherwise.
    """
    challenge = self._generate_challenge()
    # Fix: sockets carry bytes; the original Python 2 code sent a str.
    conn.send(("%d\n" % challenge).encode())
    response = b""
    while b"\n" not in response and len(response) < 100:
        data = conn.recv(100)
        if not data:
            break
        response = response + data
    try:
        # Fix: string.atol()/string.strip()/string.atol_error were removed
        # from the string module; int() on the stripped text is equivalent.
        response = int(response.strip().decode())
    except (ValueError, UnicodeDecodeError):
        if self._verbose > 0:
            print("Invalid response syntax", repr(response))
        return 0
    if not self._compare_challenge_response(challenge, response):
        if self._verbose > 0:
            print("Invalid response value", repr(response))
        return 0
    if self._verbose > 1:
        print("Response matches challenge. Go ahead!")
    return 1
def sizeof(o):
    """Recursively estimate the footprint of *o*, using the enclosing
    scope's `seen` set to avoid double counting and `all_handlers` to
    enumerate the children of known container types."""
    if id(o) in seen:
        # do not double count the same object
        return 0
    seen.add(id(o))
    size = getsizeof(o, default_size)
    if verbose:
        print(size, type(o), repr(o), file=stderr)
    for container_type, children_of in all_handlers.items():
        if isinstance(o, container_type):
            size += sum(map(sizeof, children_of(o)))
            break
    return size
# A TOUR OF THE STANDARD LIBRARY II
# OUTPUT FORMATTING
import reprlib
print(reprlib.repr(set('supercalifragilisticoespialidoso')))

# PRETTY-PRINTER
import pprint
t = [[[['negro', 'turquesa'], 'blanco', ['verde', 'rojo']], [['magenta', 'amarillo'], 'azul']]]
pprint.pprint(t, width=30)

import textwrap
doc = """El método wrap() es como fill(), excepto que devuelve una lista de strings en lugar de una gran string con saltos de línea como separadores."""
print(textwrap.fill(doc, width=40))

import locale
print(locale.setlocale(locale.LC_ALL, ''))
conv = locale.localeconv()  # get a mapping of the locale's conventions
x = 1234567.8
# Fix: locale.format() was deprecated since 3.7 and removed in 3.12;
# locale.format_string() is the drop-in replacement.
print(locale.format_string("%d", x, grouping=True))
print(
    locale.format_string("%s%.*f", (conv['currency_symbol'], conv['frac_digits'], x),
                         grouping=True))

# TEMPLATES
from string import Template
t = Template('${village} folk send $$10 to $cause.')
print(t.safe_substitute(village='Nottingham', cause='the ditch fund'))
def __repr__(self):
    """Limited-length representation of the vector: 'Vector([...])',
    built from the abbreviated repr of the component array."""
    body = reprlib.repr(self._components)   # e.g. "array('d', [1.0, 2.0, ...])"
    body = body[body.find('['):-1]          # keep just the bracketed list, drop ")"
    return 'Vector({})'.format(body)
def __repr__(self):
    """Vector repr with every component rounded to three decimal places."""
    rounded = [round(c, 3) for c in self.components]
    body = reprlib.repr(rounded)
    body = body[body.find("["):]
    return f"Vector({body})"
def __repr__(self):
    """Short description of the index: version plus abbreviated content."""
    content = reprlib.repr(self.index)
    return "<Index Ver {0.version}, content {1}>".format(self, content)
def __repr__(self):
    """Abbreviated 'Vector([...])' built from the component array."""
    text = reprlib.repr(self._components)
    start = text.find("[")
    return "Vector({})".format(text[start:-1])
def __repr__(self):
    """Debug repr; the !r conversion quotes the class name on purpose."""
    cls = type(self)
    body = reprlib.repr(self._components)
    body = body[body.find('['):-1]
    return '{.__name__!r} ({})'.format(cls, body)
def __repr__(self):
    """Sentence(...) with the text abbreviated by reprlib if too long."""
    abbreviated = reprlib.repr(self.text)
    return 'Sentence(%s)' % abbreviated
def format_stack_entry(self, frame_lineno, lprefix=': ', context=None):
    """Render one (frame, lineno) stack entry with color templates plus
    `context` source lines, in a pdb-compatible layout.

    Uses attributes supplied elsewhere (self.context, self.stdout,
    self.color_scheme_table, self.canonic, self.curframe,
    self.__format_line) and the module-level py3compat/linecache.
    """
    if context is None:
        context = self.context
    # NOTE(review): this validation only *prints* on a bad value and then
    # carries on -- a non-integer or non-positive context still reaches the
    # arithmetic below; confirm whether it should raise/fall back instead.
    try:
        context=int(context)
        if context <= 0:
            print("Context must be a positive integer", file=self.stdout)
    except (TypeError, ValueError):
        print("Context must be a positive integer", file=self.stdout)
    try:
        import reprlib  # Py 3
    except ImportError:
        import repr as reprlib  # Py 2
    ret = []
    # Build the %-templates once from the active color scheme.
    Colors = self.color_scheme_table.active_colors
    ColorsNormal = Colors.Normal
    tpl_link = u'%s%%s%s' % (Colors.filenameEm, ColorsNormal)
    tpl_call = u'%s%%s%s%%s%s' % (Colors.vName, Colors.valEm, ColorsNormal)
    tpl_line = u'%%s%s%%s %s%%s' % (Colors.lineno, ColorsNormal)
    tpl_line_em = u'%%s%s%%s %s%%s%s' % (Colors.linenoEm, Colors.line, ColorsNormal)
    frame, lineno = frame_lineno
    # The debugger stores the function's return value in '__return__'.
    return_value = ''
    if '__return__' in frame.f_locals:
        rv = frame.f_locals['__return__']
        #return_value += '->'
        return_value += reprlib.repr(rv) + '\n'
    ret.append(return_value)
    #s = filename + '(' + `lineno` + ')'
    filename = self.canonic(frame.f_code.co_filename)
    link = tpl_link % py3compat.cast_unicode(filename)
    if frame.f_code.co_name:
        func = frame.f_code.co_name
    else:
        func = "<lambda>"
    call = ''
    if func != '?':
        if '__args__' in frame.f_locals:
            args = reprlib.repr(frame.f_locals['__args__'])
        else:
            args = '()'
        call = tpl_call % (func, args)
    # The level info should be generated in the same format pdb uses, to
    # avoid breaking the pdbtrack functionality of python-mode in *emacs.
    if frame is self.curframe:
        ret.append('> ')
    else:
        ret.append(' ')
    ret.append(u'%s(%s)%s\n' % (link,lineno,call))
    # Show `context` source lines centered on lineno, clamped to the file.
    start = lineno - 1 - context//2
    lines = linecache.getlines(filename)
    start = min(start, len(lines) - context)
    start = max(start, 0)
    lines = lines[start : start + context]
    for i,line in enumerate(lines):
        show_arrow = (start + 1 + i == lineno)
        # Emphasized template for the current frame / arrowed line.
        linetpl = (frame is self.curframe or show_arrow) \
                  and tpl_line_em \
                  or tpl_line
        ret.append(self.__format_line(linetpl, filename,
                                      start + 1 + i, line,
                                      arrow = show_arrow) )
    return ''.join(ret)
def format_stack_entry(
    dbg_obj, frame_lineno, lprefix=": ", include_location=True, color="plain"
):
    """Format and return a stack entry gdb-style.

    Note: lprefix is not used. It is kept for compatibility.
    """
    # Relies on helpers defined elsewhere: frame2file, format_token,
    # Mformat, Mrepr, inspect, is_exec_stmt, deparse_source_from_code,
    # get_call_function_name, _re_pseudo_file.
    frame, lineno = frame_lineno
    filename = frame2file(dbg_obj.core, frame)
    s = ""
    # An empty co_name means a lambda frame.
    if frame.f_code.co_name:
        funcname = frame.f_code.co_name
    else:
        funcname = "<lambda>"
        pass
    s = format_token(Mformat.Function, funcname, highlight=color)
    args, varargs, varkw, local_vars = inspect.getargvalues(frame)
    # A module-level frame has no args at all.
    if "<module>" == funcname and ([], None, None,) == (
        args,
        varargs,
        varkw,
    ):
        is_module = True
        if is_exec_stmt(frame):
            # Module frame produced by an exec: show the deparsed source.
            fn_name = format_token(Mformat.Function, "exec", highlight=color)
            source_text = deparse_source_from_code(frame.f_code)
            s += " %s(%s)" % (
                format_token(Mformat.Function, fn_name, highlight=color),
                source_text,
            )
        else:
            fn_name = get_call_function_name(frame)
            if fn_name:
                source_text = deparse_source_from_code(frame.f_code)
                if fn_name:
                    s += " %s(%s)" % (
                        format_token(Mformat.Function, fn_name, highlight=color),
                        source_text,
                    )
            pass
    else:
        is_module = False
        try:
            parms = inspect.formatargvalues(args, varargs, varkw, local_vars)
        except:
            pass
        else:
            # Truncate an over-long argument string to maxargstrsize chars.
            maxargstrsize = dbg_obj.settings["maxargstrsize"]
            if len(parms) >= maxargstrsize:
                parms = "%s...)" % parms[0:maxargstrsize]
                pass
            s += parms
        pass
    # Note: ddd can't handle wrapped stack entries (yet).
    # The 35 is hoaky though. FIXME.
    if len(s) >= 35:
        s += "\n "
    if "__return__" in frame.f_locals:
        # The debugger stores the return value under '__return__'.
        rv = frame.f_locals["__return__"]
        s += "->"
        s += format_token(Mformat.Return, Mrepr.repr(rv), highlight=color)
        pass
    if include_location:
        is_pseudo_file = _re_pseudo_file.match(filename)
        add_quotes_around_file = not is_pseudo_file
        # Choose the connective ("in exec", "file", "called from file")
        # appropriate to the kind of frame.
        if is_module:
            if filename == "<string>":
                s += " in exec"
            elif not is_exec_stmt(frame) and not is_pseudo_file:
                s += " file"
        elif s == "?()":
            if is_exec_stmt(frame):
                s = "in exec"
                # exec_str = get_exec_string(frame.f_back)
                # if exec_str != None:
                #     filename = exec_str
                #     add_quotes_around_file = False
                #     pass
                # pass
            elif not is_pseudo_file:
                s = "in file"
                pass
            pass
        elif not is_pseudo_file:
            s += " called from file"
            pass
        if add_quotes_around_file:
            filename = "'%s'" % filename
        s += " %s at line %s" % (
            format_token(Mformat.Filename, filename, highlight=color),
            format_token(Mformat.LineNumber, "%r" % lineno, highlight=color),
        )
    return s
def __repr__(self):
    """Sentence repr; reprlib abbreviates overly long text."""
    return "Sentence(%s)" % (reprlib.repr(self.text),)
def __repr__(self):
    """Sentence repr; the text is abbreviated via reprlib."""
    shortened = reprlib.repr(self.text)
    return 'Sentence({})'.format(shortened)
def train(config, hdfs_prefix, ftime, gap, ckpt_dir, export_dir, metric_dir):
    """Entry for trainig

    Args:
      config: (configparser) All the hyperparameters for training
      hdfs_prefix: HDFS path prefix holding per-day train/test shards
      ftime: anchor date, "%Y%m%d"
      gap: number of preceding days whose shards are used
      ckpt_dir / export_dir / metric_dir: checkpoint, SavedModel and
        metrics-json output locations
    """
    # Collect the `gap` days of shard globs preceding ftime.
    train_files = []
    dev_files = []
    cur_date = datetime.datetime.strptime(ftime, "%Y%m%d")
    for i in range(1, gap + 1):
        dest_date = (cur_date + datetime.timedelta(days=-i)).strftime("%Y%m%d")
        train_files.append(hdfs_prefix + "/" + dest_date + "/train/part-r-*")
        dev_files.append(hdfs_prefix + "/" + dest_date + "/test/part-r-*")
    logging.info('train files: {}'.format(reprlib.repr(train_files)))
    logging.info('dev files: {}'.format(reprlib.repr(dev_files)))
    # The feature spec file path lives in the [input] section, key 'spec'.
    feature_config = configparser.ConfigParser()
    feature_config.read(config['input']['spec'])
    # The feature config carries boundaries etc.; FCGen builds per-column
    # feature handling (numeric, embedding, ...).
    columns, spec = FCGen.GetFeatureSpec(feature_config)
    batch_size = int(config['train']['batch_size'])
    conf = tf.ConfigProto()
    conf.gpu_options.allow_growth = True
    # Force CPU-only training.
    os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
    run_config = tf.estimator.RunConfig(save_checkpoints_secs=1800).replace(
        session_config=conf)
    logging.info("Creating model...")
    # Define the model
    model = tf.estimator.BoostedTreesClassifier(
        config=run_config,
        n_batches_per_layer=1000,
        n_trees=100,
        learning_rate=0.2,
        l1_regularization=0.01,
        l2_regularization=0.01,
        max_depth=10,
        model_dir=ckpt_dir,
        feature_columns=list(columns['linear'].values()),
        weight_column=list(columns['weight'].values())[0]  # required when a weight column exists
    )
    #model = tf.estimator.add_metrics(model, metric_auc)
    # Train and evaluate
    epochs = int(config['train']['epochs'])
    for i in range(epochs):
        logging.info("training...")
        model.train(input_fn=lambda: input_fn(
            train_files, spec, shuffle=True, batch_size=batch_size))
        results = model.evaluate(input_fn=lambda: input_fn(
            dev_files, spec, shuffle=False, batch_size=batch_size))
    # Persist the final epoch's metrics for the surrounding pipeline.
    # NOTE(review): structure inferred from the flattened source -- confirm
    # whether the metrics dump belongs inside or after the epoch loop.
    auc = float(results["auc"])
    logloss = float(results["loss"])
    index = [{
        "name": "auc",
        "type": "float",
        "value": str(auc)
    }, {
        "name": "logloss",
        "type": "float",
        "value": str(logloss)
    }]
    file_name = metric_dir + "/metrics_info.json"
    with open(file_name, 'w') as file_obj:
        json.dump(index, file_obj)
    model.export_savedmodel(
        export_dir_base=export_dir,
        serving_input_receiver_fn=lambda: input_receiver(spec),
        strip_default_attrs=True)
def __repr__(self):
    """Class name plus an abbreviated preview of the first ten samples."""
    name = type(self).__name__
    preview = reprlib.repr(list(itertools.islice(self.timeseries, 0, 10)))
    preview = preview[preview.find('['):]
    return '{}({})'.format(name, preview)
def __repr__(self):
    """Paragraph repr; the text is abbreviated via reprlib."""
    shortened = reprlib.repr(self.text)
    return 'Paragraph(%s)' % shortened
def train(config , trainfile, testfile):
    """Entry for trainig

    Args:
      config: (configparser) All the hyperparameters for training
      trainfile / testfile: comma-separated directory names under the
        local prefix (or HDFS cluster path) holding tfrecord shards
    """
    prefix = "/data/home/graywang/esmm/tfrecords/rt_mt"
    train_dirs = trainfile.split(',')
    cluster = "hdfs://ss-sng-dc-v2/stage/outface/SNG/g_sng_qqmusic_develop/g_sng_qqmusic_develop/timmili/gray_temp/"
    # Bucket the shards into groups that are trained on sequentially below.
    if config['train']['source'] == 'hdfs':
        train_files = [[] for _ in range(5)]
        for train_dir in train_dirs:
            for i in range(5):
                train_files[i].append(cluster + train_dir + "/part-r-00" + str(i) + "*")
    else:
        train_files = [[] for _ in range(4)]
        for train_dir in train_dirs:
            for f in os.listdir(prefix + "/" + train_dir):
                # Shard-count dependent divisor so part numbers map to 4 buckets.
                if len(os.listdir(prefix + "/" + train_dir)) > 250:
                    div = 125
                else:
                    div = 50
                if f != "_SUCCESS":
                    ind = int(int(f.split('-')[-1]) / div)
                    train_files[ind].append(os.path.join(prefix, train_dir, f))
    logging.info('train directory: {}'.format(train_dirs))
    logging.info('train files: {}'.format(reprlib.repr(train_files)))
    dev_dirs = testfile.split(',')
    dev_files = [os.path.join(prefix, dev_dir, f) for dev_dir in dev_dirs
                 for f in os.listdir(prefix + "/" + dev_dir) if f != "_SUCCESS"]
    logging.info('dev directory: {}'.format(dev_dirs))
    logging.info('dev files: {}'.format(reprlib.repr(dev_files)))
    #logging.info('dev directory: {}'.format(dev_dirs))
    #logging.info('dev files: {}'.format(reprlib.repr(dev_files)))
    # The feature spec file path lives in the [input] section, key 'spec';
    # it carries boundaries etc., and FCGen builds per-column handling
    # (numeric, embedding, ...).
    feature_config = configparser.ConfigParser()
    feature_config.read(config['input']['spec'])
    columns, spec, dimension_config = FCGen.GetFeatureSpec(feature_config)
    batch_size = int(config['train']['batch_size'])
    conf = tf.ConfigProto()
    conf.gpu_options.allow_growth=True
    # NOTE(review): dimension_config from GetFeatureSpec is immediately
    # discarded here -- confirm whether that is intentional.
    dimension_config = {}
    os.environ["CUDA_VISIBLE_DEVICES"] = "0"
    run_config = tf.estimator.RunConfig().replace(
        model_dir=config['train'].get('model_dir', 'model_dir'),
        save_checkpoints_secs=3600,
        session_config=conf)
    dynamic = config['train']['dynamic'] == 'true'
    # Optional warm start from a previous checkpoint directory.
    warm_dir = config['train'].get('warm_dir', '')
    if len(warm_dir) > 1:
        ws = tf.estimator.WarmStartSettings(ckpt_to_initialize_from=warm_dir,
                                            vars_to_warm_start=".*")
    else:
        ws = None
    print("dynamic:", dynamic)
    logging.info("Creating model...")
    # Define the model
    hidden_units = [int(n) for n in config['model']['hidden_units'].split(',')]
    learning_rate = float(config['model']['learning_rate'])
    ctr_reg = float(config['model'].get('ctr_reg', '1e-4'))
    cvr_reg = float(config['model'].get('cvr_reg', '1e-4'))
    ctcvr_loss_weight = float(config['model'].get('ctcvr_loss_weight', '1.0'))
    model = tf.estimator.Estimator(
        model_fn=esmm_model_fn,
        params={
            'cat_columns': columns['cat'],
            'val_columns': columns['val'],
            'dnn_columns': list(columns['dnn'].values()),
            'weight_columns': list(columns['weight'].values())[0],
            'column_to_field': {},
            'hidden_units': hidden_units,
            'learning_rate': learning_rate,
            'ctr_reg': ctr_reg,
            'cvr_reg': cvr_reg,
            'reg': 1e-4,
            'dimension_config': dimension_config,
            'ctcvr_loss_weight': ctcvr_loss_weight,
            'model': config['model']['model'],
            'embed_dim': int(config['model']['embedding_dim']),
            'dynamic': dynamic
        },
        config = run_config,
        warm_start_from=ws
    )
    # Train and evaluate
    max_steps = config['train'].get('max_step', '')
    if max_steps == '':
        max_steps = None
    else:
        max_steps = int(max_steps)
    #for variable_name in model.get_variable_names():
    #    print(variable_name)
    logging.info("training...")
    epochs = int(config['train'].get('epochs', '1'))
    #train_input_fn = lambda: input_fn(train_files, spec, True, batch_size, mt=True)
    if config['train']['source'] == 'hdfs':
        input_func = input_fn_pattern
    else:
        input_func = input_fn
    eval_input_fn = lambda: input_fn(dev_files, spec, False, batch_size, mt=True)
    for i in range(epochs):
        logging.info("{}th training...".format(i+1))
        # Train bucket-by-bucket, evaluating after each bucket.
        for j in range(len(train_files)):
            model.train(input_fn=lambda: input_func(train_files[j], spec, True, batch_size, mt=True))
            results = model.evaluate(input_fn=eval_input_fn)
            logging.info("{}th test results...".format(i+j+1))
            for key in sorted(results):
                print('%s: %s' % (key, results[key]))
    model.export_savedmodel(export_dir_base=config['train'].get('export_dir', 'export_dir'),
                            serving_input_receiver_fn=lambda: input_receiver(spec),
                            strip_default_attrs=True)
def __repr__(self): return 'Sentence(%s)' % reprlib.repr(self.text)
import reprlib

# Every x/(x - y) for x in 51..99 and y in 0..99 with y != x.
# BUG FIX: the original repeated this identical comprehension twice back to
# back; the redundant second copy has been removed (output is unchanged).
values = [
    x / (x - y)
    for x in range(100)
    if x > 50
    for y in range(100)
    if x - y != 0
]

# The same computation written as explicit nested loops, kept for comparison
# with the comprehension above (it rebinds `values` to an equal list).
values = []
for x in range(100):
    if x > 50:
        for y in range(100):
            if x - y != 0:
                values.append(x / (x - y))

# reprlib.repr truncates the 4851-element list to a short summary string.
print(reprlib.repr(values))
def __repr__(self): components = reprlib.repr(self._components) components = components[components.find('['):-1] return 'Vector({})'.format(components)
def __repr__(self):
    # Delegate to the size-limited repr of a RemoteClassAttr wrapper rooted
    # at this object (None selects the top-level attribute path).
    root_attr = RemoteClassAttr(self, None)
    return reprlib.repr(root_attr)
def __repr__(self): # use reprlib.repr to produce limited length representations components = reprlib.repr(self._components) components = components[components.find('['):-1] return 'Vector({})'.format(components)
def __repr__(self): components = reprlib.repr(self._components) components = components[components.find('['):-1] return f'{self._classname}({components})'
def __repr__(self): return ('Sentence : %s' % (reprlib.repr(self.__sentence)))
def _get_local_repr(self, path): try: obj = self._get_local_object(path) return reprlib.repr(obj) except AttributeError: return None
class TestStatisticalFunctions(unittest.TestCase):
    """Unit tests for the module-level average() helper."""

    def test_average(self):
        self.assertEqual(average([20, 30, 70]), 40.0)
        self.assertEqual(round(average([1, 5, 7]), 1), 4.3)
        with self.assertRaises(ZeroDivisionError):
            average([])
        with self.assertRaises(TypeError):
            # BUG FIX: the original called average([20, 30, 70]) here — a
            # valid call (it returns 40.0 above), so assertRaises failed.
            # Passing three separate positional arguments raises TypeError.
            average(20, 30, 70)

# unittest.main()  # Calling from the command line invokes all tests

# reprlib produces size-limited reprs of large or deeply nested containers.
import reprlib
it = reprlib.repr(set('supercalifragilisticexpialidocious'))
print(it)

# pprint wraps nested structures at the requested output width.
import pprint
t = [[[['black', 'cyan'], 'white', ['green', 'red']], [['magenta',
    'yellow'], 'blue']]]
pprint.pprint(t, width=30)

# textwrap.fill reflows a paragraph to fit the given column width.
import textwrap
doc = """The wrap() method is just like fill() except that it returns
a list of strings instead of one big string with newlines to separate
the wrapped lines."""

print(textwrap.fill(doc, width=40))
def __repr__(self): components = reprlib.repr(self._components) s_index = slice(components.find('['), -1) components = components[s_index] return 'Vector({})'.format(components)
def __repr__(self): return 'Plural words(%s)' % reprlib.repr( ['four ' + word + 's' for word in self.words])
def __repr__(self): return '[%s] %s' % (self.__class__.__name__, reprlib.repr(self.text))
def train(config, trainfile, testfile, time):
    """Entry point for training.

    Builds file globs from the comma-separated train/test directory lists,
    loads the feature spec, constructs a tf.estimator.Estimator around
    dssm_model_fn, and runs predict() over the evaluation input.

    Args:
        config: (configparser) All the hyperparameters for training.
        trainfile: comma-separated training data directories.
        testfile: comma-separated evaluation data directories.
        time: timestamp string appended to model/export dir paths.
            NOTE(review): this parameter shadows the stdlib `time` module name.
    """
    keys = get_norm_keys(config['input'].get('conf'))  # NOTE(review): `keys` is never used below
    #print("get_norm_keys")
    #print(keys)
    # Expand each training directory into a "part-r-*" glob pattern.
    train_dirs = trainfile.split(',')
    train_files = [train_dir + 'part-r-*' for train_dir in train_dirs]
    #train_files = tf.random_shuffle(tf.train.match_filenames_once([os.path.join(train_dir, f) for f in os.listdir(train_dir) if f != "_SUCCESS"]))
    #train_files = tf.random_shuffle(tf.train.match_filenames_once(['%s/%s/part-r-*' % (data_path, dt) for dt in date_list]))
    logging.info('train directory: {}'.format(train_dirs))
    logging.info('train files: {}'.format(reprlib.repr(train_files)))
    #dev_dirs = ["/data/home/timmili/dssm_venus/"]
    #dev_files = ["/data/home/timmili/dssm_venus/part-r-00199"]
    dev_dirs = testfile.split(',')
    dev_files = [dev_dir + 'part-r-*' for dev_dir in dev_dirs]
    logging.info('dev directory: {}'.format(dev_dirs))
    logging.info('dev files: {}'.format(reprlib.repr(dev_files)))
    # Feature spec file: its path comes from key "spec" in the [input] section.
    feature_config = configparser.ConfigParser()
    feature_config.read(config['input']['spec'])
    # The feature config carries boundaries etc.; FCGen builds per-column
    # handling (numeric vs. embedding and so on differ by column type).
    columns, spec = FCGen.GetFeatureSpec( feature_config)
    print(columns.keys())
    print("++++++user_columns")
    for v in columns['user_columns']:
        print(v)
    print("++++++item_columns")
    for v in columns['item_columns']:
        print(v)
    batch_size = int(config['train']['batch_size'])
    conf = tf.ConfigProto()
    conf.gpu_options.allow_growth = True  # claim GPU memory on demand, not all at once
    os.environ["CUDA_VISIBLE_DEVICES"] = "5"  # NOTE(review): GPU index hard-coded to 5
    model_dir = config['train'].get('model_dir', 'model_dir') + time + "/"
    run_config = tf.estimator.RunConfig().replace(model_dir=model_dir, session_config=conf)
    train_input_fn = lambda: input_fn_pattern(train_files, spec, True, batch_size)  # NOTE(review): never used below
    eval_input_fn = lambda: input_fn_pattern(dev_files, spec, False, batch_size )
    logging.info("Creating model...")
    # Define the model
    hidden_units = [int(n) for n in config['model']['hidden_units'].split(',')]
    learning_rate = float(config['model']['learning_rate'])
    ctr_reg = float(config['model'].get('ctr_reg', '1e-6'))
    cvr_reg = float(config['model'].get('cvr_reg', '1e-4'))
    ctcvr_loss_weight = float(config['model'].get('ctcvr_loss_weight', '1.0'))
    model = tf.estimator.Estimator(model_fn=dssm_model_fn,
                                   params={
                                       'user_columns': list(columns['user_columns'].values()),
                                       'item_columns': list(columns['item_columns'].values()),
                                       'hidden_units': hidden_units,
                                       'learning_rate': learning_rate,
                                       'ctr_reg': ctr_reg,
                                       'cvr_reg': cvr_reg,
                                       'ctcvr_loss_weight': ctcvr_loss_weight,
                                       'model': config['model']['model'],
                                       'embedding_size': int(config['model'].get( 'embedding_size', '4'))
                                   },
                                   config=run_config)
    # Train and evaluate
    max_steps = config['train'].get('max_step', '')
    if max_steps == '':
        max_steps = None
    else:
        max_steps = int(max_steps)
    # NOTE(review): max_steps, epochs and export_dir_base are computed but never
    # used below — this entry currently only runs predict(), no training/export.
    logging.info("training...")
    epochs = int(config['train'].get('epochs', '1'))
    export_dir_base = config['train'].get('export_dir', 'export_dir') + time + "/"
    predictions = model.predict(input_fn=eval_input_fn)
    print("+++++++++++++999c")
    print(list(predictions))
    # NOTE(review): predict() returns a generator; the list() call above already
    # exhausted it, so this loop prints nothing — confirm which one is intended.
    for x in predictions:
        print(x)
def __repr__(self): components = reprlib.repr(self._components) # 제한된 길이로 출력 components = components[components.find('['): -1] # 문자열 중 앞에 나오는 "array('d'," 를 제거 return 'Vector({})'.format(components)
class Point2D:
    """A 2-D point with str/repr support and a custom 'r' (reversed) format spec."""

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __str__(self):
        # Human-friendly "(x, y)" form.
        return '({}, {})'.format(self.x, self.y)

    def __repr__(self):
        # Unambiguous constructor-like form for debugging.
        return 'Point2D(x={}, y={})'.format(self.x, self.y)

    def __format__(self, spec):
        # The custom 'r' spec prints the coordinates reversed (y first);
        # every other spec falls back to the natural "x, y" order.
        if spec == 'r':
            return '{}, {}'.format(self.y, self.x)
        return '{}, {}'.format(self.x, self.y)


import reprlib

# One million points: len() reports the exact count, while reprlib.repr
# shows only a short, truncated prefix of the list.
points = [Point2D(x, y) for x in range(1000) for y in range(1000)]
print(len(points))
print(reprlib.repr(points))