def main(): import DNS, timing, socket, time res = {} for server in servers: res[server] = [100000, 0, 0, 0] # min,max,tot,failed for what, querytype in lookups: for count in range(rpts): for server in servers: d = DNS.DnsRequest(server=server, timeout=1) fail = 0 timing.start() try: r = d.req(name=what, qtype=querytype) except DNS.Error: fail = 1 timing.finish() if fail: res[server][3] = res[server][3] + 1 print "(failed)", res[server][3] if 0: if r.header['ancount'] == 0: print "WARNING: Server",server,"got no answers for", \ what, querytype t = timing.milli() print server, "took", t, "ms for", what, querytype res[server][0] = min(t, res[server][0]) res[server][1] = max(t, res[server][1]) res[server][2] = res[server][2] + t for server in servers: queries = rpts * len(lookups) r = res[server] print "%-30s %2d/%2d(%3.2f%%) %dms/%dms/%dms min/avg/max" % ( socket.gethostbyaddr(server)[0], queries - r[3], queries, ((queries - r[3]) * 100.0) / queries, r[0], r[2] / queries, r[1])
def process(args): if os.path.exists(options.output) and options.erase: shutil.rmtree(options.output) if not os.path.exists(options.output): os.makedirs(options.output) if options.static is not None: static = options.static.split(',') for dir in static: outpath = os.path.join(options.output, dir) if os.path.exists(outpath): shutil.rmtree(outpath) copytree(dir, outpath) if options.trac and os.path.isdir(options.trac): projects = read_trac(options.trac) else: if options.data: projects = read_data(options.data) else: projects = [] projects.sort(key=operator.itemgetter('desc'), cmp=locale.strcoll) timing.start() for dirpath, dirnames, filenames in os.walk(options.input): try: process_dir(dirpath, filenames, projects) except: if options.keepgoing: print 'Error!' else: raise timing.finish() print 'Website build time: %s' % timing.milli()
def wrapper(*args, **kw):
    """Call *function*, appending a (timestamp, name, microseconds) row to timing.csv.

    Returns whatever *function* returns (the original wrapper discarded it,
    which breaks any decorated function whose result matters).
    """
    timing.start()
    result = function(*args, **kw)
    timing.finish()
    # 'with' guarantees the log is closed even if the write raises;
    # the original leaked the handle on an exception between open and close.
    with open("timing.csv", "a") as f:
        f.write("%s,%s,%d\n" % (datetime.now(), function.__name__, timing.micro()))
    return result
def call_method(num): global process_size timing.start() z = eval('method' + str(num))() timing.finish() print "method", num print "time", float(timing.micro()) / 1000000 print "output size ", len(z) / 1024, "kb" print "process size", process_size, "kb" print
def _cursor_execute(cursor, sql, vars):
    """Execute *sql* with *vars* on *cursor*, logging statement and slow timings."""
    utils.sql_log("\n%s\n" % sql)
    utils.sql_log("\n%s\n" % str(vars))
    # TODO: maybe time things only if debug flag is on
    timing.start()
    cursor.execute(sql, vars)
    timing.finish()
    duration_ms = timing.milli()
    if duration_ms > config.TIMING_THRESHOLD:
        utils.sql_log("TIMING: " + str(duration_ms))
def __release_flvfile(self, flags):
    """Close the media file; for mplayer playback, fold elapsed wall time
    into the entry's watchtime/count statistics."""
    self.file.close()
    if self.cmd in ["mplayer"]:
        timing.finish()
        # watchtime is in seconds; timing.milli() is milliseconds
        watched = (timing.milli() / 1000.0) + self.entry.get_watchtime()
        self.entry.set_watchtime(watched)
        self.entry.set_count(self.entry.get_count() + 1)
        stats.categorize(self.entry)
        stats.update_stat_lists(self.entry)
    # NOTE(review): placed outside the mplayer branch — original indentation
    # was lost in this file; confirm against version history.
    self.detach_subtitles()
def testpacker(): N = 2500 R = range(N) import timing # See section 4.1.4 of RFC 1035 timing.start() for i in R: p = Packer() p.addaddr('192.168.0.1') p.addbytes('*' * 20) p.addname('f.ISI.ARPA') p.addbytes('*' * 8) p.addname('Foo.F.isi.arpa') p.addbytes('*' * 18) p.addname('arpa') p.addbytes('*' * 26) p.addname('') timing.finish() print timing.milli(), "ms total for packing" print round(timing.milli() / i, 4), 'ms per packing' #p.dump() u = Unpacker(p.buf) u.getaddr() u.getbytes(20) u.getname() u.getbytes(8) u.getname() u.getbytes(18) u.getname() u.getbytes(26) u.getname() timing.start() for i in R: u = Unpacker(p.buf) res = (u.getaddr(), u.getbytes(20), u.getname(), u.getbytes(8), u.getname(), u.getbytes(18), u.getname(), u.getbytes(26), u.getname()) timing.finish() print timing.milli(), "ms total for unpacking" print round(timing.milli() / i, 4), 'ms per unpacking'
def time(bmark, gs, ls, N=1, force_exit=0):
    """Run *bmark* N times and return the mean seconds per run.

    Doubles N until the measured total exceeds MIN_TIME_RESOLUTION
    (unless *force_exit*), so very fast benchmarks are averaged over
    enough iterations. Returns None if the benchmark raises.
    """
    code = compiler.compile(bmark, "benchmarks", "exec")
    try:
        timing.start()
        for i in range(N):
            exec(code, ls, gs)
        timing.finish()
        t = timing.micro() / 10.0**6
        if t > MIN_TIME_RESOLUTION or force_exit:
            return t / N  # (t - otime(gs, ls, N))/N
        else:
            return time(bmark, gs, ls, N * 2)
    except Exception:
        # FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit and made Ctrl-C impossible mid-run.
        return None
def time(bmark, gs, ls, N=1, force_exit=0): code = compiler.compile(bmark,"benchmarks","exec") try: timing.start() for i in range(N): exec(code, ls, gs) timing.finish() t = timing.micro()/10.0**6 if t > MIN_TIME_RESOLUTION or force_exit: return t/N # (t - otime(gs, ls, N))/N else: return time(bmark, gs, ls, N*2) except: return None
def play_game(game, *players):
    "Play an n-person, move-alternating game."
    # timing added from timing.py
    timing.start()
    state = game.initial
    while True:
        for current in players:
            chosen = current(game, state)
            state = game.make_move(chosen, state)
            if game.terminal_test(state):
                game.display(state)
                timing.finish()
                sec = timing.seconds()
                thousandths = timing.milli() % 1000
                #print "\n{0}.{1} seconds\n\n".format(sec, thousandths)
                return game.end_game(game.utility(state, players[0]), state)
def testpacker():
    """Benchmark Packer/Unpacker over N round trips and print ms totals/averages."""
    N = 2500
    R = range(N)
    import timing
    # See section 4.1.4 of RFC 1035
    timing.start()
    for i in R:
        p = Packer()
        p.addaddr('192.168.0.1')
        p.addbytes('*' * 20)
        p.addname('f.ISI.ARPA')
        p.addbytes('*' * 8)
        p.addname('Foo.F.isi.arpa')
        p.addbytes('*' * 18)
        p.addname('arpa')
        p.addbytes('*' * 26)
        p.addname('')
    timing.finish()
    print(timing.milli(), "ms total for packing")
    # BUG FIX: the loop leaves i == N-1, so the old `/ i` averaged over the
    # wrong count (off by one). Divide by N, the actual iteration count.
    print(round(timing.milli() / N, 4), 'ms per packing')
    # p.dump()
    u = Unpacker(p.buf)
    u.getaddr()
    u.getbytes(20)
    u.getname()
    u.getbytes(8)
    u.getname()
    u.getbytes(18)
    u.getname()
    u.getbytes(26)
    u.getname()
    timing.start()
    for i in R:
        u = Unpacker(p.buf)
        res = (u.getaddr(), u.getbytes(20), u.getname(), u.getbytes(8),
               u.getname(), u.getbytes(18), u.getname(), u.getbytes(26),
               u.getname())
    timing.finish()
    print(timing.milli(), "ms total for unpacking")
    # Same off-by-one fix for the unpacking average.
    print(round(timing.milli() / N, 4), 'ms per unpacking')
def __test_heap_impl(heap, data):
    """Benchmark *heap*: microseconds to insert every item of *data*, then
    microseconds to drain the heap empty. Returns (t_insert, t_extract)."""
    nodes = [heap.make_node(d) for d in data]
    H = heap.make_heap()
    timing.start()
    for node in nodes:
        heap.insert(H, node)
    timing.finish()
    t_insert = timing.micro()
    timing.start()
    while not heap.is_empty(H):
        heap.extract(H)
    timing.finish()
    t_extract = timing.micro()
    return t_insert, t_extract
def testpacker(): N = 25 R = range(N) import timing # See section 4.1.4 of RFC 1035 timing.start() for i in R: p = Packer() p.addbytes('*' * 20) p.addname('f.ISI.ARPA') p.addbytes('*' * 8) p.addname('Foo.F.isi.arpa') p.addbytes('*' * 18) p.addname('arpa') p.addbytes('*' * 26) p.addname('') timing.finish() print round(timing.milli() * 0.001 / N, 3), 'seconds per packing' p.dump() u = Unpacker(p.buf) u.getbytes(20) u.getname() u.getbytes(8) u.getname() u.getbytes(18) u.getname() u.getbytes(26) u.getname() timing.start() for i in R: u = Unpacker(p.buf) res = (u.getbytes(20), u.getname(), u.getbytes(8), u.getname(), u.getbytes(18), u.getname(), u.getbytes(26), u.getname()) timing.finish() print round(timing.milli() * 0.001 / N, 3), 'seconds per unpacking' for item in res: print item
def test_python_structures(): num_vertices = 200 V = [] for i in xrange(num_vertices): V.append(i) G1 = {} timing.start() for u in V: for v in V: G1[u * len(V) + v] = True timing.finish() t1 = timing.micro() G2 = {} timing.start() for u in V: for v in V: G2[(u, v)] = True timing.finish() t2 = timing.micro() print "It took %d %d microseconds to insert %d elements into the dictionary."\ % (t1, t2, num_vertices**2)
def test_python_structures(): num_vertices = 200 V = [] for i in xrange(num_vertices): V.append(i) G1 = {} timing.start() for u in V: for v in V: G1[u*len(V)+v] = True timing.finish() t1 = timing.micro() G2 = {} timing.start() for u in V: for v in V: G2[(u,v)] = True timing.finish() t2 = timing.micro() print "It took %d %d microseconds to insert %d elements into the dictionary."\ % (t1, t2, num_vertices**2)
def main(): import DNS, timing, socket, time res = {} for server in servers: res[server] = [100000,0,0,0] # min,max,tot,failed for what,querytype in lookups: for count in range(rpts): for server in servers: d = DNS.DnsRequest(server=server,timeout=1) fail = 0 timing.start() try: r=d.req(name=what,qtype=querytype) except DNS.Error: fail = 1 timing.finish() if fail: res[server][3] = res[server][3] + 1 print "(failed)",res[server][3] if 0: if r.header['ancount'] == 0: print "WARNING: Server",server,"got no answers for", \ what, querytype t = timing.milli() print server,"took",t,"ms for",what,querytype res[server][0] = min(t,res[server][0]) res[server][1] = max(t,res[server][1]) res[server][2] = res[server][2] + t for server in servers: queries = rpts * len(lookups) r = res[server] print "%-30s %2d/%2d(%3.2f%%) %dms/%dms/%dms min/avg/max" % ( socket.gethostbyaddr(server)[0], queries - r[3], queries, ((queries-r[3])*100.0)/queries, r[0], r[2] / queries, r[1])
def process(args): if os.path.exists(options.output) and options.erase: shutil.rmtree(options.output) safe_makedir(options.output) if options.static is not None: static = options.static.split(','); for dir in static: outpath = os.path.join(options.output, dir) if os.path.exists(outpath): shutil.rmtree(outpath) copytree(dir, outpath) if options.input is not None: timing.start() for dirpath, dirnames, filenames in os.walk(options.input): try: process_dir(dirpath, filenames) except: if options.keepgoing: print 'Error!' else: raise timing.finish() if not options.rss: print 'Website build time: %s' % timing.milli()
def otime(gs, ls, N):
    """Measure loop overhead: microseconds to run N empty iterations."""
    timing.start()
    for _ in range(N):
        pass
    timing.finish()
    return timing.micro()
from test_support import verbose import timing r = range(100000) if verbose: print 'starting...' timing.start() for i in r: pass timing.finish() if verbose: print 'finished' secs = timing.seconds() milli = timing.milli() micro = timing.micro() if verbose: print 'seconds:', secs print 'milli :', milli print 'micro :', micro
def measures_interface(): timing.start() POSITION = True # 30 EPOCH = True # 13 LISTCODES = True # 25 SOURCELIST = True # 39 LINELIST = True # 24 OBSLIST = True # 28 SPECTRALLINE = True # 40 FRAMENOW = True # 18 SHOWFRAME = True # 37 DIRECTION = True # 6 DIRSHOW = True # 7 COMETNAME = True # 3 COMETTYPE = True # 5 COMETTOPO = True # 4 RISE = True # 32 RISESET = True # 33 BASELINE = True # 2 EXPAND = True # 14 SEPARATION = True # 34 POSANGLE = True # 29 RADIALVELOCITY = True # 31 DOPPLER = True # 10 FREQUENCY = True # 19 SOURCE = True # 38 OBSERVATORY = True # 27 EARTHMAGNETIC = True # 12 UVW = True # 46 DONE = True # 9 TODOPPLER = True # 41 TOFREQUENCY = True # 42 TORADIALVELOCITY = True # 43 TORESTFREQUENCY = True # 44 TOUVW = True # 45 MEASURE = True # 26 GETREF = True # 21 GETOFFSET = True # 20 GETTYPE = True # 22 GETVALUE = True # 23 SHOW = True # 35 SHOWAUTO = True # 36 DOFRAME = True # 8 DOSHOWAUTO = True # 11 FRAMEAUTO = True # 15 FRAMECOMET = True # 16 FRAMENOAUTO = True # 17 ADDXVALUE = True # 0 ASBASELINE = True # 1 if (ADDXVALUE): addxvalue() if (ASBASELINE): asbaseline() if (BASELINE): baseline() if (COMETNAME): cometname() if (COMETTOPO): comettopo() if (COMETTYPE): comettype() if (DIRECTION): direction() if (DIRSHOW): dirshow() if (DOFRAME): doframe() if (DONE): done() if (DOPPLER): doppler() if (EARTHMAGNETIC): earthmagnetic() if (EPOCH): epoch() if (EXPAND): expand() if (FRAMECOMET): framecomet() if (FRAMENOW): framenow() if (FREQUENCY): frequency() if (GETOFFSET): getoffset() if (GETREF): getref() if (GETTYPE): gettype() if (GETVALUE): getvalue() if (LINELIST): linelist() if (LISTCODES): listcodes() if (MEASURE): measure() if (OBSERVATORY): observatory() if (OBSLIST): obslist() if (POSANGLE): posangle() if (POSITION): position() if (RADIALVELOCITY): radialvelocity() if (RISE): rise() if (RISESET): riseset() if (SEPARATION): separation() if (SHOW): show() if (SHOWFRAME): showframe() if (SOURCE): source() if (SOURCELIST): sourcelist() if 
(SPECTRALLINE): spectralline() if (TODOPPLER): todoppler() if (TOFREQUENCY): tofrequency() if (TORADIALVELOCITY): toradialvelocity() if (TORESTFREQUENCY): torestfrequency() if (TOUVW): touvw() if (UVW): uvw() timing.finish() print 'Measures interface time is: ', timing.milli() / 1000. return True
def __init__(self): if not pynotify.init("Init"): debug("Error: Failed to initialize pynotify.") sys.exit(1) parser = OptionParser(usage="%prog [options]", version="0.1", description="Tray based notifier for MPD.") parser.add_option('-v', dest='verbosity', help='Display debugging output', action="store_const", const=1) parser.add_option('-d', '--disable-lastfm', dest='lastfm', help='Disable lastfm functionality', action="store_false") parser.add_option('-l', '--disable-lyrics', dest='lyrics', help='Disable lyrics functionality', action="store_false") parser.set_defaults(**DEFAULTS) (option_obj, args) = parser.parse_args() options = {} options['verbosity'] = option_obj.verbosity options['lastfm'] = option_obj.lastfm options['lyrics'] = option_obj.lyrics # FIXME: Ewww. global DEBUG_LVL DEBUG_LVL = options['verbosity'] print options self.track = {} self.artist_url = None self.title_url = None self.uri = None self.tray = None self.menu = None self.buttons = None self.box = None self.n = None self.recent_tracks = [] self.recent_tracks_menu = None self.friends_menus = {} # Initialize 'modules' self.lastfm = Lastfm() self.lyrics = Lyrics() debug("* Populating playlist...") timing.start() self.playlist = mpdclient2.connect().playlistinfo() timing.finish() debug("..done. (%d seconds)" % timing.seconds()) if options['lastfm']: debug("* Enabling last.fm functionality") self.lastfm.enable() debug("* Fetching recently played tracks") self.recent_tracks = self.lastfm.fetch_recent_tracks(LAST_FM['USERNAME']) debug("* Starting friend grabs every minute") gobject.timeout_add(60000, self.lastfm.sched_friends_data) if options['lyrics']: debug("* Enabling lyrics functionality") self.lyrics.enable() self.create_tray() # Check for track change every second gobject.timeout_add(1000, self.check_for_change)
# NOTE(review): this chunk appears to be the interior of an iteration loop
# whose header is outside this view — original indentation was lost.
mesh = nmesh.mesh(objects=[box, ellipsoid],
                  bounding_box=bbox,
                  a0=0.3,
                  mesh_bounding_box=True,
                  fixed_points=fix,
                  max_steps=N,
                  density=density,
                  neigh_force_scale=0.0,
                  shape_force_scale=shape_elem,
                  volume_force_scale=0.0)
timing.finish()
time.append(timing.seconds())
#iteration.append(N)

# save the mesh as a .ps file in temp dir
nmesh.visual.plot2d_ps(mesh, "fig_mesh_shape_%d_iter%06d.ps" % (shape_elem, N))

# extract the mesh data from mesh_info
mesh_info = mesh.tolists()
vtkData, points, simplices, simplexIndicies, icradii, ccradii = \
    nmesh.visual.mesh2vtk(mesh_info, VTKonly=False)
in2circ = nmesh.visual.findRatios(icradii, ccradii, factor=2)  # 2D

# count the number of point after iterations and add to list
nodes.append(len(points))
def measures_interface(): timing.start() POSITION = True # 30 EPOCH = True # 13 LISTCODES = True # 25 SOURCELIST = True # 39 LINELIST = True # 24 OBSLIST = True # 28 SPECTRALLINE = True # 40 FRAMENOW = True # 18 SHOWFRAME = True # 37 DIRECTION = True # 6 DIRSHOW = True # 7 COMETNAME = True # 3 COMETTYPE = True # 5 COMETTOPO = True # 4 RISE = True # 32 RISESET = True # 33 BASELINE = True # 2 EXPAND = True # 14 SEPARATION = True # 34 POSANGLE = True # 29 RADIALVELOCITY = True # 31 DOPPLER = True # 10 FREQUENCY = True # 19 SOURCE = True # 38 OBSERVATORY = True # 27 EARTHMAGNETIC = True # 12 UVW = True # 46 DONE = True # 9 TODOPPLER = True # 41 TOFREQUENCY = True # 42 TORADIALVELOCITY = True # 43 TORESTFREQUENCY = True # 44 TOUVW = True # 45 MEASURE = True # 26 GETREF = True # 21 GETOFFSET = True # 20 GETTYPE = True # 22 GETVALUE = True # 23 SHOW = True # 35 SHOWAUTO = True # 36 DOFRAME = True # 8 DOSHOWAUTO = True # 11 FRAMEAUTO = True # 15 FRAMECOMET = True # 16 FRAMENOAUTO = True # 17 ADDXVALUE = True # 0 ASBASELINE = True # 1 if (ADDXVALUE): addxvalue() if (ASBASELINE): asbaseline() if (BASELINE): baseline() if (COMETNAME): cometname() if (COMETTOPO): comettopo() if (COMETTYPE): comettype() if (DIRECTION): direction() if (DIRSHOW): dirshow() if (DOFRAME): doframe() if (DONE): done() if (DOPPLER): doppler() if (EARTHMAGNETIC): earthmagnetic() if (EPOCH): epoch() if (EXPAND): expand() if (FRAMECOMET): framecomet() if (FRAMENOW): framenow() if (FREQUENCY): frequency() if (GETOFFSET): getoffset() if (GETREF): getref() if (GETTYPE): gettype() if (GETVALUE): getvalue() if (LINELIST): linelist() if (LISTCODES): listcodes() if (MEASURE): measure() if (OBSERVATORY): observatory() if (OBSLIST): obslist() if (POSANGLE): posangle() if (POSITION): position() if (RADIALVELOCITY): radialvelocity() if (RISE): rise() if (RISESET): riseset() if (SEPARATION): separation() if (SHOW): show() if (SHOWFRAME): showframe() if (SOURCE): source() if (SOURCELIST): sourcelist() if 
(SPECTRALLINE): spectralline() if (TODOPPLER): todoppler() if (TOFREQUENCY): tofrequency() if (TORADIALVELOCITY): toradialvelocity() if (TORESTFREQUENCY): torestfrequency() if (TOUVW): touvw() if (UVW): uvw() timing.finish() print 'Measures interface time is: ',timing.milli()/1000. return True
def run(self, function):
    """Invoke *function*, storing its result in self.solution and the
    elapsed wall-clock milliseconds in self.time."""
    timing.start()
    self.solution = function()
    timing.finish()
    self.time = timing.milli()
def __test__():
    """Self-test for persistent object storage: FileStore then DbmStore."""
    from testing import __ok__
    import timing
    print('Testing persistant object storage...')
    name = '_test_persist'
    # Refuse to run over leftovers from a previous (failed) test run
    if os.path.isdir(name):
        print('Directory %s already exists. Cannot complete tests.' % name)
        __ok__(name, None)
        return
    if os.path.exists(name + '.db'):
        print('Dbm file %s.db already exists. Cannot complete tests.' % name)
        __ok__(name, None)
        return
    count = 500

    class _other (object): pass
    class X (Identified, Persistent, _other): pass
    # Promote the locally-defined classes so the storage layer can find them
    for __ in ['_other', 'X']:
        globals()[__] = locals()[__]

    # use a filestore
    p = FileStore(name)
    __ok__(p, p)
    x = X()
    p.add(x)
    id = x.id()
    __ok__(id is not None)
    __ok__(p.exists(id), id in p)
    y = fetch(id)
    __ok__(x.id() == y.id())
    __ok__(x is y)
    found = glob.glob('./%s/*' % name)
    __ok__(len(found), 1)
    __ok__(found[0], found[0])
    y = x = None
    p.destroy(id)
    found = glob.glob('./%s/*' % name)
    __ok__(len(found), 0)
    y = fetch(id)
    __ok__(y, None)
    p.close()
    os.rmdir(name)

    print('Testing persistant filestore...')
    # use a filestore for many items
    p = FileStore(name)
    x = X()
    p.add(x)
    id = x.id()
    __ok__(id is not None)
    p.destroy(x)
    ids = set([])
    timing.start()
    for n in range(1, count + 1):
        y = X()
        y.number = n
        id = p.add(y)
        # Only assert individually for the first few; afterwards just
        # flag collisions, to keep the __ok__ output manageable.
        if n < 5:
            __ok__(id in ids, False)
        elif id in ids:
            __ok__(False, 'id collision on #%d' % n)
        ids.add(id)
    timing.finish()
    print('%g created per second' % (float(n) / (timing.t1 - timing.t0)))
    found = glob.glob('./%s/*.blob' % name)
    __ok__(len(found), len(ids))
    p.close()
    p = FileStore(name)
    for id in ids:
        p.destroy(id)
    found = glob.glob('./%s/*/*' % name)
    __ok__(len(found), 0)
    p.close()
    os.rmdir(name)

    print('Testing persistent dbmstore...')
    # use a dbmstore
    p = DbmStore(name)
    x = X()
    p.add(x)
    id = x.id()
    __ok__(id is not None)
    __ok__(p.exists(id), id in p)
    y = fetch(id)
    __ok__(x.id() == y.id())
    __ok__(x is y)
    y = x = None
    p.destroy(id)
    y = fetch(id)
    __ok__(y, None)
    ids = set([])
    timing.start()
    for n in range(1, count + 1):
        y = X()
        y.number = n
        id = p.add(y)
        if n < 5:
            __ok__(id in ids, False)
        elif id in ids:
            __ok__(False, 'id collision on #%d' % n)
        ids.add(id)
    timing.finish()
    print('%g created per second' % (float(n) / (timing.t1 - timing.t0)))
    p.close()
    p = DbmStore(name)
    for id in ids:
        p.destroy(id)
    p.close()
    os.unlink(name + '.db')