def __init__(self, fuse, path, flags, *mode):
    # Open-handler for the FLV filesystem: resolve the virtual path and bind
    # read/write/release implementations depending on the entry type.
    self.entry = flv_resolve_path(path)
    # Command name of the process doing the open (e.g. "mplayer").
    self.cmd = pid_to_command(fuse.GetContext()["pid"])
    if isinstance(self.entry, FLVFile):
        # Real on-disk FLV: serve bytes straight from the backing file.
        self.file = file(self.entry.get_real_name())
        self.read = self.__read_flvfile
        self.release = self.__release_flvfile
        self.subtitles = []
        if self.cmd in ["mplayer"]:
            self.attach_subtitles()
        # NOTE(review): indentation reconstructed — timing.start() assumed to
        # run for every FLVFile open, not only for mplayer; confirm.
        timing.start()
    elif isinstance(self.entry, VirtualFile):
        # Mplayer may use virtual files as playlists.
        # Since file contents should not change when
        # mplayer is using it, we copy the contents
        # here instead of reading the latest data.
        # A hack to get mplayer's playlist compatible
        # contents.
        self.entry.mplayer = self.cmd in ["mplayer"]
        self.virt_data = self.entry.get_contents()
        self.read = self.__read_virtual
        # This causes recent-list's last item name to
        # be truncated.
        #del self.entry.mplayer
        # Only the special "language"/"dyncat" entries are writable.
        if self.entry == stats.lookup("language"):
            self.write = self.__write_lang
        elif self.entry == stats.lookup("dyncat"):
            self.write = self.__write_dyncat
def process(table, export=True, update=False): out = '/migrate/data/%s' % table if update: path_to_json = out + '/smc/update-%s.json' % table if not os.path.exists(path_to_json): raise RuntimeError("run the update query") return path_to_json else: path_to_json = out + '/smc/%s.json' % table if not os.path.exists(out): export = True if not export: return path_to_json timing.start(table, 'export_from_rethinkdb') if os.path.exists(out): os.system("rm -rf %s" % out) if table == 'accounts': s = "cd /migrate/smc/src&& . smc-env&& cd /migrate/smc/src/scripts/postgresql/migrate/&&time coffee repeated_emails.coffee" print s if os.system(s): raise RuntimeError("error deduplicating emails") s = "time rethinkdb export --password-file /migrate/secrets/rethinkdb --format json -d %s -c db3 -e smc.%s" % ( out, table) print s if os.system(s): raise RuntimeError("error exporting from rethinkdb - %s" % table) timing.done(table, 'export_from_rethinkdb') return path_to_json
def process(table): if table not in tables: raise RuntimeError("no such table: '%s'" % table) if count: # only count timing.start(table, 'count') print "%s:" % table, sys.stdout.flush() s = "echo 'select count(*) FROM %s' | psql %s" % (table, db) c = os.popen(s).read() i = c.rfind('-') + 1 j = c.rfind("(") print c[i:j].strip() timing.done(table, 'count') return T = tables[table] print T if T.get('skip', False): return if update and not T.get('update', False): return print "get from rethinkdb as json" path_to_json = export_from_rethinkdb.process(table, export, update) print "convert json to csv" path_to_csv = json_to_csv.process(path_to_json, export) if T.get('fix_timestamps', False): print "fix timestamps in the csv file" path_to_csv = fix_timestamps.process(path_to_csv) # path changes print "load csv into database" read_from_csv.process(path_to_csv) print "parse JSONB data in the database to relational data" populate_relational_table.process(table, T.get('replace', False) or not update)
def main(): import DNS, timing, socket, time res = {} for server in servers: res[server] = [100000, 0, 0, 0] # min,max,tot,failed for what, querytype in lookups: for count in range(rpts): for server in servers: d = DNS.DnsRequest(server=server, timeout=1) fail = 0 timing.start() try: r = d.req(name=what, qtype=querytype) except DNS.Error: fail = 1 timing.finish() if fail: res[server][3] = res[server][3] + 1 print "(failed)", res[server][3] if 0: if r.header['ancount'] == 0: print "WARNING: Server",server,"got no answers for", \ what, querytype t = timing.milli() print server, "took", t, "ms for", what, querytype res[server][0] = min(t, res[server][0]) res[server][1] = max(t, res[server][1]) res[server][2] = res[server][2] + t for server in servers: queries = rpts * len(lookups) r = res[server] print "%-30s %2d/%2d(%3.2f%%) %dms/%dms/%dms min/avg/max" % ( socket.gethostbyaddr(server)[0], queries - r[3], queries, ((queries - r[3]) * 100.0) / queries, r[0], r[2] / queries, r[1])
def main():
    """Entry point: prompt for an input source and run the linear search."""
    print("> Welcome to this implementation of linear search!")
    option = '?'
    # Re-prompt until a valid menu option is chosen.
    while option == '?':
        option = input("> 1. Manually enter input\n> 2. Enter input from a file\n")
        if option == '1':
            # Input typed at the command line.
            values, target = search_in.manual_in()
            timing.start()  # time only the search itself
            linear_search(values, target)
            return
        if option == '2':
            # Input read from a file.
            values, target = search_in.file_in()
            timing.start()
            linear_search(values, target)
            return
        print("> ERROR: Incorrect option.")
        option = '?'
def process(args): if os.path.exists(options.output) and options.erase: shutil.rmtree(options.output) if not os.path.exists(options.output): os.makedirs(options.output) if options.static is not None: static = options.static.split(',') for dir in static: outpath = os.path.join(options.output, dir) if os.path.exists(outpath): shutil.rmtree(outpath) copytree(dir, outpath) if options.trac and os.path.isdir(options.trac): projects = read_trac(options.trac) else: if options.data: projects = read_data(options.data) else: projects = [] projects.sort(key=operator.itemgetter('desc'), cmp=locale.strcoll) timing.start() for dirpath, dirnames, filenames in os.walk(options.input): try: process_dir(dirpath, filenames, projects) except: if options.keepgoing: print 'Error!' else: raise timing.finish() print 'Website build time: %s' % timing.milli()
def main():
    """Interactive driver for the bubble-sort demo."""
    print("> Welcome to this implementation of bubble sort!")
    option = '?'
    # Re-prompt until a valid menu option is chosen.
    while option == '?':
        option = input(
            "> 1. Manually enter input\n> 2. Enter input from a file\n")
        if option == '1':
            data = sort_in.manual_in()  # input from the command line
            timing.start()              # time only the sort itself
            sort(data)
            return
        if option == '2':
            data = sort_in.file_in()    # input from a file
            timing.start()
            sort(data)
            return
        print("> ERROR: Incorrect option.")
        option = '?'
    return
def process(table, export=True, update=False): out = '/migrate/data/%s'%table if update: path_to_json = out + '/smc/update-%s.json'%table if not os.path.exists(path_to_json): raise RuntimeError("run the update query") return path_to_json else: path_to_json = out + '/smc/%s.json'%table if not os.path.exists(out): export = True if not export: return path_to_json timing.start(table, 'export_from_rethinkdb') if os.path.exists(out): os.system("rm -rf %s"%out) if table == 'accounts': s = "cd /migrate/smc/src&& . smc-env&& cd /migrate/smc/src/scripts/postgresql/migrate/&&time coffee repeated_emails.coffee" print s if os.system(s): raise RuntimeError("error deduplicating emails") s = "time rethinkdb export --password-file /migrate/secrets/rethinkdb --format json -d %s -c db3 -e smc.%s"%( out, table) print s if os.system(s): raise RuntimeError("error exporting from rethinkdb - %s"%table) timing.done(table, 'export_from_rethinkdb') return path_to_json
def start(): global arg, line, row, house, house_list, start_line, start_row, history, failed_set, driver, num_map #origin_house at house_list[0] house_list.append((copy.deepcopy(house), copy.deepcopy(num_map))) #print travel(2, 2, 1) ''' history = '' if travel(0, 1, 1): url = 'http://www.qlcoder.com/train/crcheck?x='+str(2+1)+'&y='+str(2+1)+'&path='+history print url ''' print 'search 3' for i in range(line): for j in range(row): if house[i][j] == 3: #print '%d,%d\r' % (i, j), #print 'walk', i, j history = '' if travel(i, j, 1): url = 'http://www.qlcoder.com/train/crcheck?x='+str(i+1)+'&y='+str(j+1)+'&path='+history print url return url timing.stop() print 'search 2' timing.start() del house_list[:] house_list = [] house_list.append((copy.deepcopy(house), copy.deepcopy(num_map))) for i in range(line): for j in range(row): if house[i][j] == 2: #print '%d,%d\r' % (i, j), #print 'walk', i, j history = '' if travel(i, j, 1): url = 'http://www.qlcoder.com/train/crcheck?x='+str(i+1)+'&y='+str(j+1)+'&path='+history print url return url timing.stop() print 'search 1,4' timing.start() del house_list[:] house_list = [] house_list.append((copy.deepcopy(house), copy.deepcopy(num_map))) for i in range(line): for j in range(row): if house[i][j] == 4 or house[i][j] == 1: #print '%d,%d\r' % (i, j), #print 'walk', i, j history = '' if travel(i, j, 1): url = 'http://www.qlcoder.com/train/crcheck?x='+str(i+1)+'&y='+str(j+1)+'&path='+history print url return url return None
def wrapper(*args, **kw):
    """Time one call of the wrapped ``function``, append a CSV record of
    (timestamp, function name, microseconds) to timing.csv, and return the
    wrapped function's result.

    Fixes: the original discarded the wrapped function's return value, which
    broke any decorated function whose callers use its result, and it opened
    the log file without a context manager (leaked on a write error).
    """
    timing.start()
    result = function(*args, **kw)
    timing.finish()
    # with-statement guarantees the log file is closed (and flushed) even if
    # the write itself fails.
    with open("timing.csv", "a") as f:
        f.write("%s,%s,%d\n" % (datetime.now(), function.__name__, timing.micro()))
    return result
def call_method(num): global process_size timing.start() z = eval('method' + str(num))() timing.finish() print "method", num print "time", float(timing.micro()) / 1000000 print "output size ", len(z) / 1024, "kb" print "process size", process_size, "kb" print
def _cursor_execute(cursor, sql, vars):
    """Execute ``sql`` on ``cursor``, logging the statement and slow timings."""
    utils.sql_log("\n%s\n" % sql)
    utils.sql_log("\n%s\n" % str(vars))
    # TODO: maybe time things only if debug flag is on
    timing.start()
    cursor.execute(sql, vars)
    timing.finish()
    elapsed_ms = timing.milli()
    if elapsed_ms > config.TIMING_THRESHOLD:
        utils.sql_log("TIMING: " + str(elapsed_ms))
def process(x):
    """Bulk-load CSV dump ``x`` into a fresh ``<name>_json`` staging table."""
    base, ext = os.path.splitext(x)
    name = os.path.split(base)[1]
    # Normalize '<t>-time' and 'update-<t>' dump names back to the table name.
    if name.endswith('-time'):
        name = name[:-5]
    if name.startswith('update-'):
        name = name[len('update-'):]
    timing.start(name, 'read_from_csv')
    cmd = """time echo "drop table %s_json; create table %s_json (a JSONB); copy %s_json from '%s' with (format csv, DELIMITER e'\\1', QUOTE e'\\2');" | psql %s """%(name, name, name, os.path.abspath(x), db)
    print(cmd)
    if os.system(cmd):
        raise RuntimeError("error exporting from rethinkdb - %s"%x)
    timing.done(name, 'read_from_csv')
def testpacker(): N = 2500 R = range(N) import timing # See section 4.1.4 of RFC 1035 timing.start() for i in R: p = Packer() p.addaddr('192.168.0.1') p.addbytes('*' * 20) p.addname('f.ISI.ARPA') p.addbytes('*' * 8) p.addname('Foo.F.isi.arpa') p.addbytes('*' * 18) p.addname('arpa') p.addbytes('*' * 26) p.addname('') timing.finish() print timing.milli(), "ms total for packing" print round(timing.milli() / i, 4), 'ms per packing' #p.dump() u = Unpacker(p.buf) u.getaddr() u.getbytes(20) u.getname() u.getbytes(8) u.getname() u.getbytes(18) u.getname() u.getbytes(26) u.getname() timing.start() for i in R: u = Unpacker(p.buf) res = (u.getaddr(), u.getbytes(20), u.getname(), u.getbytes(8), u.getname(), u.getbytes(18), u.getname(), u.getbytes(26), u.getname()) timing.finish() print timing.milli(), "ms total for unpacking" print round(timing.milli() / i, 4), 'ms per unpacking'
def robotStart(self):
    # Worker entry point: repeatedly pull a start cell from the shared queue
    # and DFS (``self.travel``) from it until a result is found or the queue
    # drains. Queue/result sizes are read under ``self.data_lock``.
    #origin_house at self.house_list[0]
    #print travel(2, 2, 1)
    '''
    self.house_list.append((copy.deepcopy(self.house), copy.deepcopy(self.num_map)))
    self.history = ''
    i = 30
    j = 6
    result = travel_first(i, j, 1)
    if result[0]:
        url = 'http://www.qlcoder.com/train/crcheck?x='+str(i+1)+'&y='+str(j+1)+'&path='+self.history
        print url
    '''
    # print 'search '+str(mission_type)+' '+str(reverse)
    timing.start()
    self.house_list.append((copy.deepcopy(self.house), copy.deepcopy(self.num_map)))
    while True:
        i, j = (-1, -1)
        self.data_lock.acquire()
        result_size = self.result.qsize()
        queue_size = self.queue.qsize()
        if queue_size > 0:
            i, j = self.queue.get()
            self.data_lock.release()
        else:
            # Queue exhausted: nothing left to try.
            self.data_lock.release()
            print self.name+' break1'
            break
        if i != -1 and j != -1 and result_size == 0:
            self.sum_walk += 1
            # print '%d,%d\r' % (i, j),
            print '(%d, %d)\twalk\t%d, %d\t%s%d/%d' \
                % (self.mission_type, self.reverse, i, j,
                   '+' if self.reverse == 0 else '-',
                   self.total_queue-queue_size, self.total_queue)
            self.history = ''  # travel() appends the walked path here
            result = self.travel(i, j, 1)
            if result[0]:
                url = 'http://www.qlcoder.com/train/crcheck?x='+str(i+1)+'&y='+str(j+1)+'&path='+self.history
                print url
                self.sum_second += timing.stop(bPrint=False)
                return url
        else:
            # Another worker already found a result.
            print self.name+' break2'
            break
    self.sum_second += timing.stop(bPrint=False)
    return None
def time(bmark, gs, ls, N=1, force_exit=0):
    """Time ``bmark`` executed ``N`` times, doubling ``N`` until the elapsed
    time is measurable; returns seconds per execution, or None if the
    benchmark raises.

    Bug fix: the bare ``except:`` also swallowed KeyboardInterrupt and
    SystemExit; narrowed to ``except Exception`` so interrupts propagate
    while a failing benchmark still yields None.
    """
    code = compiler.compile(bmark, "benchmarks", "exec")
    try:
        timing.start()
        for i in range(N):
            exec(code, ls, gs)
        timing.finish()
        t = timing.micro() / 10.0**6
        if t > MIN_TIME_RESOLUTION or force_exit:
            return t / N  # (t - otime(gs, ls, N))/N
        else:
            return time(bmark, gs, ls, N * 2)
    except Exception:
        return None
def time(bmark, gs, ls, N=1, force_exit=0):
    """Time ``bmark`` run ``N`` times, doubling ``N`` until measurable.

    Returns seconds per execution, or None when the benchmark raises.

    Bug fix: replaced the bare ``except:`` (which also swallowed
    KeyboardInterrupt/SystemExit) with ``except Exception``.
    """
    code = compiler.compile(bmark, "benchmarks", "exec")
    try:
        timing.start()
        for i in range(N):
            exec(code, ls, gs)
        timing.finish()
        t = timing.micro() / 10.0**6
        if t > MIN_TIME_RESOLUTION or force_exit:
            return t / N  # (t - otime(gs, ls, N))/N
        else:
            return time(bmark, gs, ls, N * 2)
    except Exception:
        return None
def process(file): print "fix timestamps in %s"%file base = os.path.splitext(file)[0] out_filename_csv = base + '-time.csv' if os.path.exists(out_filename_csv): print("output file %s already exists; not overwriting it"%out_filename_csv) return out_filename_csv timing.start(os.path.split(base)[-1], 'fix_timestamps') out = open(out_filename_csv, 'w') for x in open(file).xreadlines(): out.write(json.dumps(fix_timestamps(json.loads(x[:-1]))) + '\n') out.close() timing.done(os.path.split(base)[-1], 'fix_timestamps') return out_filename_csv
def process(file): print "fix timestamps in %s" % file base = os.path.splitext(file)[0] out_filename_csv = base + '-time.csv' if os.path.exists(out_filename_csv): print("output file %s already exists; not overwriting it" % out_filename_csv) return out_filename_csv timing.start(os.path.split(base)[-1], 'fix_timestamps') out = open(out_filename_csv, 'w') for x in open(file).xreadlines(): out.write(json.dumps(fix_timestamps(json.loads(x[:-1]))) + '\n') out.close() timing.done(os.path.split(base)[-1], 'fix_timestamps') return out_filename_csv
def play_game(game, *players):
    "Play an n-person, move-alternating game."
    # Timing instrumentation via timing.py.
    timing.start()
    state = game.initial
    while True:
        for player in players:
            state = game.make_move(player(game, state), state)
            if game.terminal_test(state):
                game.display(state)
                timing.finish()
                elapsed_sec = timing.seconds()
                elapsed_frac = timing.milli() % 1000
                # (elapsed-time report was printed here, now silenced)
                return game.end_game(game.utility(state, players[0]), state)
def testpacker():
    """Benchmark DNS Packer/Unpacker over N pack/unpack cycles.

    Bug fix: per-operation averages divided by the final loop index ``i``
    (= N - 1) instead of the iteration count ``N`` — off-by-one in the
    reported per-packing/per-unpacking time.
    """
    N = 2500
    R = range(N)
    import timing
    # See section 4.1.4 of RFC 1035
    timing.start()
    for i in R:
        p = Packer()
        p.addaddr('192.168.0.1')
        p.addbytes('*' * 20)
        p.addname('f.ISI.ARPA')
        p.addbytes('*' * 8)
        p.addname('Foo.F.isi.arpa')
        p.addbytes('*' * 18)
        p.addname('arpa')
        p.addbytes('*' * 26)
        p.addname('')
    timing.finish()
    print(timing.milli(), "ms total for packing")
    print(round(timing.milli() / N, 4), 'ms per packing')
    # p.dump()
    u = Unpacker(p.buf)
    u.getaddr()
    u.getbytes(20)
    u.getname()
    u.getbytes(8)
    u.getname()
    u.getbytes(18)
    u.getname()
    u.getbytes(26)
    u.getname()
    timing.start()
    for i in R:
        u = Unpacker(p.buf)
        res = (u.getaddr(), u.getbytes(20), u.getname(), u.getbytes(8),
               u.getname(), u.getbytes(18), u.getname(), u.getbytes(26),
               u.getname())
    timing.finish()
    print(timing.milli(), "ms total for unpacking")
    print(round(timing.milli() / N, 4), 'ms per unpacking')
def __test_heap_impl(heap, data):
    """Return (insert_us, extract_us): microseconds to insert all of ``data``
    into a fresh heap from ``heap`` and then drain it."""
    nodes = [heap.make_node(d) for d in data]
    H = heap.make_heap()
    timing.start()
    for node in nodes:
        heap.insert(H, node)
    timing.finish()
    t_insert = timing.micro()
    timing.start()
    while not heap.is_empty(H):
        heap.extract(H)
    timing.finish()
    t_extract = timing.micro()
    return t_insert, t_extract
def process(path_to_json, do_it=True):
    """Turn a RethinkDB JSON dump into a CSV-ish file suitable for COPY and
    return its path; the conversion is skipped when the output already
    exists (unless forced)."""
    base, ext = os.path.splitext(path_to_json)
    path_to_csv = "%s.csv" % base
    if not os.path.exists(path_to_csv):
        do_it = True
    if not do_it:
        return path_to_csv
    name = os.path.split(base)[-1]
    timing.start(name, 'json_to_csv')
    # The grep -v '\\\\u0000' skips any json record with null bytes. These are
    # not valid/meaningful for postgres, and happen in a very small handful of
    # non-important records.
    cmd = "time sed 's/,$//' %s | head -n -1 | tail -n +2 | grep -v '\\\\u0000' > %s" % (
        path_to_json, path_to_csv)
    print(cmd)
    if os.system(cmd):
        raise RuntimeError("error converting json to csv - %s" % path_to_json)
    timing.done(name, 'json_to_csv')
    return path_to_csv
def robotStart(self, mission_type, reverse):
    # Scan the whole board (optionally back-to-front) for cells whose value
    # matches ``mission_type`` and DFS (``travel_first``) from each until one
    # reaches the goal; abandons the scan once any worker queued a result.
    #origin_house at self.house_list[0]
    #print travel(2, 2, 1)
    '''
    self.house_list.append((copy.deepcopy(self.house), copy.deepcopy(self.num_map)))
    self.history = ''
    i = 30
    j = 6
    result = travel_first(i, j, 1)
    if result[0]:
        url = 'http://www.qlcoder.com/train/crcheck?x='+str(i+1)+'&y='+str(j+1)+'&path='+self.history
        print url
    '''
    # print 'search '+str(mission_type)+' '+str(reverse)
    timing.start()
    self.house_list.append(
        (copy.deepcopy(self.house), copy.deepcopy(self.num_map)))
    i_range = range(self.line)
    j_range = range(self.row)
    if reverse == 1:
        # Python 2: range() returns a list, so in-place reverse works here.
        i_range.reverse()
        j_range.reverse()
    for i in i_range:
        for j in j_range:
            self.data_lock.acquire()
            queue_size = self.queue.qsize()
            self.data_lock.release()
            if self.house[i][j] == int(mission_type) and queue_size == 0:
                self.sum_walk += 1
                # print '%d,%d\r' % (i, j),
                print '(%d, %d) walk %d %d' % (self.mission_type, self.reverse, i, j)
                self.history = ''  # travel_first() appends the walked path
                result = self.travel_first(i, j, 1)
                if result[0]:
                    url = 'http://www.qlcoder.com/train/crcheck?x=' + str(
                        i + 1) + '&y=' + str(j + 1) + '&path=' + self.history
                    print url
                    self.sum_second += timing.stop()
                    return url
    self.sum_second += timing.stop()
    return None
def testpacker(): N = 25 R = range(N) import timing # See section 4.1.4 of RFC 1035 timing.start() for i in R: p = Packer() p.addbytes('*' * 20) p.addname('f.ISI.ARPA') p.addbytes('*' * 8) p.addname('Foo.F.isi.arpa') p.addbytes('*' * 18) p.addname('arpa') p.addbytes('*' * 26) p.addname('') timing.finish() print round(timing.milli() * 0.001 / N, 3), 'seconds per packing' p.dump() u = Unpacker(p.buf) u.getbytes(20) u.getname() u.getbytes(8) u.getname() u.getbytes(18) u.getname() u.getbytes(26) u.getname() timing.start() for i in R: u = Unpacker(p.buf) res = (u.getbytes(20), u.getname(), u.getbytes(8), u.getname(), u.getbytes(18), u.getname(), u.getbytes(26), u.getname()) timing.finish() print round(timing.milli() * 0.001 / N, 3), 'seconds per unpacking' for item in res: print item
def robotStart(self, mission_type, reverse):
    # Board scan (optionally reversed) for cells matching ``mission_type``;
    # DFS from each candidate via ``travel_first`` until a path reaches the
    # goal, stopping early when another worker has already queued a result.
    #origin_house at self.house_list[0]
    #print travel(2, 2, 1)
    '''
    self.house_list.append((copy.deepcopy(self.house), copy.deepcopy(self.num_map)))
    self.history = ''
    i = 30
    j = 6
    result = travel_first(i, j, 1)
    if result[0]:
        url = 'http://www.qlcoder.com/train/crcheck?x='+str(i+1)+'&y='+str(j+1)+'&path='+self.history
        print url
    '''
    # print 'search '+str(mission_type)+' '+str(reverse)
    timing.start()
    self.house_list.append((copy.deepcopy(self.house), copy.deepcopy(self.num_map)))
    i_range = range(self.line)
    j_range = range(self.row)
    if reverse == 1:
        # Python 2: range() returns a list, so in-place reverse works here.
        i_range.reverse()
        j_range.reverse()
    for i in i_range:
        for j in j_range:
            self.data_lock.acquire()
            queue_size = self.queue.qsize()
            self.data_lock.release()
            if self.house[i][j] == int(mission_type) and queue_size == 0:
                self.sum_walk += 1
                # print '%d,%d\r' % (i, j),
                print '(%d, %d) walk %d %d' % (self.mission_type, self.reverse, i, j)
                self.history = ''  # travel_first() appends the walked path
                result = self.travel_first(i, j, 1)
                if result[0]:
                    url = 'http://www.qlcoder.com/train/crcheck?x='+str(i+1)+'&y='+str(j+1)+'&path='+self.history
                    print url
                    self.sum_second += timing.stop()
                    return url
    self.sum_second += timing.stop()
    return None
def process(table, replace=False):
    """Populate the relational ``table`` from its ``<table>_json`` staging
    table, then drop the staging table; triggers are disabled for the load
    and re-enabled no matter what."""
    timing.start(table, 'populate_relational_table')
    try:
        disable_triggers(table)
        if replace:
            cmd = 'echo "DELETE FROM %s;" | psql %s' % (table, db)
            print(cmd)
            if os.system(cmd):
                raise RuntimeError("error populating relational data - %s - dropping table failed" % table)
        cmd = "psql --set ON_ERROR_STOP=1 -d %s -a -f %s/sql/import-%s_json.sql" % (db, path, table)
        print(cmd)
        if os.system(cmd):
            raise RuntimeError("error populating relational data - %s" % table)
        cmd = "time echo 'drop table %s_json' | psql %s" % (table, db)
        print(cmd)
        if os.system(cmd):
            raise RuntimeError("error dropping intermediate table %s_json" % table)
        timing.done(table, 'populate_relational_table')
    finally:
        # Triggers must come back even when a step above fails.
        enable_triggers(table)
def generateRouteFeatures(data): t0 = start() features = {} for i, driverID in enumerate(data.keys()): driverData = data[driverID] print "Process driver {0}".format(driverID) features[driverID] = createRoutes(driverData, driverID) if i % 5 == 0: inter(t0, i, len(data)) end(t0) savefile = setFile("/Users/tgadfort/Documents/pymva/axa", "driverPaths.p") saveJoblib(savefile, features, compress=True)
def test_python_structures(): num_vertices = 200 V = [] for i in xrange(num_vertices): V.append(i) G1 = {} timing.start() for u in V: for v in V: G1[u * len(V) + v] = True timing.finish() t1 = timing.micro() G2 = {} timing.start() for u in V: for v in V: G2[(u, v)] = True timing.finish() t2 = timing.micro() print "It took %d %d microseconds to insert %d elements into the dictionary."\ % (t1, t2, num_vertices**2)
def test_python_structures(): num_vertices = 200 V = [] for i in xrange(num_vertices): V.append(i) G1 = {} timing.start() for u in V: for v in V: G1[u*len(V)+v] = True timing.finish() t1 = timing.micro() G2 = {} timing.start() for u in V: for v in V: G2[(u,v)] = True timing.finish() t2 = timing.micro() print "It took %d %d microseconds to insert %d elements into the dictionary."\ % (t1, t2, num_vertices**2)
def main(): import DNS, timing, socket, time res = {} for server in servers: res[server] = [100000,0,0,0] # min,max,tot,failed for what,querytype in lookups: for count in range(rpts): for server in servers: d = DNS.DnsRequest(server=server,timeout=1) fail = 0 timing.start() try: r=d.req(name=what,qtype=querytype) except DNS.Error: fail = 1 timing.finish() if fail: res[server][3] = res[server][3] + 1 print "(failed)",res[server][3] if 0: if r.header['ancount'] == 0: print "WARNING: Server",server,"got no answers for", \ what, querytype t = timing.milli() print server,"took",t,"ms for",what,querytype res[server][0] = min(t,res[server][0]) res[server][1] = max(t,res[server][1]) res[server][2] = res[server][2] + t for server in servers: queries = rpts * len(lookups) r = res[server] print "%-30s %2d/%2d(%3.2f%%) %dms/%dms/%dms min/avg/max" % ( socket.gethostbyaddr(server)[0], queries - r[3], queries, ((queries-r[3])*100.0)/queries, r[0], r[2] / queries, r[1])
def process(args): if os.path.exists(options.output) and options.erase: shutil.rmtree(options.output) safe_makedir(options.output) if options.static is not None: static = options.static.split(','); for dir in static: outpath = os.path.join(options.output, dir) if os.path.exists(outpath): shutil.rmtree(outpath) copytree(dir, outpath) if options.input is not None: timing.start() for dirpath, dirnames, filenames in os.walk(options.input): try: process_dir(dirpath, filenames) except: if options.keepgoing: print 'Error!' else: raise timing.finish() if not options.rss: print 'Website build time: %s' % timing.milli()
def generateDriverModels(data): t0 = start() features = {} for i, driverID in enumerate(data.keys()): driverData = data[driverID] print "Process driver {0}".format(driverID) dm = DriverModel(driverID, driverData) results, headers = dm.agg_mat, dm.agg_headers results = nan_to_num(results) features[driverID] = results if i % 5 == 0: inter(t0, i, len(data)) end(t0) savefile = setFile("/Users/tgadfort/Documents/pymva/axa", "driverModels.p") saveJoblib(savefile, features, compress=True)
def generateTripFeatures(data): t0 = start() features = {} for i, driverID in enumerate(data.keys()): driverData = data[driverID] print "Process driver {0}".format(driverID) results = None for j, trip in enumerate(driverData): tripResults = tripFeatures(trip.values) if results is None: results = tripResults else: results = vstack((results, tripResults)) results = nan_to_num(results) features[driverID] = results if i % 5 == 0: inter(t0, i, len(data)) end(t0) savefile = setFile("/Users/tgadfort/Documents/pymva/axa", "driverTripFeatures.p") saveJoblib(savefile, features, compress=True)
Using a custom made timing script, markers can be timed with ease. All you
need to do is input marker names, and press enter when you complete them.
'''
# NOTE(review): this chunk begins inside the module docstring — its opening
# quotes lie outside the visible region; indentation below is reconstructed.

# Collect marker names until an empty name is entered.
os.system("clear")
marker = "blank"
markers = []
while marker != "":
    marker = input("Marker name: ")
    if marker:
        markers.append(marker)
os.system("clear")
# Ten-second countdown before the timed run begins.
for i in range(10):
    print(str(10 - i) + " seconds remaining to start...")
    time.sleep(1)
    os.system("clear")
os.system("clear")
# Each marker's clock runs until the user presses enter.
for marker in markers:
    timing.start(marker)
    input("Timing for marker \"" + marker + "\"")
    timing.stop(marker)
    os.system("clear")
os.system("clear")
timing.sumtimes(markers)
# Smoke test for the (Python 1.x/2.x) ``timing`` module: time an empty
# 100000-iteration loop and report the interval in three granularities.
from test_support import verbose
import timing

r = range(100000)
if verbose:
    print 'starting...'
timing.start()
for i in r:
    pass
timing.finish()
if verbose:
    print 'finished'
secs = timing.seconds()
milli = timing.milli()
micro = timing.micro()
if verbose:
    # Three views of the same elapsed interval.
    print 'seconds:', secs
    print 'milli :', milli
    print 'micro :', micro
def otime(gs, ls, N):
    """Measure bare loop overhead: microseconds for N empty iterations."""
    timing.start()
    for _ in range(N):
        pass
    timing.finish()
    return timing.micro()
def driver_sendAnswer(ans): global driver driver.get(ans) time.sleep(3) content = driver.find_element_by_xpath("//body").text if content.__contains__(u"请先登陆"): driver_login() driver_sendAnswer(ans) if __name__ == "__main__": driver_init() try: if True: timing.start() init( """ level=79&x=29&y=30&map=001000100000011111000000011000000010001000000100011000000010000000011000100101110010001110111101110000100001000110100000001001000100001000001000100001000010000001100010101000010001011110100011001110000001000001001100001100011000110101100001100001000100001010010000001001001001110000100011000011101111000000000010010000100000001100011000001011000010100000011100011000100010001010011100000000011110001110000001000000000010000110000100000101111110000000110110010001000001110001100100000100000011011000000101110000000001101100011000001001000000111001101100100011101000000000001000100110001010000000100000000010000100011010010110111000011000000001001001000110000000000011111000001100001111000111110000001000111110000000010000100100000010000000110000100110100001011000100011110010010000001001010000101011000111000001000000010100100000010110000100000000000100000111110000110000 """ ) ans = start() timing.stop() else: while True: timing.start() init(driver_getQuestion()) ans = start() if ans is not None: driver_sendAnswer(ans) timing.stop()
def __init__(self):
    # Tray-notifier setup: initialize pynotify, parse CLI options, fetch the
    # MPD playlist, and enable the optional last.fm / lyrics modules.
    if not pynotify.init("Init"):
        debug("Error: Failed to initialize pynotify.")
        sys.exit(1)
    parser = OptionParser(usage="%prog [options]", version="0.1",
                          description="Tray based notifier for MPD.")
    parser.add_option('-v', dest='verbosity', help='Display debugging output',
                      action="store_const", const=1)
    parser.add_option('-d', '--disable-lastfm', dest='lastfm',
                      help='Disable lastfm functionality', action="store_false")
    parser.add_option('-l', '--disable-lyrics', dest='lyrics',
                      help='Disable lyrics functionality', action="store_false")
    parser.set_defaults(**DEFAULTS)
    (option_obj, args) = parser.parse_args()
    options = {}
    options['verbosity'] = option_obj.verbosity
    options['lastfm'] = option_obj.lastfm
    options['lyrics'] = option_obj.lyrics
    # FIXME: Ewww.
    global DEBUG_LVL
    DEBUG_LVL = options['verbosity']
    print options
    # UI / state fields, populated later by create_tray() and friends.
    self.track = {}
    self.artist_url = None
    self.title_url = None
    self.uri = None
    self.tray = None
    self.menu = None
    self.buttons = None
    self.box = None
    self.n = None
    self.recent_tracks = []
    self.recent_tracks_menu = None
    self.friends_menus = {}
    # Initialize 'modules'
    self.lastfm = Lastfm()
    self.lyrics = Lyrics()
    debug("* Populating playlist...")
    timing.start()
    self.playlist = mpdclient2.connect().playlistinfo()
    timing.finish()
    debug("..done. (%d seconds)" % timing.seconds())
    if options['lastfm']:
        debug("* Enabling last.fm functionality")
        self.lastfm.enable()
        debug("* Fetching recently played tracks")
        self.recent_tracks = self.lastfm.fetch_recent_tracks(LAST_FM['USERNAME'])
        debug("* Starting friend grabs every minute")
        gobject.timeout_add(60000, self.lastfm.sched_friends_data)
    if options['lyrics']:
        debug("* Enabling lyrics functionality")
        self.lyrics.enable()
    self.create_tray()
    # Check for track change every second
    gobject.timeout_add(1000, self.check_for_change)
def measures_interface(): timing.start() POSITION = True # 30 EPOCH = True # 13 LISTCODES = True # 25 SOURCELIST = True # 39 LINELIST = True # 24 OBSLIST = True # 28 SPECTRALLINE = True # 40 FRAMENOW = True # 18 SHOWFRAME = True # 37 DIRECTION = True # 6 DIRSHOW = True # 7 COMETNAME = True # 3 COMETTYPE = True # 5 COMETTOPO = True # 4 RISE = True # 32 RISESET = True # 33 BASELINE = True # 2 EXPAND = True # 14 SEPARATION = True # 34 POSANGLE = True # 29 RADIALVELOCITY = True # 31 DOPPLER = True # 10 FREQUENCY = True # 19 SOURCE = True # 38 OBSERVATORY = True # 27 EARTHMAGNETIC = True # 12 UVW = True # 46 DONE = True # 9 TODOPPLER = True # 41 TOFREQUENCY = True # 42 TORADIALVELOCITY = True # 43 TORESTFREQUENCY = True # 44 TOUVW = True # 45 MEASURE = True # 26 GETREF = True # 21 GETOFFSET = True # 20 GETTYPE = True # 22 GETVALUE = True # 23 SHOW = True # 35 SHOWAUTO = True # 36 DOFRAME = True # 8 DOSHOWAUTO = True # 11 FRAMEAUTO = True # 15 FRAMECOMET = True # 16 FRAMENOAUTO = True # 17 ADDXVALUE = True # 0 ASBASELINE = True # 1 if (ADDXVALUE): addxvalue() if (ASBASELINE): asbaseline() if (BASELINE): baseline() if (COMETNAME): cometname() if (COMETTOPO): comettopo() if (COMETTYPE): comettype() if (DIRECTION): direction() if (DIRSHOW): dirshow() if (DOFRAME): doframe() if (DONE): done() if (DOPPLER): doppler() if (EARTHMAGNETIC): earthmagnetic() if (EPOCH): epoch() if (EXPAND): expand() if (FRAMECOMET): framecomet() if (FRAMENOW): framenow() if (FREQUENCY): frequency() if (GETOFFSET): getoffset() if (GETREF): getref() if (GETTYPE): gettype() if (GETVALUE): getvalue() if (LINELIST): linelist() if (LISTCODES): listcodes() if (MEASURE): measure() if (OBSERVATORY): observatory() if (OBSLIST): obslist() if (POSANGLE): posangle() if (POSITION): position() if (RADIALVELOCITY): radialvelocity() if (RISE): rise() if (RISESET): riseset() if (SEPARATION): separation() if (SHOW): show() if (SHOWFRAME): showframe() if (SOURCE): source() if (SOURCELIST): sourcelist() if 
(SPECTRALLINE): spectralline() if (TODOPPLER): todoppler() if (TOFREQUENCY): tofrequency() if (TORADIALVELOCITY): toradialvelocity() if (TORESTFREQUENCY): torestfrequency() if (TOUVW): touvw() if (UVW): uvw() timing.finish() print 'Measures interface time is: ', timing.milli() / 1000. return True
def start(): global arg, line, row, house, house_list, start_line, start_row, history, failed_set, driver, num_map, shadow, sum_walk, sum_second #origin_house at house_list[0] #print travel(2, 2, 1) ''' house_list.append((copy.deepcopy(house), copy.deepcopy(num_map))) history = '' i = 30 j = 6 result = travel_first(i, j, 1) if result[0]: url = 'http://www.qlcoder.com/train/crcheck?x='+str(i+1)+'&y='+str(j+1)+'&path='+history print url ''' print 'search 3' house_list.append((copy.deepcopy(house), copy.deepcopy(num_map))) for i in range(line): for j in range(row): if house[i][j] == 3: sum_walk += 1 #print '%d,%d\r' % (i, j), print 'walk', i, j history = '' result = travel_first(i, j, 1) if result[0]: url = 'http://www.qlcoder.com/train/crcheck?x=' + str( i + 1) + '&y=' + str(j + 1) + '&path=' + history print url return url sum_second += timing.stop() timing.start() print 'search 2' del house_list[:] house_list = [] house_list.append((copy.deepcopy(house), copy.deepcopy(num_map))) for i in range(line): for j in range(row): if house[i][j] == 2: sum_walk += 1 #print '%d,%d\r' % (i, j), print 'walk', i, j history = '' result = travel_first(i, j, 1) if result[0]: url = 'http://www.qlcoder.com/train/crcheck?x=' + str( i + 1) + '&y=' + str(j + 1) + '&path=' + history print url return url sum_second += timing.stop() timing.start() print 'search 1,4' del house_list[:] house_list = [] house_list.append((copy.deepcopy(house), copy.deepcopy(num_map))) for i in range(line): for j in range(row): if house[i][j] == 4 or house[i][j] == 1: sum_walk += 1 #print '%d,%d\r' % (i, j), print 'walk', i, j history = '' result = travel_first(i, j, 1) if result[0]: url = 'http://www.qlcoder.com/train/crcheck?x=' + str( i + 1) + '&y=' + str(j + 1) + '&path=' + history print url return url return None
def start(): global arg, line, row, house, house_list, start_line, start_row, history, failed_set, driver, num_map, shadow, sum_walk, sum_second #origin_house at house_list[0] #print travel(2, 2, 1) ''' house_list.append((copy.deepcopy(house), copy.deepcopy(num_map))) history = '' i = 30 j = 6 result = travel_first(i, j, 1) if result[0]: url = 'http://www.qlcoder.com/train/crcheck?x='+str(i+1)+'&y='+str(j+1)+'&path='+history print url ''' print 'search 3' house_list.append((copy.deepcopy(house), copy.deepcopy(num_map))) for i in range(line): for j in range(row): if house[i][j] == 3: sum_walk += 1 #print '%d,%d\r' % (i, j), print 'walk', i, j history = '' result = travel_first(i, j, 1) if result[0]: url = 'http://www.qlcoder.com/train/crcheck?x='+str(i+1)+'&y='+str(j+1)+'&path='+history print url return url sum_second += timing.stop() timing.start() print 'search 2' del house_list[:] house_list = [] house_list.append((copy.deepcopy(house), copy.deepcopy(num_map))) for i in range(line): for j in range(row): if house[i][j] == 2: sum_walk += 1 #print '%d,%d\r' % (i, j), print 'walk', i, j history = '' result = travel_first(i, j, 1) if result[0]: url = 'http://www.qlcoder.com/train/crcheck?x='+str(i+1)+'&y='+str(j+1)+'&path='+history print url return url sum_second += timing.stop() timing.start() print 'search 1,4' del house_list[:] house_list = [] house_list.append((copy.deepcopy(house), copy.deepcopy(num_map))) for i in range(line): for j in range(row): if house[i][j] == 4 or house[i][j] == 1: sum_walk += 1 #print '%d,%d\r' % (i, j), print 'walk', i, j history = '' result = travel_first(i, j, 1) if result[0]: url = 'http://www.qlcoder.com/train/crcheck?x='+str(i+1)+'&y='+str(j+1)+'&path='+history print url return url return None
def measures_interface(): timing.start() POSITION = True # 30 EPOCH = True # 13 LISTCODES = True # 25 SOURCELIST = True # 39 LINELIST = True # 24 OBSLIST = True # 28 SPECTRALLINE = True # 40 FRAMENOW = True # 18 SHOWFRAME = True # 37 DIRECTION = True # 6 DIRSHOW = True # 7 COMETNAME = True # 3 COMETTYPE = True # 5 COMETTOPO = True # 4 RISE = True # 32 RISESET = True # 33 BASELINE = True # 2 EXPAND = True # 14 SEPARATION = True # 34 POSANGLE = True # 29 RADIALVELOCITY = True # 31 DOPPLER = True # 10 FREQUENCY = True # 19 SOURCE = True # 38 OBSERVATORY = True # 27 EARTHMAGNETIC = True # 12 UVW = True # 46 DONE = True # 9 TODOPPLER = True # 41 TOFREQUENCY = True # 42 TORADIALVELOCITY = True # 43 TORESTFREQUENCY = True # 44 TOUVW = True # 45 MEASURE = True # 26 GETREF = True # 21 GETOFFSET = True # 20 GETTYPE = True # 22 GETVALUE = True # 23 SHOW = True # 35 SHOWAUTO = True # 36 DOFRAME = True # 8 DOSHOWAUTO = True # 11 FRAMEAUTO = True # 15 FRAMECOMET = True # 16 FRAMENOAUTO = True # 17 ADDXVALUE = True # 0 ASBASELINE = True # 1 if (ADDXVALUE): addxvalue() if (ASBASELINE): asbaseline() if (BASELINE): baseline() if (COMETNAME): cometname() if (COMETTOPO): comettopo() if (COMETTYPE): comettype() if (DIRECTION): direction() if (DIRSHOW): dirshow() if (DOFRAME): doframe() if (DONE): done() if (DOPPLER): doppler() if (EARTHMAGNETIC): earthmagnetic() if (EPOCH): epoch() if (EXPAND): expand() if (FRAMECOMET): framecomet() if (FRAMENOW): framenow() if (FREQUENCY): frequency() if (GETOFFSET): getoffset() if (GETREF): getref() if (GETTYPE): gettype() if (GETVALUE): getvalue() if (LINELIST): linelist() if (LISTCODES): listcodes() if (MEASURE): measure() if (OBSERVATORY): observatory() if (OBSLIST): obslist() if (POSANGLE): posangle() if (POSITION): position() if (RADIALVELOCITY): radialvelocity() if (RISE): rise() if (RISESET): riseset() if (SEPARATION): separation() if (SHOW): show() if (SHOWFRAME): showframe() if (SOURCE): source() if (SOURCELIST): sourcelist() if 
(SPECTRALLINE): spectralline() if (TODOPPLER): todoppler() if (TOFREQUENCY): tofrequency() if (TORADIALVELOCITY): toradialvelocity() if (TORESTFREQUENCY): torestfrequency() if (TOUVW): touvw() if (UVW): uvw() timing.finish() print 'Measures interface time is: ',timing.milli()/1000. return True
def run(self, function):
    """Invoke *function* with no arguments, timing the call.

    Stores the callable's return value in ``self.solution`` and the
    elapsed time (as reported by ``timing.milli()``) in ``self.time``.
    """
    timing.start()
    # The attribute assignment sits inside the timed region, matching the
    # original measurement boundaries exactly.
    self.solution = function()
    timing.finish()
    self.time = timing.milli()
def __test__():
    """Self-test for the persistent-object stores (FileStore and DbmStore).

    Creates scratch storage named '_test_persist', exercises single-object
    add/fetch/exists/destroy round-trips plus a 500-object bulk run against
    both store types, and reports each check through testing.__ok__().
    Bails out early if the scratch names already exist on disk.
    """
    from testing import __ok__
    import timing
    # NOTE(review): 'persistant' in the banner strings below is a typo, but
    # the text is runtime output, so it is left untouched here.
    print('Testing persistant object storage...')
    name = '_test_persist'
    # Refuse to run if a previous (failed) run left artifacts behind.
    if os.path.isdir(name):
        print('Directory %s already exists. Cannot complete tests.' % name)
        __ok__(name, None)
        return
    if os.path.exists(name + '.db'):
        print('Dbm file %s.db already exists. Cannot complete tests.' % name)
        __ok__(name, None)
        return
    count = 500  # number of objects for the bulk-insert phases
    class _other (object):
        pass
    class X (Identified, Persistent, _other):
        pass
    # Publish the local test classes under module-level names — presumably
    # needed so fetch()/unpickling can resolve them; confirm against the
    # store machinery.
    for __ in ['_other','X']:
        globals()[__] = locals()[__]
    # use a filestore
    p = FileStore(name)
    __ok__(p, p)
    x = X()
    p.add(x)
    id = x.id()
    __ok__(id is not None)
    __ok__(p.exists(id), id in p)
    y = fetch(id)
    __ok__(x.id() == y.id())
    # Fetching a live object should yield the identical instance.
    __ok__(x is y)
    # Exactly one blob should exist on disk for the one stored object.
    found = glob.glob('./%s/*' % name)
    __ok__(len(found), 1)
    # NOTE(review): this check compares found[0] with itself and can never
    # fail — it looks like a placeholder.
    __ok__(found[0], found[0])
    y = x = None
    p.destroy(id)
    found = glob.glob('./%s/*' % name)
    __ok__(len(found), 0)
    y = fetch(id)
    __ok__(y, None)
    p.close()
    os.rmdir(name)
    print('Testing persistant filestore...')
    # use a filestore for many items
    p = FileStore(name)
    x = X()
    p.add(x)
    id = x.id()
    __ok__(id is not None)
    p.destroy(x)
    ids = set([])
    timing.start()
    for n in range(1, count+1):
        y = X()
        y.number = n
        id = p.add(y)
        if n < 5:
            # The first few ids get an individual uniqueness check.
            __ok__(id in ids, False)
        else:
            # After that, only report when a collision actually occurs.
            if id in ids:
                __ok__(False, 'id collision on #%d' % n)
        ids.add(id)
    timing.finish()
    print('%g created per second' % (float(n)/(timing.t1-timing.t0)))
    # Each stored object should have produced exactly one .blob file.
    found = glob.glob('./%s/*.blob' % name)
    __ok__(len(found), len(ids))
    p.close()
    # Reopen the store and verify destroy() removes everything on disk.
    p = FileStore(name)
    for id in ids:
        p.destroy(id)
    found = glob.glob('./%s/*/*' % name)
    __ok__(len(found), 0)
    p.close()
    os.rmdir(name)
    print('Testing persistent dbmstore...')
    # use a dbmstore
    p = DbmStore(name)
    x = X()
    p.add(x)
    id = x.id()
    __ok__(id is not None)
    __ok__(p.exists(id), id in p)
    y = fetch(id)
    __ok__(x.id() == y.id())
    __ok__(x is y)
    y = x = None
    p.destroy(id)
    y = fetch(id)
    __ok__(y, None)
    # Bulk-insert phase for the dbm-backed store, mirroring the filestore run.
    ids = set([])
    timing.start()
    for n in range(1, count+1):
        y = X()
        y.number = n
        id = p.add(y)
        if n < 5:
            __ok__(id in ids, False)
        else:
            if id in ids:
                __ok__(False, 'id collision on #%d' % n)
        ids.add(id)
    timing.finish()
    print('%g created per second' % (float(n)/(timing.t1-timing.t0)))
    p.close()
    # Reopen, destroy all stored objects, and remove the backing dbm file.
    p = DbmStore(name)
    for id in ids:
        p.destroy(id)
    p.close()
    os.unlink(name + '.db')