def test_time_ns_type(self):
    def check_ns(sec, ns):
        self.assertIsInstance(ns, int)
        sec_ns = int(sec * 1e9)
        # tolerate a difference of 50 ms (50 * 10**6 ns; the original used
        # 50 ** 6, which is ~15.6 s and contradicts the comment)
        self.assertLess((sec_ns - ns), 50 * 10 ** 6, (sec, ns))

    check_ns(time.time(), time.time_ns())
    check_ns(time.monotonic(), time.monotonic_ns())
    check_ns(time.perf_counter(), time.perf_counter_ns())
    check_ns(time.process_time(), time.process_time_ns())

    if hasattr(time, 'clock_gettime'):
        check_ns(time.clock_gettime(time.CLOCK_REALTIME),
                 time.clock_gettime_ns(time.CLOCK_REALTIME))
def lightSensor(self, r, rp, s):
    """Measure how long it takes to detect a reflection.

    Reconstructed from a broken original; pin roles are inferred:
    r  : reflection sensor input pin
    rp : reflection pulse (emitter) output pin
    s  : secondary sensor input pin (role unclear in the original)
    Returns the reflection latency in nanoseconds, or None on timeout.
    """
    GPIO.setup(r, GPIO.IN)
    GPIO.setup(rp, GPIO.OUT)
    GPIO.setup(s, GPIO.IN)
    GPIO.output(rp, GPIO.HIGH)                # start the light pulse
    start = time.perf_counter_ns()
    timeout_ns = 5 * 10 ** 9                  # assumed 5 s timeout
    try:
        # this is going to count how long it takes to create a reflection
        while GPIO.input(r) == GPIO.LOW:
            if time.perf_counter_ns() - start > timeout_ns:
                return None                   # no reflection detected in time
        return time.perf_counter_ns() - start
    finally:
        GPIO.output(rp, GPIO.LOW)
        GPIO.cleanup()
def benchmark_morris(input_string: str, substring: str):
    start_time = time.perf_counter_ns()
    sub_string_search(input_string, substring)
    end_time = time.perf_counter_ns()
    return abs(end_time - start_time)
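# Illustrative driver for the helper above (a sketch, not from the source:
# the sample text and pattern are made up, and sub_string_search is assumed
# to be the substring-search routine being benchmarked):
if __name__ == "__main__":
    elapsed_ns = benchmark_morris("abracadabra" * 1000, "cadab")
    print(f"substring search took {elapsed_ns} ns")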
def test_insert_data_WithdrawOrder(self):
    """
    Test inserting rows into tb_withdraw_order.
    :return:
    """
    # engine = create_engine("mysql+pymysql://root:[email protected]:3306/pg_database_dev",
    #                        max_overflow=0,
    #                        pool_size=5)
    print('test_insert_data_WithdrawOrder')
    try:
        all_instances = []
        for i in range(10000):
            order = WithdrawOrder(
                serial_id=str(time.strftime('%Y%m%d%H%M%S', time.localtime(time.time())))
                          + str(int(time.perf_counter_ns()))[-7:],
                order_id=''.join(random.sample(population=string.digits * 10, k=15)),
                pro_id=random.randint(0, 100),
                token_name=random.choice(seq=['BTC', 'ETH', 'USDT', 'HTDF']),
                callback_url='http://127.0.0.1:9999/testcallback/test'
                             + ''.join(random.sample(population=string.ascii_lowercase * 10, k=43)),
                from_addr=''.join(random.sample(population=string.ascii_lowercase * 10, k=43)),
                to_addr=''.join(random.sample(population=string.ascii_lowercase * 10, k=43)),
                memo='',
                amount=random.uniform(0.00001, 9999999),  # generate a random float
                block_height=0,
                tx_hash='',
                tx_confirmations=0,
                order_status=random.choice(["PROCESSING", "SUCCESS", "FAIL"]),
                transaction_status=random.choice(["NOTYET", "PENDING", "FAIL", "SUCCESS"]),
                # spellings below kept as-is; they presumably mirror the schema's enum values
                notify_status=random.choice(["NOYET", "FISRTSUCCESS", "FIRSTFAIL",
                                             "SECONDSUCCESS", "SECONDFAIL"]),
                notify_times=random.randint(1, 10),
                block_time=datetime.datetime.now(),
                complete_time=datetime.datetime.now(),
                remark=''
            )
            # session.add(instance=order, _warn=True)
            all_instances.append(order)
        self.session.add_all(instances=all_instances)
        # self.session.commit()
        self.session.flush()
    except Exception as e:
        print(f"error: {e}")
        hCount -= 2
        oQueue.release()
        oCount -= 1
    else:
        countLock.release()
        hQueue.acquire()
    print(threadName + "-> awaiting bond...")
    bondBarrier.wait()
    print(threadName + "-> bonded")


# Run simulation
startTime = time.perf_counter_ns() / 1000000  # milliseconds

# make and start oxygen threads
oxygens = []
for i in range(0, NUM_O):
    oxygens.append(threading.Thread(target=Oxygen, args=("O" + str(i + 1),)))
    oxygens[i].start()

# make and start hydrogen threads
hydrogens = []
for i in range(0, NUM_H):
    hydrogens.append(threading.Thread(target=Hydrogen, args=("H" + str(i + 1),)))
    hydrogens[i].start()

# wait for oxygens to finish
for i in range(0, NUM_O):
from elm import BvsbClassifier, BvsbUtils
from sklearn import datasets
from elm import elmUtils
from sklearn.preprocessing import StandardScaler
import time

print("---------OSELM-BVSB-----------")
data = elmUtils.readDataFileToData("data/zoo.data", targetIndex=0)
print(f'Dataset size: {data.target.size}')
data.data = StandardScaler().fit_transform(data.data)
(train, iter, test) = elmUtils.splitDataWithIter(data.data, data.target, 0.05, 0.3)
print(f'Training set size: {train[1].size}')
print(f'Iterative training set size: {iter[1].size}')
print(f'Test set size: {test[1].size}')

tic = time.perf_counter_ns()
bvsbc = BvsbClassifier(train[0], train[1], iter[0], iter[1], test[0], test[1], iterNum=0.1)
bvsbc.createOSELM(n_hidden=1000)
bvsbc.trainOSELMWithoutKNN()
toc = time.perf_counter_ns()

print(f'OSELM-BVSB-KNN accuracy: {bvsbc.score(test[0], test[1])}')
print(f'OSELM-BVSB-KNN elapsed time: {(toc - tic) / 1000 / 1000} ms')
def buildThalwegsFromTerrain(self, terrain):
    """
    Builds a surface network on a given terrain.

    Start by detecting saddles. Follows with thalwegs and ridges.
    Ridges are constrained to avoid conflicts with thalwegs.

    Parameters
    ----------
    terrain : Terrain
        Instance of the class Terrain on which the network is built.

    Returns
    -------
    None.
    """
    self.nodedict = {}
    self.nodeidx = {}
    self.thalwegdict = {}
    self.ridgedict = {}
    self.terrain = terrain

    # print('Computing all potential saddles')
    start = perf_counter_ns()
    saddledict, saddleidx = self.terrain.computeSaddles()
    # print('Removing conflicting saddles')
    self.mergeSaddles(saddledict, saddleidx)
    end = perf_counter_ns()
    print("Saddle computation time:", end - start)

    start = perf_counter_ns()
    terrain.saddleAndFlowAwareDiagonalisation(self.nodedict)
    end = perf_counter_ns()
    print("Diagonalisation time:", end - start)

    start = perf_counter_ns()
    self.computePits()
    end = perf_counter_ns()
    print("Pit computation time:", end - start)

    start = perf_counter_ns()
    self.createThalwegs()
    self.orderThalwegsAroundNodes()
    end = perf_counter_ns()
    print("Thalweg computation time (including ordering):", end - start)

    start = perf_counter_ns()
    self.computeHills()
    end = perf_counter_ns()
    print("Hill computation time:", end - start)

    start = perf_counter_ns()
    terrain.saddleAndThalwegAwareDiagonalisation()
    end = perf_counter_ns()
    print("Diagonalisation time:", end - start)

    start = perf_counter_ns()
    self.computePeaks()
    end = perf_counter_ns()
    print("Peak computation time:", end - start)
async def cmd_exec(self, code: CodeBlock):
    """
    ->type Play With Me :snake:
    ->signature pg!exec <python code block>
    ->description Run python code in an isolated environment.
    ->extended description
    Import is not available. Various methods of builtin objects have been
    disabled for security reasons.
    The available preimported modules are:
    `math, cmath, random, re, time, string, itertools, pygame`
    To show an image, overwrite `output.img` to a surface (see example command).
    To make it easier to read and write code use code blocks (see
    [HERE](https://discord.com/channels/772505616680878080/774217896971730974/785510505728311306)).
    ->example command pg!exec \\`\\`\\`py
    ```py
    # Draw a red rectangle on a transparent surface
    output.img = pygame.Surface((200, 200)).convert_alpha()
    output.img.fill((0, 0, 0, 0))
    pygame.draw.rect(output.img, (200, 0, 0), (50, 50, 100, 100))```
    \\`\\`\\`
    -----
    Implement pg!exec, for execution of python code
    """
    async with self.channel.typing():
        tstamp = time.perf_counter_ns()
        returned = await sandbox.exec_sandbox(code.code, tstamp,
                                              10 if self.is_priv else 5)
        dur = returned.duration  # the execution time of the script alone

        embed_dict = {
            "color": embed_utils.DEFAULT_EMBED_COLOR,
            "description": "",
            "author": {
                "name": f"Code executed in {utils.format_time(dur)}",
                "url": self.invoke_msg.jump_url,
            },
        }

        file = None
        if returned.exc:
            embed_dict["description"] += "**Exception output:**\n"
            embed_dict["description"] += utils.code_block(returned.exc, 500)
            embed_dict["description"] += "\n"

        if returned.text:
            embed_dict["description"] += "**Text output:**\n"
            embed_dict["description"] += utils.code_block(returned.text, 1500)

        if returned.img:
            embed_dict["description"] += "\n**Image output:**"
            if os.path.getsize(f"temp{tstamp}.png") < 2 ** 22:
                embed_dict["image_url"] = f"attachment://temp{tstamp}.png"
                file = discord.File(f"temp{tstamp}.png")
            else:
                # this branch handles the PNG output; the original message
                # said "GIF", apparently a copy-paste slip
                embed_dict["description"] += (
                    "\n```\nImage could not be sent.\n"
                    "The image file size is above 4MiB```")
        elif returned._imgs:
            embed_dict["description"] += "\n**GIF output:**"
            if os.path.getsize(f"temp{tstamp}.gif") < 2 ** 22:
                embed_dict["image_url"] = f"attachment://temp{tstamp}.gif"
                file = discord.File(f"temp{tstamp}.gif")
            else:
                embed_dict["description"] += (
                    "\n```GIF could not be sent.\n"
                    "The GIF file size is above 4MiB```")

    try:
        await self.response_msg.delete()
    except discord.errors.NotFound:
        # Message already deleted
        pass

    embed = embed_utils.create_from_dict(embed_dict)
    await self.invoke_msg.reply(file=file, embed=embed, mention_author=False)

    if len(returned.text) > 1500:
        with io.StringIO(
                returned.text if len(returned.text) - 40 < self.filesize_limit
                else returned.text[:self.filesize_limit - 40]) as fobj:
            await self.channel.send(file=discord.File(fobj, filename="output.txt"))

    if file:
        file.close()

    for extension in ("gif", "png"):
        if os.path.isfile(f"temp{tstamp}.{extension}"):
            os.remove(f"temp{tstamp}.{extension}")
def print_packet(self, packet):
    # global variables available between instances of sniffed packet
    global startTimer
    global endTimer
    global contents
    global dstList
    global writeInterval
    global netByteRate
    global dportDict
    global sportDict
    global pairDict
    global writeCount  # number of times written
    global meanFlowList

    # Update count of packet entries
    writeCount += 1

    # Process packet flags
    tcpFlagsChk = ['U', 'A', 'P', 'R', 'S', 'F']
    tcpFlagString = str(packet[TCP].flags)  # example: 'PA'
    tcpFlags = [1 if m in tcpFlagString else 0 for m in tcpFlagsChk]

    # IP flags are multi-character ('DF', 'MF'), so match them as substrings
    # of the flag string; the original compared them character by character,
    # which could never match a two-character flag name
    ipFlagsChk = ['DF', 'MF']
    ipFlagString = str(packet[IP].flags)  # example: 'DF'
    ipFlags = [1 if m in ipFlagString else 0 for m in ipFlagsChk]

    # TODO: Process packet payload
    packetPayload = binascii.hexlify(bytes(packet[TCP].payload))
    packetPayloadsList.append(packetPayload)

    # Modbus Application Protocol (MBAP) Header
    # MBAP consists of 7 Bytes
    # 1 B = 2 hex, e.g. 0xFF
    transactionID = packetPayload[0:4]  # 2 bytes
    protocolID = packetPayload[4:8]  # 2 bytes
    lengthField = packetPayload[8:12]  # 2 bytes, describes len of unitID,funcCode,data
    unitID = packetPayload[12:14]  # 1 byte

    # Protocol Data Unit (PDU)
    # PDU consists of Function Code [1 Byte] and Data [x]
    # Modbus TCP embeds a standard Modbus data frame into the TCP frame
    functionCode = packetPayload[14:16]  # 1 byte
    data = packetPayload[16:]  # remaining variable # bytes

    # Activities to monitor -- Raw, Retrieved, Recorded
    csvPacket["MAC Destination"] = packet.dst
    csvPacket["MAC Source"] = packet.src
    csvPacket["Version"] = packet[IP].version
    csvPacket["IHL"] = packet[IP].ihl
    csvPacket["Type of Service"] = packet[IP].tos
    csvPacket["Total Length"] = packet[IP].len
    csvPacket["Identification"] = packet[IP].id
    csvPacket["IP Flags [DF]"] = ipFlags[0]
    csvPacket["IP Flags [MF]"] = ipFlags[1]
    csvPacket["Fragment Offset"] = packet[IP].frag
    csvPacket["Time to Live"] = packet[IP].ttl
    csvPacket["Protocol"] = packet[IP].proto
    csvPacket["Header Checksum"] = packet[IP].chksum
    csvPacket["Source Address"] = packet[IP].src
    csvPacket["Destination Address"] = packet[IP].dst
    csvPacket["Source Port"] = packet[TCP].sport
    csvPacket["Destination Port"] = packet[TCP].dport
    csvPacket["Payload [P]"] = packetPayload
    csvPacket["[P] Transaction ID"] = transactionID
    csvPacket["[P] Protocol ID"] = protocolID
    csvPacket["[P] Length Field"] = lengthField
    csvPacket["[P] Unit ID"] = unitID
    csvPacket["[P] Function Code"] = functionCode
    csvPacket["[P] Data"] = data
    csvPacket["Sequence Number"] = packet[TCP].seq
    csvPacket["Acknowledgement Number"] = packet[TCP].ack
    csvPacket["Data Offset"] = packet[TCP].dataofs
    csvPacket["Reserved"] = packet[TCP].reserved
    csvPacket["TCP Flags [URG]"] = tcpFlags[0]
    csvPacket["TCP Flags [ACK]"] = tcpFlags[1]
    csvPacket["TCP Flags [PSH]"] = tcpFlags[2]
    csvPacket["TCP Flags [RST]"] = tcpFlags[3]
    csvPacket["TCP Flags [SYN]"] = tcpFlags[4]
    csvPacket["TCP Flags [FIN]"] = tcpFlags[5]
    csvPacket["Window Size"] = packet[TCP].window
    csvPacket["Checksum"] = packet[TCP].chksum
    csvPacket["Urgent Pointer"] = packet[TCP].urgptr
    csvPacket["Packet Size [B]"] = len(packet)

    # Append to hash maps
    dportDict[str(packet[TCP].dport)] = None
    sportDict[str(packet[TCP].sport)] = None

    # If we've never seen this pair before, add it
    pairKey = 'MAC: ' + str(csvPacket["MAC Source"]) + ' IP: ' + str(packet[IP].src)
    if pairDict.get(pairKey) is None:
        pairDict[pairKey] = meanflow(client_IP=packet[IP].src, window=1)
        print("Found a new pair!")

    # Record timestamp
    timeStamp = datetime.now()
    csvPacket["Timestamp"] = timeStamp.strftime("%Y-%m-%d %H:%M:%S.%f")

    # TODO: Process Inter-Packet Metrics -- Calculated, Obtained, Derived
    # Inter-Packet Delay Metric,
    # Packet Processing Time in PLC,
    # Packet Rates,
    # Shannon Entropy,
    IPAT = 0  # Inter-packet arrival time -
    # Time of packet arrival
    # Time between packets arriving at same destination
    PPT = 0  # Packet process time -
    # Time PLC takes to process; i.e.
    # Time between receiving and transmitting a packet on PLC
    trueMF = None  # only computed below when the destination was seen before

    # Measure in nanosecond resolution
    timeStamp = time.time()

    # Determine IPD
    if packet.dst in dstList:
        # src -> dst logged before
        # Retrieve last instance of dst when it received something
        index = len(dstList) - 1 - dstList[::-1].index(packet.dst)
        IPAT = timeStamp - dstList[index + 1]
        mf = meanflow(client_IP=packet.src, window=1)

        # When dst IP matches the last src IP,
        if packet.dst in srcList:
            # Retrieve last instance of dst when it sent something
            index = len(srcList) - 1 - srcList[::-1].index(packet.dst)
            endTimer = perf_counter_ns()
            PPT = (endTimer - srcList[index + 1]) / (10 ** 9)
            trueMF = pairDict[pairKey].check_flow()

    # Append (dst, time) to dstList
    dstList.append(packet.dst)
    dstList.append(timeStamp)

    # Start perf timer for the packet (ends when PLC responds)
    startTimer = perf_counter_ns()

    # Append (src, start_time) to srcList
    srcList.append(packet.src)
    srcList.append(startTimer)

    # Calculate protocol overhead
    protocolOverhead = float(len(packet) - len(packetPayload)) / len(packet)
    protocolEff = float(len(packetPayload)) / len(packet)
    if IPAT == 0:
        netByteRate = 0
    else:
        netByteRate = float(csvPacket["Packet Size [B]"]) / IPAT
    throughput = protocolEff * (netByteRate * 8)

    # Extend row by metrics
    csvPacket["Inter-Packet Delay (s)"] = IPAT
    csvPacket["Packet Process Time (s)"] = PPT
    csvPacket["Protocol Overhead"] = protocolOverhead
    csvPacket["Protocol Efficiency"] = protocolEff
    csvPacket["Throughput"] = throughput
    # csvPacket["IP Check Flow Val"] = ipcf
    csvPacket["Client IP and Mean Flow"] = trueMF

    # Contents specific to the row
    contentsRow = list(csvPacket.values())
    # Add the row to the global record of contents
    contents.extend([contentsRow])

    # For every set interval, write
    if len(contents) % writeInterval == 0:
        txsum = 0
        for csvLine in contents:
            with open('output_packets.csv', 'a', newline='') as file_packet:
                writer_packets = csv.writer(file_packet)
                writer_packets.writerow(csvLine)
            # Add up bytes transmitted and then find byte rate to use
            txsum += csvLine[37]
        # netByteRate = float(txsum/len(contents)) / writeCount
        # Reset
        contents = []
# data = datasets.load_digits()
# data = elmUtils.readDataFileToData("./data/glass.data", targetIndex=-1)  # xxxxxxxxxxxx
# data = datasets.fetch_olivetti_faces()  # OK

# --------------------------- datasets ---------------------------------
# Comment out the others and uncomment the one you want to run:
# data = datasets.fetch_olivetti_faces()   # option 1
# data = sklearn.datasets.fetch_covtype()  # option 2
# data = datasets.load_iris()              # option 3
data = datasets.load_digits()              # option 4 (enabled here so the
# script runs; the / 16.0 scaling below matches digits' 0-16 pixel range)
# data = datasets.load_wine()              # option 5
# data = datasets.load_breast_cancer()     # option 6

stdsc = StandardScaler()
data.data = stdsc.fit_transform(data.data) / 16.0
label_size = 0.3

t1 = time.perf_counter_ns()
train, test = elmUtils.splitData(data.data, data.target, 1 - label_size, True)
svm = SVC()
svm.fit(train[0], train[1])
tmp_acc = svm.score(test[0], test[1])
print(f'SVM accuracy: {tmp_acc}')
t2 = time.perf_counter_ns()
print(f'SVM average accuracy: {tmp_acc}')
# perf_counter_ns() is in nanoseconds, so divide by 1e9 for seconds
# (the original divided by 1e7)
print(f'SVM time: {(t2 - t1) / 1_000_000_000} s')
toc = time.perf_counter_ns()
acc_temp = tmp_acc  # record the accuracy of each run
    elif B[0] <= A[0]:
        sl.append(B.pop(0))
    return sl


if __name__ == '__main__':
    # Test
    import random as rnd

    # rlist = [5, 500, 2000, 5000, 10000, 15000, 20000]
    r = int(input("Number of elements in the list: "))
    # r = 20000
    # l = [4, 1, 3, 2]
    # timeListFS = []
    # for r in rlist:
    l = [rnd.randrange(0, r) for i in range(r)]
    # print(l)
    start = perf_counter_ns()
    sl = TriFusion(l)
    end = perf_counter_ns()
    execution_time = round((end - start) * 10 ** (-6), 3)
    print("Time passed (ms):", execution_time)
    if sl == sorted(l):
        print("Sort correct")
    else:
        print("Sort incorrect")
    # timeListFS.append(execution_time)
    # print("sorted list:", sl)
    # print(len(sl))
    # print("Sort done")
    # print(timeListFS)
def generate_ieid():
    return [_flags.boot_id, time.perf_counter_ns()]
import signal
import psutil
import threading
import sqlalchemy as sa
import faulthandler
import gzip
import time
import timeouter
import uuid
import glob
import importlib
import yaml
import re

# python 3.6 compat: perf_counter_ns() was added in 3.7, so fall back to
# scaling the float perf_counter() when it is missing
try:
    time.perf_counter_ns()
except AttributeError:
    time.perf_counter_ns = lambda: int(time.perf_counter() * 1000000000)

try:
    yaml.warnings({'YAMLLoadWarning': False})
except Exception:
    pass

from eva.tools import format_json
from eva.tools import wait_for as _wait_for
from eva.tools import parse_host_port
from eva.tools import get_caller
from eva.tools import SimpleNamespace
from eva.tools import Locker as GenericLocker
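# Quick illustrative self-check (not from the source): with the shim above in
# place, call sites can use time.perf_counter_ns() unconditionally on 3.6+.
_t0 = time.perf_counter_ns()
_t1 = time.perf_counter_ns()
assert isinstance(_t1, int) and _t1 >= _t0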
def stop(self):
    if self.start_time is None:
        raise Exception("Stopwatch not started!")
    self.end_time = time.perf_counter_ns()
import sys
import json
import pickle
import time
import urllib
import urllib.parse
import wsgiref.handlers

import tqdm

json_config_file = 'webconfig.json'
content_bs = open(json_config_file, 'rb').read()
content_str = content_bs.decode('utf-8')
config_obj = json.loads(content_str)

game_info_list_filepath = config_obj['game_info_list_filepath']
game_binary_url_list_filepath = config_obj['game_binary_url_list_filepath']
image_url_info_dict_filepath = config_obj['image_url_info_dict_filepath']

with open(game_info_list_filepath, 'rb') as infile:
    game_info_list = pickle.load(infile)
with open(game_binary_url_list_filepath, 'rb') as infile:
    game_binary_url_list = pickle.load(infile)
with open(image_url_info_dict_filepath, 'rb') as infile:
    image_url_info_dict = pickle.load(infile)

start_time_ns = time.perf_counter_ns()
game_info_list.sort(key=lambda x: x['name'])
end_time_ns = time.perf_counter_ns()
taken_time_ns = end_time_ns - start_time_ns
print('taken_time_ns', taken_time_ns)
# 1 s = 1e9 ns; the original divided by 10e9 (i.e. 1e10), off by a factor of 10
print(f'taken_time_seconds: {(taken_time_ns / 1e9):.3f}')
def start(self):
    if self.start_time is not None:
        raise Exception("Stopwatch already started!")
    self.start_time = time.perf_counter_ns()
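# A minimal usage sketch for the two methods above (the Stopwatch class shell
# and the stand-in workload are assumptions, not from the source):
import time


class Stopwatch:
    def __init__(self):
        self.start_time = None
        self.end_time = None

    def start(self):
        if self.start_time is not None:
            raise Exception("Stopwatch already started!")
        self.start_time = time.perf_counter_ns()

    def stop(self):
        if self.start_time is None:
            raise Exception("Stopwatch not started!")
        self.end_time = time.perf_counter_ns()


sw = Stopwatch()
sw.start()
sum(range(1_000_000))  # stand-in workload
sw.stop()
print(f"elapsed: {sw.end_time - sw.start_time} ns")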
def time_millis():
    return time.perf_counter_ns() // 1_000_000
print(f"path influenced distance 2: {result[1]} for path: {result[0]}") expansions = [ (29, 65), (35, 34), (63, 26), (56, 65), (98, 26), (80, 66), (33, 105), (129, 28), (54, 151), (150, 74), (103, 113), (85, 153), (127, 114), (120, 153), (148, 145), (154, 114) ] total_distance = 0 count = 0 ns_pf = time.perf_counter_ns() pf.normalize_influence(100) pf.heuristic_accuracy = 1 for pos1 in expansions: for pos2 in expansions: result = pf.find_path(pos1, pos2, False) total_distance += result[1] count += 1 ns_pf = time.perf_counter_ns() - ns_pf print(f"pathfinding took {ns_pf / 1000 / 1000} ms. Total distance {total_distance}") print(f"pathfinding took {ns_pf / 1000 / 1000 / count} ms per path.")
def monotonic_ns() -> int:
    """Performance counter for benchmarking as nanoseconds."""  # noqa: D401 - Imperative mood
    return time.perf_counter_ns()
        t = a_pow_n(a, n // 2)       # integer division (the original used /,
        return t * t                 # which produces floats in Python 3)
    else:
        t = a_pow_n(a, (n - 1) // 2)
        return a * t * t


if __name__ == "__main__":
    from time import perf_counter_ns
    import matplotlib.pyplot as plt

    list_n = []
    list_rt = []
    for x in range(10, 1000, 100):
        start_time = perf_counter_ns()
        r = a_pow_n(10, x)
        finish_time = perf_counter_ns()
        running_time = finish_time - start_time
        list_n.append(x)
        list_rt.append(running_time)
        print('pow(10,' + str(x) + ')\t\t time : ' + str(running_time))

    plt.scatter(list_n, list_rt)
    plt.plot(list_n, list_rt)
    plt.title('Divide & Conquer')
    plt.xlabel('n')
    plt.ylabel('Running time (ns)')
    plt.show()
def ping(self):
    start = perf_counter_ns()
    self.send('database', self._now_focus, 'ping', [[], {}])
    message = recv_msg(self.client)
    # halve the round trip for one-way latency, then convert ns -> ms
    cost = (perf_counter_ns() - start) / 2 / 1000000
    return [message[0], '{}ms'.format(str(cost)[:5])]
import time


def multi_print(phrase: str, times):
    for i in range(times):
        print(phrase)


start = time.perf_counter_ns()  # get the time in nanoseconds
multi_print("hello", 10)
end = time.perf_counter_ns()
# the original put the division inside the string literal, so it printed
# "<ns>/1_000_000_000" verbatim; divide first to report seconds
print(f"Time in seconds: {(end - start) / 1_000_000_000}")
def timed(func: Callable[..., Any]) -> Callable[..., Any]:
    # enclosing decorator reconstructed to make the fragment runnable;
    # the name `timed` is an assumption, not from the source
    def inner(*args: Any, **kwargs: Dict[Any, Any]) -> Any:
        beg = time.perf_counter_ns()
        rv = func(*args, **kwargs)
        end = time.perf_counter_ns()
        logging.info('time lapsed(ns) :{}'.format(end - beg))
        return rv
    return inner
def timing(f):
    # enclosing decorator reconstructed so the fragment runs; the name
    # `timing` is an assumption, not from the source
    def wrap(*args, **kw):
        ts = perf_counter_ns()
        result = f(*args, **kw)
        te = perf_counter_ns()
        print(f"func:{f.__name__} took: {te-ts}ns")
        return result
    return wrap
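# Illustrative use of the decorator above (a sketch: busy_sum is a made-up
# workload, and the import covers the fragment's bare perf_counter_ns calls):
from time import perf_counter_ns


@timing
def busy_sum(n):
    return sum(range(n))


busy_sum(1_000_000)  # prints e.g. "func:busy_sum took: 12345678ns"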
import queue
import time

from Node import *

priorityQueue = queue.PriorityQueue()
nodes = Node.init_simplified_romania()
start = nodes['Timisoara']
goal = nodes['Bucharest']

# nodes maps names to Node objects; key visited by the Node itself, since
# that is what gets marked True during the search (the original iterated
# over the name keys, so the initialization never matched)
visited = {}
for node in nodes.values():
    visited[node] = False

start_time = time.perf_counter_ns()
priorityQueue.put((start.get_heuristic_value(), start))
total_distance = 0
node = start

while not priorityQueue.empty():
    last_node = node
    node = priorityQueue.get()[1]
    if node != start:
        total_distance += last_node.child[node]['w']
        last_node = node
    visited[node] = True
    print(f'{node.name} ---> ', end='')
    if node == goal:
# Register SIGIO Signal Handler
import signal, time
import fcntl, os, socket, sys
from termcolor import colored


def sigio_handler(sig, frame):
    end_time = time.perf_counter_ns()
    print(colored('SIGIO detected!', 'green'))
    print('latency = ' + str(end_time - start_time) + 'ns')
    sys.exit(0)


signal.signal(signal.SIGIO, sigio_handler)

# Set PID for SIGIO
# registering PID of otherside process is mandatory for SIGIO to happen
fcntl.fcntl(sock, fcntl.F_SETOWN, os.getpid())

# Open Another Socket
sock2 = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock2.connect(path)

# Write to Socket
start_time = time.perf_counter_ns()
sock2.send(b'0')

# Delay
print('Wait for maximum 5 seconds...')
time.sleep(5)
print(colored('SIGIO not detected!', 'red'))
sys.exit(1)
        else:
            j = m
    if k == s[i]:
        return i
    else:
        return -1


import random
import time


def gen_random_list(n):
    assert (n > 0)
    l = [random.randint(0, 10 * n) for i in range(n)]
    return l


l = gen_random_list(100000000)

start = time.perf_counter_ns()
linear_search(l, -42)
spent = time.perf_counter_ns() - start
print("It took " + str(spent) + " nanoseconds to perform linear search.")

l.sort()

start = time.perf_counter_ns()
binary_search(l, -42)
spent = time.perf_counter_ns() - start
print("It took " + str(spent) + " nanoseconds to perform binary search.")
    return -1


def firstUniqChar_v4(s: str) -> int:
    """Worst case complexity O(n^2), but good for some corner cases."""
    index = [s.index(c) for c in set(s) if s.count(c) == 1]
    return min(index) if index else -1


if __name__ == '__main__':
    print(firstUniqChar('leetcode'), 0)
    print(firstUniqChar('loveleetcode'), 2)
    print(firstUniqChar('abacabac'), -1)

    start = time.perf_counter_ns()
    print(firstUniqChar('the quick brown fox jumps over the lazy dog'), 4)
    print(firstUniqChar('bcdefghijklmnopqrstuvwxyzzyxwvutsrqponmlkjihgfedcba'), 50)
    print(firstUniqChar('bcdefghijklmnopqrstuvwxyzbcdefghijklmnopqrstuvwxyza'), 50)
    print(firstUniqChar('bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbxbbbbb'), 45)
    print('v1', time.perf_counter_ns() - start)

    start = time.perf_counter_ns()
    print(firstUniqChar_v2('the quick brown fox jumps over the lazy dog'), 4)
    print(
        firstUniqChar_v2(
            'bcdefghijklmnopqrstuvwxyzbcdefghijklmnopqrstuvwxyza'), 50)
    print(
def timed(f):
    # enclosing decorator reconstructed to make the fragment self-contained;
    # the name `timed` is an assumption, not from the source
    def measure_time(*args, **kwargs):
        start_time = perf_counter_ns()
        result = f(*args, **kwargs)
        duration = perf_counter_ns() - start_time
        print(f"{f.__name__} executed in: {duration:15} nanos")
        return result
    return measure_time
def benchmark(input_string: str, substring: str, z_function):
    start_time = time.perf_counter_ns()
    get_substrings_indices(input_string, substring, z_function)
    end_time = time.perf_counter_ns()
    return abs(end_time - start_time)
async def process_graphs(self, ctx: MyContext, name: Tuple[Optional[str]],
                         data, _: Callable, log: Optional[bool] = None):
    if log is None:
        msg: discord.Message = await ctx.reply(
            _("React with 📈 for a logarithmic graph.\n"
              "React with 📉 for a linear graph.\n"
              "This message expires after 10 seconds."))

        def predicate(r: discord.RawReactionActionEvent):
            return r.message_id == msg.id and r.user_id == ctx.author.id and (
                r.emoji.name == "📈" or r.emoji.name == "📉")

        await msg.add_reaction("📈")
        await msg.add_reaction("📉")
        try:
            event: discord.RawReactionActionEvent = await self.bot.wait_for(
                "raw_reaction_add", check=predicate, timeout=10)
        except asyncio.TimeoutError:
            await msg.edit(content=_(
                "Timed out. Request a new message with `{0}graphs`", ctx.prefix))
            return
        log = event.emoji.name == '📈'
        await msg.edit(content=_("Please wait, this could take a few seconds..."))
    else:
        msg = await ctx.reply(_("Please wait, this could take a few seconds..."))

    buffer_name = f"{name[1].title() if name[1] else 'world'}_{'log' if log else 'lin'}"
    st = time.perf_counter_ns()
    graph_buffer = graph_cache.get(buffer_name)
    if not graph_buffer:
        cache_hit = False
        graph_buffer = await wrap_in_async(
            graphs.generate_line_plot,
            data,
            name[1].title() if name[1] else "world",
            logarithmic=log,
            thread_pool=True)
        graph_cache[buffer_name] = deepcopy(graph_buffer)
    else:
        graph_buffer = copy(graph_buffer)
        cache_hit = True
    et = time.perf_counter_ns()

    f = discord.File(graph_buffer, filename="image.png")
    tt = et - st
    e = discord.Embed(title=_("Graph for {0}",
                              name[1].title() if name[1] else 'world'),
                      color=discord.Color.dark_red())
    e.set_footer(text=_(
        "Took {0} seconds ({1} nanoseconds) to generate • Cache Status: {2}",
        format(round(tt / 1000000000, 1), ","), format(tt, ","),
        "HIT" if cache_hit else "MISS"))
    e.set_image(url="attachment://image.png")
    await ctx.send(file=f, embed=e)
    if msg:
        await msg.delete()
    else:
        repeat += 1
        repeat_bool = True
    if repeat_count != 0:
        count += repeat_count
    if overfloat:
        if alphabet_U_bool and alphabet_L_bool and digit_bool:  # 3 True
            if repeat_count > 0:
                # count -= 1
                pass
        elif (alphabet_L_bool ^ alphabet_U_bool ^ digit_bool):  # 2 False
            count += 2
            if repeat_count > 2:
                count -= 2
            elif repeat_count > 0:
                count -= 1
        elif not (alphabet_L_bool & alphabet_U_bool & digit_bool):  # 1 False
            count += 1
            if repeat_count > 0:
                count -= 1
    return count


pw = input('Please enter your password\t')
result = Solution()
start = time.perf_counter_ns()
chng = result.strongPasswordChecker(pw)
end = time.perf_counter_ns() - start
print('Number of changes needed:', chng)
# end is in nanoseconds, so divide by 1e6 for milliseconds (the original
# divided by 1000, which yields microseconds)
print('Total time used: {ms} ms'.format(ms=end / 1_000_000))
parser.add_argument('--data-dir', type=str, default='')
args = parser.parse_args()

dataset = Dataset()
dataset.load_events_train(data_path + 'train_dataset.csv', args.sequence_length,
                          data_path + 'partial_dataset.csv')
model = Model(dataset.n_feature_fields, dataset.n_features, 8, 4, 16, 2).to(device)

pytorch_total_params = sum(p.numel() for p in model.parameters())
print('%d parameters in total' % pytorch_total_params)
print('%d samples in total' % len(dataset))

t_begin = time.perf_counter_ns()
train(dataset, model, args)
t_end = time.perf_counter_ns()
print('Total time cost for training: %fs' % ((t_end - t_begin) / 1000000000.0))

begin_index = np.cumsum(dataset.n_events) - dataset.n_events
dataset.initial_data = np.zeros(shape=(dataset.n_procs, dataset.sq_length,
                                       dataset.n_feature_fields))
for proc_id in range(dataset.n_procs):
    dataset.initial_data[proc_id, :, :] = dataset.events[
        begin_index[proc_id] + 0:begin_index[proc_id] + dataset.sq_length + 0, :]

torch.save(model.state_dict(), data_path + 'trace.model')
dataset.serialize(data_path + 'dataset.info')