async def is_vpn(self, ip: str) -> (bool, bool):
    """Query the IPHub API for *ip*.

    Returns a tuple ``(error, is_vpn)``:
      - ``(True, False)``  -> lookup failed (cooldown active, network
        error, or an unexpected API answer).
      - ``(False, x)``     -> lookup succeeded; ``x`` is True when the
        IP is classified as a VPN/proxy.
    """
    if not self.can_retry():
        # Still cooling down from a previous failure: report an error.
        return (True, False)
    await self.__connect()
    text = await self.__fetch(ip)
    await self.__close()
    if text is None:  # __fetch already logged the failure
        return (True, False)
    result = int(text)
    # IPHub block levels: 0 = residential, 1 = VPN/proxy/hosting,
    # 2 = residential-but-suspicious (treated as non-VPN here).
    if result in (0, 1, 2):
        return (False, result == 1)
    log("ERROR_Unexpected", f"(API_IPHub): {text}")
    return (True, False)
def resetFasta(self):
    """Erase the loaded fasta file and clear every widget depending on it."""
    global fastaRead, userChoices
    sh.click()
    self.removeButton.setEnabled(False)
    # Clear all result/output widgets.
    # (The original called self.submit_nWindow.clear() twice; the
    # duplicate call has been removed.)
    self.genTextEdit.clear()
    self.submit_nWindow.clear()
    self.transTextEdit.clear()
    self.protTextEdit.clear()
    self.phageLabel.clear()
    self.restrictResults.clear()
    self.numeralResults.clear()
    self.REnzSelect.clear()
    userChoices = []
    self.posnResults.clear()
    self.picture_results.clear()
    self.nwindow_results.clear()
    self.nwindow_results_2.clear()
    self.target_seq.clear()
    # Put the buttons back in their initial state.
    self.lookupButton.setEnabled(True)
    self.clearArrayPushButton.setEnabled(False)
    self.detectPushButton.setEnabled(False)
    self.nPosPushButton.setEnabled(False)
    self.find_nWindows.setEnabled(False)
    self.acceptButton.setEnabled(False)
    fastaRead = False
    sh.log("Erased fasta file")
def add_item(self, to_item, name, value, before=False, below=True, attr=False):
    """Add a new tree node for (name, value) relative to `to_item` and return it.

    below=True: insert as a child of `to_item`; a non-element item is
    placed before the first element-child.
    below=False: insert as a sibling of `to_item`, before or after it
    depending on `before`.
    `attr` is forwarded to getshortname when building the node title.
    """
    log('in add_item for {} value {} to {} before is {} below is {}'.
        format(name, value, to_item, before, below))
    if value is None:
        value = ""
    itemtext = self.getshortname((name, value), attr)
    if below:
        add_under = to_item
        insert = -1  # -1 means "append at the end"
        if not itemtext.startswith(ELSTART):
            # Non-element items go before the first element-child.
            itemlist = self.gui.get_node_children(to_item)
            for seq, subitem in enumerate(itemlist):
                if self.gui.get_node_title(subitem).startswith(ELSTART):
                    break
            # NOTE(review): if no child title starts with ELSTART the loop
            # finishes without break, leaving `seq` at the last index, so the
            # item is inserted before the last child instead of appended --
            # confirm whether that is intended.
            if itemlist and seq < len(itemlist):
                insert = seq
    else:
        # Sibling insert: locate the parent and to_item's position in it.
        add_under, insert = self.gui.get_node_parentpos(to_item)
        print('in base.add_item (not below), insert is', insert)
        if not before:
            insert += 1
        print('in base.add_item after correction, insert is', insert)
    item = self.gui.add_node_to_parent(add_under, insert)
    self.gui.set_node_title(item, itemtext)
    self.gui.set_node_data(item, name, value)
    return item
def putFasta(self):
    """Load the user-selected fasta file and populate every sequence widget.

    Fills the DNA/RNA/protein text panes, the GC%% and AT/GC labels, and
    flips the global fastaRead flag so the analysis tabs become usable.
    """
    global fastaRead
    sh.log("\nstart putFasta")
    sh.click()
    self.removeButton.setEnabled(True)
    self.lookupButton.setEnabled(False)
    self.find_nWindows.setEnabled(True)
    fastaName, fasta = fd.readFasta()
    self.phageLabel.setText("{:s}: {:s}".format(fastaName, fasta.phage))
    # Truncate so the sequence length is an exact multiple of 3 (codons).
    usable = len(fasta.purines) - len(fasta.purines) % 3
    purnz = fasta.purines[:usable]
    self.genTextEdit.insertPlainText(purnz)
    self.submit_nWindow.setText(purnz)
    self.rawLabel_2.setText(
        "Use {:s} sequence or paste a new one below".format(fastaName))
    self.sequence = Seq(purnz)
    rna = self.sequence.transcribe()  # DNA sequence -> RNA sequence
    self.transTextEdit.insertPlainText(str(rna))
    # Nucleotide sequence -> protein sequence ('#' marks stop codons).
    protein = rna.translate("Standard", "#")
    self.protTextEdit.insertPlainText(str(protein))
    act, gct, cct, tct = (self.sequence.count(base) for base in "AGCT")
    total = act + gct + cct + tct
    self.gcLabel.setText("{:5.2f}%".format((gct + cct) / total * 100))
    self.atgcLabel.setText("{:4.2f}".format((act + tct) / (gct + cct)))
    fastaRead = True
def find_successor(self, key):
    """Locate the best successor node for *key*.

    The lookup strategy is fixed at object creation via
    ``self.search_type``; supported values are 'linear' and 'finger'.
    """
    if self.search_type == 'linear':
        # Linear search always hands the query to the next node in line.
        return self.fingertable[0][0]
    if self.search_type == 'finger':
        key_int = int(key)
        last_index = len(self.fingertable) - 1
        for index, (host, lower, upper) in enumerate(self.fingertable):
            # (1) Key lies inside this successor's own key range -> done.
            if lower <= key_int <= upper:
                return host
            # (2.1) Last entry: nowhere further to look, return it.
            if index == last_index:
                return host
            # (2.2) Key sits in the gap between this entry and the start
            # of the next entry's range -> this entry is the best match.
            if self.is_in_range_of(key, lower, self.fingertable[index + 1][1]):
                return host
        # Should be unreachable: every key falls into one of the cases
        # above. Log it and fall back to the last known node.
        shared.log(
            f'There\'s a bug in the iteration of the finger table, this occured for key: {key} on host {self.hostname} with finger table: {self.fingertable}',
            'error')
        return self.fingertable[-1][0]
    shared.log(f'search_type invalid: {self.search_type}')
def selectedFile():
    """Store the user's .fasta selection and dismiss the chooser dialog."""
    global son, choice
    sh.click()
    selected = son.sonAList.currentItem().text()
    choice = selected
    sh.log("son choice: {:s}".format(selected))
    son.close()  # dismiss the son dialog
def getFasta():
    """Let the user pick a .fasta file; return (file name, fasta object)."""
    global son, choice
    sh.log("start son")
    son = sonAWdw()
    # Modal dialog: exec_() blocks until selectedFile() closes it.
    son.exec_()
    sh.log("choice = " + choice)
    path = "data/" + choice
    return choice, fasta(path)
def clearArray(self):
    """Drop every selected restriction enzyme and reset the action buttons."""
    global userChoices
    sh.click()
    userChoices = []
    self.REnzSelect.clear()
    sh.log("Clear choices")
    # With no choices left, neither button has anything to act on.
    # (The original first called setEnabled(len(userChoices) > 0) and then
    # immediately setEnabled(False) on the same button -- redundant, since
    # userChoices was just emptied.)
    self.detectPushButton.setEnabled(False)
    self.clearArrayPushButton.setEnabled(False)
def serve(nodeId, port):
    """Start the gRPC key-value server and block until it terminates."""
    address = f'{nodeId}:{port}'
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    keyvaluestore_pb2_grpc.add_KeyValueStoreServicer_to_server(
        keyvaluestore.KeyValueStore(address), server)
    server.add_insecure_port(address)
    server.start()
    shared.log(f'Started GRPC server on port: {port}')
    server.wait_for_termination()
def benchmark_performance_writes():
    """Benchmark write queries.

    Fires batches of write queries, then logs the mean per-query running
    time (ms) and the mean number of hops needed to reach the destination
    node; raw measurements are persisted under a timestamped file name.
    """
    reps = 10
    start = time.perf_counter()
    samples = []
    for _ in range(reps):
        with ThreadPoolExecutor(10) as workers:
            pending = [workers.submit(query_wrapper, i) for i in range(1000)]
            # Collect each (time, hops) pair into the 2-D sample list.
            samples.extend(list(f.result()) for f in pending)
    samples = np.array(samples)
    elapsed = time.perf_counter() - start
    avg_job_time = np.mean(samples[:, 0]) * 1000
    avg_hops = np.mean(samples[:, 1])
    # Save the raw data to a persistent, uniquely named file.
    filename = f'benchmark_performance_writes_{int(round(time.time()))}'
    shared.log(f'{avg_job_time} {avg_hops}', filename)
    for row in samples:
        shared.log(f'{row[0]} {row[1]}', filename)
    # Notify the user on the terminal that the benchmark has finished.
    shared.log(f'Benchmark finished in {elapsed}s')
    shared.log(
        f'Average job running time: {avg_job_time}ms; average hops: {avg_hops}'
    )
def step(self):
    """Refresh the global server/player snapshot from the master servers."""
    global mutex, server_infos, all_players
    addresses = get_sever_addresses(get_master_servers())
    servers = get_server_infos(addresses)
    if len(addresses) > 0 and len(servers) > 0:
        # `with` guarantees the lock is released even if a lookup raises
        # (the original acquire()/release() pair leaked the lock on error).
        with mutex:
            server_infos = servers
            all_players = get_players_info(server_infos)
            log("teeworlds",
                f"Servers: {len(server_infos)} Players: {len(all_players)}")
async def __fetch(self, ip: str) -> str:
    """Ask IPHub about *ip*; return its 'block' value, or None on failure."""
    headers = {'X-Key': self.api_key}
    url = f"http://v2.api.iphub.info/ip/{ip}"
    async with self.session.get(url, headers=headers) as response:
        if response.status != 200:
            # Log the failure and back off before the next attempt.
            text = await response.text(encoding='utf-8')
            log("ERROR", f"(API_IPHub)[{response.status}]: {text}")
            self.increase_cooldown()
            return None
        payload = await response.json()
        self.reset_cooldown()
        return payload['block']
def __init__(self, src, reset=False):
    """Load the fasta file *src* and register it under a one-letter ID.

    With reset=True the class-level ID counter restarts at 'A' (alpha).
    Exits the program with code 12 when the file cannot be read.
    """
    if reset:
        fasta.crntAlfa = 0  # this file ID is alpha
    self.source = src
    try:
        # `with` guarantees the handle is closed even on a read error
        # (the original bare try/open leaked the handle on failure).
        with open(src) as fas:
            # 1st line: name of the phage (leading '>' stripped)
            self.phage = fas.readline()[1:].replace("\n", "")
            self.data = fas.read()  # all the rest of the file
        self.purines = self.data.replace("\n", "")  # one long string
    except OSError:
        print("No valid .fasta file chosen: ", src)
        sh.log("No valid .fasta file chosen: " + src)
        exit(12)
    # Assign a unique name from the alphabet and register this file.
    self.ID = alphabet[fasta.crntAlfa]
    fastaFiles[self.ID] = self
    fasta.crntAlfa += 1
def update_ips(self):
    """Write the global IP/VPN map to disk when new entries have appeared."""
    global mutex, ips
    lines = []
    # Snapshot under the lock; `with` releases it even if formatting
    # raises (the original acquire()/release() pair leaked on error).
    with mutex:
        something_changed = len(ips) > self.ip_count and len(ips) > 0
        if something_changed:
            for key, value in ips.items():
                lines.append(f"{key} {int(value)}\n")
            self.ip_count = len(ips)
    # File I/O happens outside the lock on the snapshotted lines.
    if something_changed:
        lines.sort()
        log("vpn", f"Writing {len(lines)} IPs")
        with open(self.ips_file, 'w') as f:
            f.writelines(lines)
        log("vpn", "Done writing IPs!")
def read_ips(self):
    """Load the persisted IP/VPN map from disk into the global `ips` dict."""
    if not os.path.exists(self.ips_file):
        return
    global mutex, ips
    tmp_dict = {}
    # File format: one "<ip> <0|1>" pair per line.
    with open(self.ips_file) as f:
        for line in f:
            tokens = line.split(" ")
            key = tokens[0].strip()
            try:
                is_vpn = int(tokens[1])
            except (IndexError, ValueError):
                # Malformed line: default to "not a VPN"
                # (narrowed from the original bare `except`).
                is_vpn = 0
            tmp_dict[key] = bool(is_vpn)
    # Swap the map in atomically; `with` releases the lock on error too.
    with mutex:
        ips = tmp_dict
        self.ip_count = len(ips)
    log("vpn", f"Loaded {self.ip_count} IPs.")
async def __fetch(self, ip: str) -> bool:
    """Query ip.teoh.io for *ip*.

    Returns True/False for VPN-ness on success, or None when the call
    failed (after logging and increasing the cooldown).
    """
    url = f"https://ip.teoh.io/api/vpn/{ip}"
    async with self.session.get(url) as response:
        if response.status == 200:
            try:
                # The API serves JSON with a text/plain content type.
                payload = await response.json(content_type='text/plain')
                hosting = int(payload['is_hosting']) == 1
                proxy = payload['vpn_or_proxy'] == "yes"
                self.reset_cooldown()
                return hosting or proxy
            except Exception:
                text = await response.text(encoding='utf-8')
                log("ERROR", f"(API_IP_Teoh_IO): {text}")
        else:
            text = await response.text(encoding='utf-8')
            log("ERROR", f"(API_IP_Teoh_IO)[{response.status}]: {text}")
    # Reached only on the failure paths above.
    self.increase_cooldown()
    return None
def addREnzyme(self):
    """Toggle a restriction enzyme in the user's selection.

    In debug mode the selection is replaced by a fixed enzyme set.
    The selection widget is refilled and the action buttons are enabled
    whenever at least one enzyme is selected.
    """
    global userChoices
    sh.log("\nstart addREnzyme")
    sh.click()
    if sh.debug:
        userChoices = [
            "AanI", "BmeDI", "ZraI", "AquIII", "YkrI", "Bau1417V", "XmnI",
            "Ble402II"
        ]
    else:
        choice = self.REnzList.currentItem().text()
        sh.log("choice:" + choice)
        if choice in userChoices:
            userChoices.remove(choice)  # remove existing item
        else:
            userChoices.append(choice)  # add this item
        sh.log("user choices:" + str(userChoices))
    # Refill the selection display (this loop was duplicated verbatim in
    # both branches of the original).
    self.REnzSelect.clear()
    for enz in userChoices:
        self.REnzSelect.insertPlainText("{:s}\n".format(enz))
    self.detectPushButton.setEnabled(len(userChoices) > 0)
    self.clearArrayPushButton.setEnabled(len(userChoices) > 0)
def SetValue(self, request, context):
    """Store request.key/value on the responsible node.

    If this node (or its immediate successor) owns the key, it is stored
    locally and replicated; otherwise the request is forwarded towards
    the responsible node. Returns a SetResponse whose `hops` counts the
    forwarding steps.
    """
    # Entry point: the key falls into the current node's own range.
    if self.is_in_range_of(request.key, self.keyrange_lower,
                           self.keyrange_upper + 1):
        # Also tell the predecessor to maintain its replica of this key.
        if request.type != 'replication':
            with grpc.insecure_channel(self.predecessor) as channel:
                stub = keyvaluestore_pb2_grpc.KeyValueStoreStub(channel)
                request.type = 'replication'
                stub.SetValue(request)
        # Set our own key
        self.table[request.key] = request.value
        return keyvaluestore_pb2.SetResponse(key=request.key, hops=0)
    # Entry point: we are the predecessor of the responsible node.
    elif self.is_in_range_of(request.key, self.fingertable[0][1],
                             self.fingertable[0][2] + 1):
        # Keep our replica and tell the successor to store the real copy.
        if request.type != 'replication':
            with grpc.insecure_channel(self.fingertable[0][0]) as channel:
                stub = keyvaluestore_pb2_grpc.KeyValueStoreStub(channel)
                request.type = 'replication'
                stub.SetValue(request)
        # Set our own key
        self.table[request.key] = request.value
        return keyvaluestore_pb2.SetResponse(key=request.key, hops=0)
    else:
        # Not ours: forward to the best-known successor for the key.
        successor = self.find_successor(request.key)
        try:
            with grpc.insecure_channel(successor) as channel:
                stub = keyvaluestore_pb2_grpc.KeyValueStoreStub(channel)
                response = stub.SetValue(request)
        except Exception as err:
            # BUG FIX: the original swallowed the exception, left
            # `response` as None, and then crashed on `response.hops += 1`
            # with an AttributeError. Log and propagate instead.
            shared.log(
                f'An error occured whilst attempting to reach {successor} the following exception was raised:')
            shared.log(f'{err}')
            raise
        response.hops += 1
        return response
def acceptFastas(self):
    """Show the chosen fasta files and concatenate them into data/all.fasta."""
    sh.log("\nstart acceptFastas")
    sh.click()
    self.acceptButton.setEnabled(False)
    savFile = "data/all.fasta"
    if Path(savFile).is_file():
        Path(savFile).unlink()  # delete old all.fasta
    self.showSelects.clear()
    first = True  # first file gets ID alpha
    # `with` closes the output file even if a fasta fails to load
    # (the original open()/close() pair leaked the handle on error).
    with open(savFile, "a") as sav:
        for choice in self.fastaChoices:
            fast = fd.fasta("data/" + choice, reset=first)
            self.showSelects.insertPlainText("{:s}: {:s} -> {:s}\n".format(
                fast.ID, choice, fast.phage))
            self.showSelects.insertPlainText(
                "length = {:,d} > {:s} ... {:s}\n\n".format(
                    len(fast.purines), fast.purines[:20], fast.purines[-20:]))
            print("> {:s} {:s}".format(fast.ID, fast.phage), file=sav)
            print(fast.data, file=sav)  # copy choice to all.fasta
            first = False
    self.showSelects.insertPlainText(
        "The concatenation of these choices saved as {:s}".format(savFile))
def readFasta():
    """Read a fasta file chosen by the user (a fixed one in debug mode)."""
    if sh.debug:
        fastaName = "NHagos.fasta"
        fast = fasta("data/NHagos.fasta")
    else:
        fastaName, fast = getFasta()
    sh.log(".fasta file: " + fast.source)
    sh.log("phage: " + fast.phage)
    # List every fasta file registered so far.
    for key, value in fastaFiles.items():
        sh.log("{:s}: {:s}".format(key, value.phage))
    return fastaName, fast
def addFile(self):
    """Toggle the highlighted data file in the fasta selection list."""
    sh.click()
    sh.log("\nstart addFile")
    choice = self.dataList.currentItem().text()
    sh.log("choice: " + choice)
    if choice in self.fastaChoices:
        self.fastaChoices.remove(choice)  # deselect an existing item
    else:
        self.fastaChoices.append(choice)  # select this item
    sh.log(".fasta choices: " + str(self.fastaChoices))
    # Redraw the selection widget from scratch.
    self.fastaSelects.clear()
    for name in self.fastaChoices:
        self.fastaSelects.insertPlainText("{:s}\n".format(name))
    self.acceptButton.setEnabled(len(self.fastaChoices) > 0)
def node_summary(self):
    """Log a human-readable status report for this node."""
    capacity = self.keyrange_upper - self.keyrange_lower + 1
    shared.log('=============================')
    shared.log(f'Summary for node: {self.hostname} ({self.nodeindex})')
    shared.log(f'Key capacity: {len(self.table)}/{capacity}')
    shared.log(
        f'Key range: [{self.keyrange_lower}-{self.keyrange_upper}], [{self.fingertable[0][1]}-{self.fingertable[0][2]}]')
    shared.log(f'Successor: {self.fingertable[0]}')
    shared.log(f'Predecessor: {self.predecessor}')
    shared.log(f'Finger table: {self.fingertable}')
def markov(self):
    """Compute mono-, bi- and trigram transition probabilities of the
    loaded sequence (Markov model), show a text report and save a bar
    graph of all transition states to pictures/Results.png.

    Markov Model Algorithm gathered from Drexel University:
    https://faculty.coe.drexel.edu/gailr/ECE-S690-503/markov_models.ppt.pdf
    Equation used: aBA = Pr(xi=B | xi-1=A)
    """
    sh.log("\nstart markov")
    sh.click()
    self.nPosPushButton.setEnabled(
        False)  # Need new fasta to run this again
    seq = 'ATGC'
    single = [x for x in seq]
    double = [x + y for x in seq for y in seq]
    triple = [x + y + z for x in seq for y in seq for z in seq]
    # Monogram probability = base count / sequence length.
    monograms = {
        monos: self.sequence.count(monos) / len(self.sequence)
        for monos in single
    }
    mono_counts = sum(monograms.values())  # Must be = 1.0 (sanity value; unused)
    # The following finds the probability of dinucleotides in the sequence.
    # DIGRAMS are used so the full length is covered; the last base is
    # dropped because it cannot start a pair.
    adjusted_sequence = self.sequence[:-1]
    bi_monograms = {
        items: adjusted_sequence.count(items)
        for items in single
    }
    # P(pair) conditioned on its first base.
    # NOTE(review): divides by bi_monograms[items[0]] -- this raises
    # ZeroDivisionError if a base never occurs in the sequence; confirm
    # inputs always contain all four bases.
    bigrams = {
        items: self.sequence.count(items) / bi_monograms[items[0]]
        for items in double
    }
    sh.log("monograms: " + str(monograms))
    sh.log("bi monograms " + str(bi_monograms))
    sh.log("bigrams " + str(bigrams))
    sh.log("double = " + str(double))
    bi = {x + y: bigrams[x + y] for x in seq for y in seq}
    self.posnResults.insertPlainText("Results 20\n")  # progress marker (cleared below)
    # Same idea for trinucleotides: TRIGRAMS conditioned on their leading
    # pair; the last two bases cannot start a triple.
    adjusted_sequence = self.sequence[:-2]
    tri_monograms = {
        items: adjusted_sequence.count(items)
        for items in single
    }
    tri_bigrams = {
        items: adjusted_sequence.count(items)
        for items in double
    }
    trigrams = {
        items: self.sequence.count(items) / tri_bigrams[items[:-1]]
        for items in triple
    }
    sh.log("tri_monograms " + str(tri_monograms))
    sh.log("tri_bigrams " + str(tri_bigrams))
    sh.log("trigrams " + str(trigrams))
    self.posnResults.insertPlainText("Results 30")  # progress marker (cleared below)
    tri = {
        x + y + z: trigrams[x + y + z]
        for x in seq for y in seq for z in seq
    }
    self.posnResults.clear()
    # Generate the text report on the probabilities.
    sep = '-----------------------------------------'
    rpt = "{:s}\nMONOGRAM PROBABILITIES\n\n".format(sep)
    for mon in seq:
        rpt += "{:s}: {:11.9f} \n".format(mon, monograms[mon])
    rpt += "\n\nTotal = {:3.1f}\n{:s}\nBIGRAM PROBABILITIES\n\n".format(
        sum(monograms.values()), sep)
    for duo in double:
        rpt += "{:2s}: {:11.9f} \n".format(duo, bi[duo])
        if duo[1] == "C":
            rpt += "\n"  # blank line after each group ending in C
    rpt += "\nTotal = {:3.1f}\n{:s}\nTRIGRAM PROBABILITIES\n\n".format(
        sum(bi.values()), sep)
    spc = 4
    for tre in triple:
        rpt += "{:s}: {:11.9f} \n".format(tre, tri[tre])
        if tre[2] == "C":
            rpt += "\n"
            spc -= 1
            if spc <= 0:
                rpt += "\n"  # blank line between groups of 4
                spc = 4
    rpt += "Total = {:3.1f}\n{:s}\n".format(sum(tri.values()), sep)
    #sh.log(str(rpt))
    self.posnResults.clear()  # Ensure we print to a blank window
    self.posnResults.insertPlainText(rpt)
    # ---------------------------------------------------------
    # Create a Bar Graph of all transition probabilities.
    # prob = dictionary of mono-, bi-, tri- grams.
    # Each dictionary value holds 2 parallel lists:
    # [gram code] and the corresponding [probability]
    # ---------------------------------------------------------
    prob = {xx: [[], []] for xx in seq}
    getStates(prob, monograms)
    getStates(prob, bigrams)
    getStates(prob, trigrams)
    for key, value in prob.items():
        sh.log("prob[{:s}]: {:s}".format(key, str(value[0])))
        sh.log("           {:s}".format(str(value[1])))
    # One subplot per base: the transition states leaving that base.
    fig, a = plt.subplots(2, 2)
    graphs = [a[0][0], a[0][1], a[1][0], a[1][1]]
    fig.set_size_inches(10, 8)
    a[0][0].bar(prob["A"][0], prob["A"][1])
    a[0][0].set_title('p(A) Transition States', fontsize=14)
    a[0][1].bar(prob["T"][0], prob["T"][1])
    a[0][1].set_title('p(T) Transition States', fontsize=14)
    a[1][0].bar(prob["C"][0], prob["C"][1])
    a[1][0].set_title('p(C) Transition States', fontsize=14)
    a[1][1].bar(prob["G"][0], prob["G"][1])
    a[1][1].set_title('p(G) Transition States', fontsize=14)
    for subs in graphs:
        # Rotate x labels so the gram codes stay readable.
        plt.setp(subs.xaxis.get_majorticklabels(), rotation=90)
        subs.set_ylim(0, 1)
    plt.tight_layout(pad=1.5)
    if not Path('pictures').exists():
        Path('pictures').mkdir()  # create the directory if it is missing
    plt.savefig('pictures/Results.png', dpi=100)
    self.picture_results.setPixmap(QPixmap('pictures/Results.png'))
def find_windows(self):
    """Split the (framed, optionally transcribed/translated) sequence
    into fixed-size windows, report windows containing the user's motif
    (MTT), and display all full-size windows.

    Window slicing follows string[start_index:end_index:step] semantics.
    """
    global nwindow
    sh.click()
    sh.log("\nStart find_windows")
    length = int(self.nWindow_length.value())
    # First, apply the user's FRAME reference.
    # NOTE(review): the negative frames reverse the string; they do NOT
    # build a reverse *complement* -- confirm that this is intended.
    frame = self.frame_select.currentText()
    if frame == 'Frame 1':
        temp_hold = self.submit_nWindow.text()
    elif frame == 'Frame 2':
        temp_hold = self.submit_nWindow.text()[1:]
    elif frame == 'Frame 3':
        temp_hold = self.submit_nWindow.text()[2:]
    elif frame == 'Frame -1':
        temp_hold = self.submit_nWindow.text()[::-1]
    elif frame == 'Frame -2':
        temp_hold = self.submit_nWindow.text()[-2::-1]
    elif frame == 'Frame -3':
        temp_hold = self.submit_nWindow.text()[-3::-1]
    # Second, use BIOPYTHON to transform the text per the central dogma.
    kind = self.type_nwindow.currentText()
    if kind == 'basic':
        nwindow = Seq(temp_hold).complement()
    elif kind == 'transcribe':
        nwindow = Seq(temp_hold).transcribe()
    elif kind == 'translate':
        nwindow = Seq(temp_hold).translate()
    window_out = [
        str(nwindow)[i:i + length] for i in range(0, len(nwindow), length)
    ]
    sh.log("window_out ({:n}): {:s}".format(len(window_out),
                                            str(window_out)[:80]))
    # Third, find and display windows containing the requested MOTIF (MTT).
    self.nwindow_results_2.clear()
    tgtMTT = self.target_seq.text().upper()  # ensure MTT is all capitals
    if len(tgtMTT) > length:
        self.nwindow_results_2.insertPlainText(
            "MTT length must be <= {:n}".format(length))
    elif len(tgtMTT) == 0:
        self.nwindow_results_2.insertPlainText("No MTT selected")
    else:
        targetSet = set()  # set of all windows containing an MTT match
        for finds in window_out:
            if finds.find(tgtMTT) >= 0:
                targetSet.add(finds)
        if len(targetSet) == 0:
            self.nwindow_results_2.insertPlainText(
                'No matches found for {:s} ({:s})'.format(
                    tgtMTT, self.type_nwindow.currentText().upper()))
        else:
            self.nwindow_results_2.insertPlainText(
                'Target Spotted! - Displaying your {:s} ({:s}) report below\n\n' \
                .format(tgtMTT, self.type_nwindow.currentText().upper()))
            for finds in targetSet:
                self.nwindow_results_2.insertPlainText(finds + '\n')
    # Drop any trailing short window. BUG FIX: the original popped items
    # from window_out while iterating it (modify-during-iteration), which
    # can silently skip elements; a filter comprehension is safe.
    window_out = [s for s in window_out if len(s) >= length]
    xlate = str.maketrans("", "", "'[],")  # don't show "[],'" in the dump
    self.nwindow_results.setPlainText(str(window_out).translate(xlate))
def listFastaFiles(self):
    """Populate the FastX tab with the available fasta data files."""
    sh.log("\nstart listFastaFiles")
    fd.showValidFastaFiles(self.dataList)
async def __fetch(self, ip: str) -> str:
    """Query getipintel.net for *ip*.

    Returns the raw response text on success, or None after logging the
    failure and increasing the cooldown.
    """
    params = {'ip': ip, 'contact': self.email}
    async with self.session.get("http://check.getipintel.net/check.php",
                                params=params) as response:
        if response.status == 200:
            self.reset_cooldown()
            return await response.text(encoding='utf-8')
        if response.status == 400:
            # The body holds a (negative) numeric error code.
            error_code = await response.text(encoding='utf-8')
            try:
                error_code = int(error_code)
            except ValueError:
                pass  # keep the raw text for the fallback message
            # Documented getipintel error codes mapped to their messages.
            known_errors = {
                -1: "Invalid no input",
                -2: "Invalid IP address",
                -3: "Unroutable address / private address",
                -4: "Unable to reach database, most likely the database is being updated. Keep an eye on twitter for more information.",
                -5: "Your connecting IP has been banned from the system or you do not have permission to access a particular service. Did you exceed your query limits? Did you use an invalid email address? If you want more information, please use the contact links below.",
                -6: "You did not provide any contact information with your query or the contact information is invalid.",
            }
            reason = known_errors.get(error_code, f"{error_code}")
            log("ERROR", f"(API_GetIPIntel_Net)[{response.status}]: {reason}")
        elif response.status == 429:
            log(
                "ERROR",
                f"(API_GetIPIntel_Net)[{response.status}]: If you exceed the number of allowed queries, you'll receive a HTTP 429 error."
            )
        else:
            text = await response.text(encoding='utf-8')
            log("ERROR", f"(API_GetIPIntel_Net)[{response.status}]: {text}")
        # All non-200 paths fall through here: back off and retry later.
        self.increase_cooldown()
        return None
res.append(ipaddress.ip_network(n)) return res if __name__ == "__main__": # credentials load_dotenv() discord_token = os.getenv('DISCORD_TOKEN') email = os.getenv('EMAIL') iphub_token = os.getenv('IPHUB_TOKEN') is_valid = validate_email(email) if not is_valid: log( "FATAL", "Passed email is not valid, please use a non made up email address" ) sys.exit(1) # initialize global variables mutex = threading.Lock() server_infos = dict() all_players = [] ips = dict() # https://en.wikipedia.org/wiki/Reserved_IP_addresses invalid_vpn_networks = fill_invaild_networks([ "0.0.0.0/8", "10.0.0.0/8", "100.64.0.0/10", "127.0.0.0/8",
async def on_message(self, message):
    """Discord message handler: dispatches the bot's commands
    (!help, !player, !online, !onlineplayers, !vpn, !ip_filter)."""
    global mutex, server_infos, all_players, ips, email, iphub_token, invalid_vpn_networks
    # Ignore our own messages.
    if message.author == self.user:
        return
    text = message.content
    if text.startswith("!help"):
        await message.channel.send(
            """Teeworlds Discord Bot by jxsl13. Have fun.
Commands:
**!p[layer]** <player> - Check whether a player is currently online
**!o[nline]** <gametype> - Find all online servers with a specific gametype
**!o[nline]p[layers]** <gametype> - Show a list of servers and players playing a specific gametype.
**!vpn** <IP> - check if a given IP is actually a player connected via VPN(this feature doesn't work on servers, PM the bot.).
**!ip_filter** <text> - given a random text, the bot will return all unique IPs of that text.
""")
    elif text.startswith("!player ") or text.startswith("!p "):
        tokens = text.split(" ", maxsplit=1)
        if len(tokens) != 2:
            return
        # Snapshot-protected lookup in the global player list.
        mutex.acquire()
        player = find_player(tokens[1], all_players)
        mutex.release()
        if player:
            await message.channel.send(
                f"'{escape(player['name'])}' is currently playing on '{escape(server_infos[player['address']]['name'])}'"
            )
        else:
            await message.channel.send(
                f"No such player found: '{tokens[1]}'")
    elif text.startswith("!online ") or text.startswith("!o "):
        tokens = text.split(" ", maxsplit=1)
        if len(tokens) != 2:
            return
        mutex.acquire()
        servers = find_online_servers(tokens[1], server_infos)
        mutex.release()
        answer = ""
        if len(servers) > 0:
            line = ""
            for server in servers:
                line = f"\n**{escape(server['name'])}** ({server['num_players']} Players)"
                # Discord caps messages at 2000 chars: flush and restart.
                if len(answer) + len(line) > 2000:
                    await message.channel.send(answer)
                    answer = line
                else:
                    answer += line
        else:
            answer = f"No online servers with gametype '{tokens[1]}' found!"
        if len(answer) > 0:
            await message.channel.send(answer)
    elif text.startswith("!onlineplayers ") or text.startswith("!op "):
        tokens = text.split(" ", maxsplit=1)
        if len(tokens) != 2:
            return
        mutex.acquire()
        servers = find_online_servers(tokens[1], server_infos)
        mutex.release()
        answer = ""
        if len(servers) > 0:
            # One message per server: name header plus a code block with
            # the player/clan table.
            for server in servers:
                answer = f"\n**{escape(server['name'])}** ({server['num_players']} Players)"
                answer += "\n```"
                for player in server['players']:
                    name = player['name']
                    clan = player['clan']
                    player_type = "(bot)" if player['player'] >= 2 else ""
                    answer += "\n{:<{name_width}} {:>{clan_width}} {player_type}".format(
                        name,
                        clan,
                        name_width=16,
                        clan_width=12,
                        player_type=player_type)
                answer += "```\n"
                await message.channel.send(answer)
                answer = ""
        else:
            answer = f"No online servers with gametype '{tokens[1]}' found!"
        if len(answer) > 0:
            await message.channel.send(answer)
    elif text.startswith("!vpn "):
        # VPN checks are PM-only.
        if message.channel.type is not discord.ChannelType.private:
            await message.channel.send(
                "This feature is only available via PM. Please send a private message."
            )
            return
        tokens = text.split(" ")
        if len(tokens) < 2:
            return
        valid_ips = [x for x in tokens if is_valid_ip(x)]
        if len(valid_ips) == 0:
            await message.channel.send("Invalid IP address(es) provided.")
            return
        if not ENABLE_MASS_VPN_CHECK:
            # Cap the number of checked IPs per command.
            if len(valid_ips) >= 16:
                valid_ips = valid_ips[:16]
        for ip in valid_ips:
            is_vpn = False
            # Check if the ip is in a reserved network range.
            # NOTE(review): this `return` aborts the whole command on the
            # first reserved IP instead of skipping it -- confirm intended.
            __ip = ipaddress.ip_address(ip)
            for network in invalid_vpn_networks:
                if __ip in network:
                    await message.channel.send(
                        f"The IP '{ip}' is part of a reserved IP range which should not be accessible to humans."
                    )
                    return
            is_ip_known = True
            mutex.acquire()
            try:
                is_vpn = ips[ip]
            except KeyError:
                is_ip_known = False
            mutex.release()
            if not is_ip_known:
                log("vpn", f"Unknown IP: {ip}")
                # IP is unknown: ask each API in turn. If one API says
                # "VPN", save the IP as a VPN and stop asking.
                got_resonse = False
                for idx, api in enumerate(self.apis):
                    log("vpn", f"Checking API {idx +1}/{len(self.apis)}")
                    err, is_vpn = await api.is_vpn(ip)
                    if err:
                        cooldown = api.get_remaining_cooldown()
                        log("vpn", f"Skipping API {idx +1}/{len(self.apis)}")
                        if cooldown > 0:
                            log("cooldown", f"{cooldown} seconds left.")
                        continue
                    else:
                        got_resonse = True
                        if is_vpn:
                            log("vpn", "Is a VPN!")
                            mutex.acquire()
                            ips[ip] = True
                            mutex.release()
                            break
                if not got_resonse:
                    await message.channel.send(
                        f"Could not retrieve any data for IP '{ip}', please try this command another time."
                    )
                    continue
                elif not is_vpn:
                    # Got a response and none of the APIs flagged the IP.
                    mutex.acquire()
                    log("vpn", "Is not a VPN")
                    ips[ip] = False
                    mutex.release()
            else:
                # Known IP: nothing to do, just report the cached result.
                log("vpn", f"Known IP: {ip}")
            # Inform the player about whether the ip is a vpn or not.
            string = "not"
            if is_vpn:
                ip = f'**{ip}**'
                string = ""
            await message.channel.send(f"The IP '{ip}' is {string} a VPN")
    elif text.startswith("!ip_filter "):
        # IP extraction is PM-only.
        if message.channel.type is not discord.ChannelType.private:
            await message.channel.send(
                "This feature is only available via PM. Please send a private message."
            )
            return
        tokens = text.split(" ", maxsplit=1)
        if len(tokens) < 2:
            return
        ipv4_pattern = r"(?:(?:1\d\d|2[0-5][0-5]|2[0-4]\d|0?[1-9]\d|0?0?\d)\.){3}(?:1\d\d|2[0-5][0-5]|2[0-4]\d|0?[1-9]\d|0?0?\d)"
        res = re.findall(ipv4_pattern, tokens[1])
        unique_ips = sorted(list(set(res)))
        if len(unique_ips) == 0:
            await message.channel.send("No IPs found!")
            return
        # Answer with a ready-to-paste !vpn command for the found IPs.
        answer = "!vpn"
        for ip in unique_ips:
            answer = f"{answer} {ip}"
        await message.channel.send(answer)
def query_wrapper(i):
    """Run one write query; return its (seconds, hops) measurement."""
    shared.log(f'Executing {i}', 'client')
    elapsed, hops = queries.execute_query(queries.run_kv_writes)
    shared.log(f'Finished: {i} in {elapsed}s', 'client')
    return elapsed, hops
def run_p(self):
    """Run the restriction analysis for the selected enzymes.

    Fills the Restrict tab with the cut sites per enzyme and the
    numeral tab with the number of matching sites per enzyme.
    """
    global userChoices, enzymes, fastaRead
    sh.log("\nstart run_p")
    sh.click()
    self.restrictResults.clear()
    self.numeralResults.clear()
    if not fastaRead:
        self.restrictResults.setPlainText(
            "You must select a fasta file first")
        return
    if len(userChoices) <= 0:
        self.restrictResults.setPlainText(
            "You must select R.Enzymes first")
        return
    self.detectPushButton.setEnabled(False)  # can't run twice
    try:
        linear = self.linearCheckBox.isChecked()
        analysis = Analysis(userChoices, self.sequence, linear=linear)
    except Exception:
        # BUG FIX: the original concatenated a type object to a string
        # (TypeError) and then continued with `analysis` unbound.
        sh.log("analysis failed " + str(sys.exc_info()[0]))
        return
    # Print each enzyme with a list of its matching sites.
    cutSites = str(
        analysis.format_output(
            dct=None,
            title='',
            s1='\n Enzymes which do not cut the sequence\n'))
    self.restrictResults.setPlainText(cutSites)
    # ----------------- FIND PALINDROME HIT COUNTS -----------------
    endMarker = "END"  # hoisted out of the try so later uses cannot NameError
    try:
        enzymes.append(endMarker)
        # Extract enzyme names and the index of their cut sites from the
        # textual report.
        palin = cutSites[:cutSites.find("Enzymes")].replace(
            '.', "").replace(':', "").split()
        palin.append(endMarker)
        sh.log("palin: " + str(palin))
    except Exception:
        # BUG FIX: was sys.exec_info (typo) which itself raised.
        sh.log("palin NG " + str(sys.exc_info()[0]))
    try:
        # Calculate and display the number of matching sites per enzyme.
        # enzPosn starts as [enzyme, index-in-palin] pairs and is later
        # rewritten to [enzyme, number-of-matches].
        enzPosn = []
        enzNone = []
        sh.log("len palin " + str(len(palin)))
        sh.log("user choices " + str(userChoices))
        # BUG FIX: the original aliased the global userChoices, so the
        # "END" marker leaked permanently into the user's selection.
        allChoices = userChoices + [endMarker]  # matches last name in palin
        sh.log("allChoices " + str(allChoices))
        for enz in allChoices:
            if enz in palin:
                enzPosn.append([enz, palin.index(enz)])
            else:
                sh.log(enz + " not in palin")
                enzNone.append(enz)
        sh.log("enzPosn = " + str(enzPosn))
        enzPosn.sort(key=lambda x: x[1])  # sort on index of name in palin
        for i in range(len(enzPosn) - 1):
            # Distance to the next name == number of sites listed for it.
            enzPosn[i][1] = enzPosn[i + 1][1] - enzPosn[i][1] - 1
        del enzPosn[-1]  # delete endMarker
        for enz in enzNone:
            enzPosn.append([enz, 0])  # enzymes not found get 0 matches
        enzPosn.sort(key=lambda x: x[0])  # sort on name
        sh.log("enzPosn = " + str(enzPosn))
        for name, count in enzPosn:  # show the number of matches per enzyme
            matchStr = "{0:7,d} : {1:s}\n\n".format(count, name)
            self.numeralResults.insertPlainText(matchStr)
    except Exception:
        # BUG FIX: was sys.exec_info (typo) which itself raised.
        sh.log('I cannot do that. ' + str(sys.exc_info()[0]))
    self.detectPushButton.setEnabled(False)
    self.nPosPushButton.setEnabled(True)