def _get_candidates(self) -> typing.List[Span]:
    """
    Return a list of spans with candidate topics, such that the start of
    each candidate is a noun chunk that was trimmed of stopwords or tokens
    with POS tags that we wish to ignore.

    returns:
    list of candidate spans
    """
    candidates: typing.List[Span] = []

    try:
        noun_chunks = list(self.doc.noun_chunks)

        for chunk in noun_chunks:
            for token in chunk:
                if self._keep_token(token):
                    candidates.append(self.doc[token.i:chunk.end])
                    break
    except NotImplementedError as ex:
        # some languages don't have `noun_chunks` support in spaCy models, e.g. "ru"
        ic.disable()
        ic(ex)
        ic.enable()

    return candidates
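# A hedged, standalone sketch of the candidate-trimming idea used by
# _get_candidates() above: walk each noun chunk left-to-right and start the
# candidate span at the first token worth keeping. Here "keep" is simplified
# to "not a stopword or determiner"; the real _keep_token() also consults a
# configurable set of POS tags. Model name and sample text are illustrative.
import spacy

nlp = spacy.load("en_core_web_sm")
doc = nlp("The quick brown fox jumps over the lazy dog.")

for chunk in doc.noun_chunks:
    for token in chunk:
        if not token.is_stop and token.pos_ != "DET":
            print(chunk.text, "->", doc[token.i:chunk.end].text)
            break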
def __init__(self, rootdir, copy=True, dry=False, transforms=None, pattern='',
             train_size=0.8, test_size=0.2, debug=True, out='',
             save_test=True, save_train=True):
    if not debug:
        ic.disable()

    self.dry = dry
    self.rootdir = ic(rootdir)
    # use None as the default instead of a shared mutable default argument
    self.transforms = ic(transforms if transforms is not None else [])
    self.pattern = pattern
    self.train_size = train_size
    self.test_size = test_size
    self.classes = []
    self.filenames = []
    self.out = ic(out) if out != '' else rootdir
    self.copy = copy

    if dry and out == '':
        self.out = tempfile.mkdtemp()
def main():
    # ic.enable()
    ic.disable()

    # output-side thread - receives messages
    out_ths_led = LedThread()
    out_que_led = out_ths_led.rcv_que
    out_ths_led.start()

    # message-routing thread - directs message traffic
    pre_th = PreThread({"led": out_que_led, })
    que_pre = pre_th.rcv_que
    pre_th.start()

    # input-side thread - sends messages
    in_ths_sw = SwThread(que_pre)
    in_ths_sw.start()

    try:
        while True:
            time.sleep(5)
    except KeyboardInterrupt:
        print("stop")

    # shutdown handling
    out_ths_led.stop()
    in_ths_sw.stop()

    return
def calc_textrank(self) -> typing.List[Phrase]:
    """
    Iterate through each sentence in the doc, constructing a
    [*lemma graph*](https://derwen.ai/docs/ptr/glossary/#lemma-graph)
    then returning the top-ranked phrases.

    This method represents the heart of the *TextRank* algorithm.

    returns:
    list of ranked phrases, in descending order
    """
    t0 = time.time()
    self.reset()
    self.lemma_graph = self._construct_graph()

    # to run the algorithm, we use the NetworkX implementation
    # for PageRank (i.e., based on eigenvector centrality)
    # to calculate a rank for each node in the lemma graph
    self.ranks = nx.pagerank(
        self.lemma_graph,
        personalization=self.get_personalization(),
    )

    # agglomerate the lemmas ranked in the lemma graph into ranked
    # phrases, leveraging information from earlier stages of the
    # pipeline: noun chunks and named entities
    nc_phrases: typing.Dict[Span, float] = {}

    try:
        nc_phrases = self._collect_phrases(self.doc.noun_chunks, self.ranks)
    except NotImplementedError as ex:
        # some languages don't have `noun_chunks` support in spaCy models, e.g. "ru"
        ic.disable()
        ic(ex)
        ic.enable()

    ent_phrases: typing.Dict[Span, float] = self._collect_phrases(self.doc.ents, self.ranks)
    all_phrases: typing.Dict[Span, float] = {**nc_phrases, **ent_phrases}

    # since noun chunks can be expressed in different ways (e.g., may
    # have articles or prepositions), we need to find a minimum span
    # for each phrase based on combinations of lemmas
    raw_phrase_list: typing.List[Phrase] = self._get_min_phrases(all_phrases)
    phrase_list: typing.List[Phrase] = sorted(raw_phrase_list,
                                              key=lambda p: p.rank,
                                              reverse=True)

    t1 = time.time()
    self.elapsed_time = (t1 - t0) * 1000.0

    return phrase_list
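# A minimal usage sketch for the TextRank pipeline that calc_textrank() above
# belongs to, assuming a spaCy 3.x + pytextrank 3.x setup; the model name and
# sample text are illustrative only.
import spacy
import pytextrank  # noqa: F401  (importing registers the "textrank" component)

nlp = spacy.load("en_core_web_sm")
nlp.add_pipe("textrank")

doc = nlp("Compatibility of systems of linear constraints "
          "over the set of natural numbers.")

# calc_textrank() runs inside the pipeline; ranked phrases land on doc._.phrases
for phrase in doc._.phrases[:5]:
    print(phrase.text, phrase.rank)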
def final():
    try:
        print("Desea realizar una nueva busqueda \n")
        valor = input("Ingrese Si / No: ")
        ic.disable()
        ic(new_search(valor))
    except Exception:
        logging.error("Validar la opción (Si / No):")
        exit(1)
def find_devices(IPV4):
    try:
        db = get_db()  # connection to the DB
        valor = 0
        Ipv4Bd = ''
        search = db.Devices.find({'Direccion': IPV4})

        for r in search:
            Ipv4Bd = r['Direccion']
            ic.disable()
            ic(Ipv4Bd)
            estadoBd = r['Estado']
            ic.disable()
            ic(estadoBd)
            fechaBd = r['Fecha']
            ic.disable()
            ic(fechaBd)

        if Ipv4Bd != '':  # it exists!
            if estadoBd == True:  # there are open ports
                ic.disable()
                ic(estadoBd)
                Tiempoconsulta = 30  # time, in days
                valor = DateTime(fechaBd, Tiempoconsulta)
                ic.enable()
                ic(valor)
            else:
                ic.disable()
                ic(estadoBd)
                Tiempoconsulta = 15  # time, in days
                valor = DateTime(fechaBd, Tiempoconsulta)
                ic.enable()
                ic(valor)
        else:  # it does not exist!
            valor = 0
            # print("No existe la direccion IPV4 ingresada", band)

        return valor
    except Exception:
        logging.error(
            "Al buscar la Direccion IPv4 : %s en la base de datos. find_devices()", IPV4)
        exit(1)
def testEnableDisable(self):
    with disableColoring(), captureStandardStreams() as (out, err):
        assert ic(a) == 1
        assert ic.enabled

        ic.disable()
        assert not ic.enabled
        assert ic(b) == 2

        ic.enable()
        assert ic.enabled
        assert ic(c) == 3

    pairs = parseOutputIntoPairs(out, err, 2)
    assert pairs == [[('a', '1')], [('c', '3')]]
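# For context, a small sketch of the behavior the test above relies on:
# ic() passes its argument through unchanged (hence `assert ic(a) == 1`),
# so it can wrap expressions inline; while disabled, the pass-through still
# works but nothing is printed.
from icecream import ic

a = 1
result = ic(a) + 1  # prints "ic| a: 1" and still evaluates to 1
assert result == 2

ic.disable()
assert ic(a) == 1  # no output, but the value is still returned
ic.enable()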
def main():
    args = sys.argv[1:]
    max_num_args = 3
    if len(args) > max_num_args:
        print(
            "Usage: python3 plot_stats.py <experiment_dir> [--ts=min_timestamp] [--debug]"
        )
        exit(1)

    experiment_dir = ""
    min_timestamp = 0
    debug = False

    for arg in args:
        if '--ts=' in arg:
            min_timestamp = float(arg.split('=')[1])
        elif '--debug' == arg:
            debug = True
        elif experiment_dir == "":
            experiment_dir = arg

    if debug:
        ic.enable()
    else:
        ic.disable()

    server_nodes_prefix = 'server_nodes'
    client_nodes_prefix = 'client_nodes'

    with open(f'{experiment_dir}/info.json', 'r') as f:
        info = json.load(f)

    nodes = info['nodes']
    server_nodes = nodes['server']
    client_nodes = nodes['client']

    plot_bandwidths(min_timestamp, experiment_dir, server_nodes, server_nodes_prefix)
    plot_cpu_mem_stats(min_timestamp, experiment_dir, server_nodes, server_nodes_prefix)
    plot_bandwidths(min_timestamp, experiment_dir, client_nodes, client_nodes_prefix)
    plot_cpu_mem_stats(min_timestamp, experiment_dir, client_nodes, client_nodes_prefix)
def capturadepantalla(ip, puerto):
    setdefaulttimeout(30)
    try:
        nombreimagen = "Noimagen.png"
        # browser = ""  # UnboundLocalError: local variable 'browser' referenced before assignment
        optionsChr = webdriver.ChromeOptions()
        optionsChr.add_argument("--headless")
        optionsChr.add_argument('--disable-gpu')
        optionsChr.add_argument('--log-level=3')
        optionsChr.set_capability("acceptInsecureCerts", True)
        optionsChr.add_argument("--incognito")
        optionsChr.add_argument('--ignore-certificate-errors')
        optionsChr.add_argument('--version')
        browser = webdriver.Chrome(
            executable_path=r'C:\\IoT_Divices_ESFOT\\FirefoxDriver\\chromedriver.exe',
            options=optionsChr)
        browser.implicitly_wait(10)
        browser.set_page_load_timeout(10)
        browser.get("http://{0}".format(ip) + ":" + str(puerto))
        nombreimagen = str(ip) + "," + str(puerto) + ".png"  # name of the image file
        sleep(1)
        ic.enable()
        ic(nombreimagen)
        screenshot = browser.get_screenshot_as_file(
            r"C:\\IoT_Divices_ESFOT\\capturas\\" + str(nombreimagen))  # returns a bool
        ic.disable()
        ic(screenshot)
        state = screenshot
        ic.disable()
        ic("screenshot", state)
        browser.close()
    except Exception:
        state = False
        nombreimagen = "Noimagen.png"
        return nombreimagen

    print("Captura Exitosa!")
    return nombreimagen
def __init__(self, lines: list, options, inFilename: str):
    """Initialize a few class variables in preparation for our walk."""
    self.lines = lines
    self.options = options
    self.inFilename = inFilename
    self.docLines = []
    ic.configureOutput(includeContext=True)
    if self.options.debug:
        ic.enable()
    else:
        ic.disable()
def move(cups):
    """
    Perform one move of the cups by the crab's rules. The "current" cup is
    assumed to be at the left side of the deque when the function is called.

    :param cups: The cups
    :type cups: typing.Deque
    """
    # Save icecream state?
    restore = ic.enabled
    ic.disable()

    # Get current cup
    current = cups[0]
    ic(cups)
    ic(current)

    # Rotate the current cup to tail, pop the pickup cups
    cups.rotate(-1)
    pickup = (cups.popleft(), cups.popleft(), cups.popleft())
    ic(pickup)

    # Get destination cup
    destination = current - 1
    if destination < min(cups):
        destination = max(cups)
    while destination in pickup:
        destination -= 1
        if destination < min(cups):
            destination = max(cups)
    ic(destination)

    # Rotate cups until destination is at tail
    rotate_to_tail(cups, destination)

    # Insert slice at tail
    cups.extend(pickup)

    # Rotate cups so that the old current cup is at tail,
    # placing the new current cup at head
    rotate_to_tail(cups, current)

    # Restore icecream state?
    if restore:
        ic.enable()
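# A small usage sketch for move() above, assuming the Advent of Code 2020
# day 23 "crab cups" setup: the cups live in a deque with the current cup at
# the head, and the rotate_to_tail() helper referenced by move() is in scope.
from collections import deque

cups = deque([3, 8, 9, 1, 2, 5, 4, 6, 7])  # the puzzle's worked example input
for _ in range(10):  # ten moves, as in the example
    move(cups)
print(cups)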
def main():
    # ic.enable()
    ic.disable()

    # output-side threads - receive messages
    out_ths = {}
    out_ths["led"] = LedThread()
    out_ths["buzzer"] = BuzzerThread()
    out_ths["servo"] = ServoThread()
    out_ths["dcm"] = DcmThread()
    out_ths["oled"] = OledThread()
    out_ques = {k: v.rcv_que for k, v in out_ths.items()}
    for out_th in out_ths.values():
        out_th.start()

    # message-routing thread - directs message traffic
    pre_th = PreThread(out_ques)
    que_pre = pre_th.rcv_que
    pre_th.start()

    # input-side threads - send messages
    in_ths = {}
    in_ths["sw"] = SwThread(que_pre)
    in_ths["httpd"] = HttpdThread(que_pre)
    in_ths["js"] = JsThread(que_pre)
    for in_th in in_ths.values():
        in_th.start()

    try:
        while True:
            time.sleep(5)
    except KeyboardInterrupt:
        print("stop")

    # shutdown handling
    for out_th in out_ths.values():
        out_th.stop()
    pre_th.stop()
    for in_th in in_ths.values():
        in_th.stop()

    return
def plot_aux_1(info):
    client_id, results_per_server, aux_client_dir, client_name, output_dir, min_timestamp = info
    services = {}
    measurements = 0

    if DEBUG:
        ic.enable()
    else:
        ic.disable()

    fig = plt.figure(figsize=(25, 15))

    for _, results in results_per_server.items():
        for result in results:
            service = msg_type_to_service[result[MSG_TYPE]]
            if service in services:
                ic(result[MSG_TYPE], client_id, result[ID], result[TIME_TOOK])
                services[service]['x_axis'].append(
                    normalize_x_axis(result, min_timestamp))
                services[service]['y_axis'].append(result[TIME_TOOK])
            else:
                services[service] = {'x_axis': [normalize_x_axis(result, min_timestamp)],
                                     'y_axis': [result[TIME_TOOK]]}

    for service in services:
        measurements += len(services[service]['x_axis'])
        new_x, new_y = sort_axis(
            services[service]['x_axis'], services[service]['y_axis'])
        plt.plot(new_x, new_y, '-o', label=service)

    client_dir = f"{output_dir}/{aux_client_dir}/{client_name}"
    if not os.path.exists(client_dir):
        os.mkdir(client_dir)

    ic(f"\tplotting {client_id} ({measurements} measurements)...")
    plot_path = f'{client_dir}/services.png'
    ic(f"Saving image to {plot_path}")

    plt.grid()
    plt.savefig(plot_path)
    plt.clf()
    plt.close(fig)
def cut_intermediate_dirs(self):
    root = self.rootdir
    files = glob_images(root)
    possible_classes = []
    basename = ic(os.path.basename(os.path.normpath(root)))

    for f in files:
        fsplit = f.split(os.sep)
        for i, c in enumerate(fsplit[fsplit.index(basename):-1]):
            try:
                possible_classes[i].add(c)
            except IndexError:
                possible_classes.append(set([c]))

    mx_classes = 0
    for i, pc in enumerate(ic(possible_classes)):
        if mx_classes <= len(pc):
            mx_classes = len(pc)
            class_idx = fsplit.index(basename) + i

    class_labels = ic(possible_classes[class_idx - fsplit.index(basename)])

    renable = False  # only re-enable ic afterwards if we disabled it below
    for i, f in enumerate(files):
        fsplit = f.split(os.sep)
        label = fsplit[class_idx]
        if label not in class_labels:
            # for those f*****g datasets with weird structures!!!!!
            continue
        # f = os.path.join('/', *fsplit[:fsplit.index(basename)+1], label, fsplit[-1])
        self.filenames.append(ic(f))
        self.classes.append(ic(label))
        if i > 10:
            renable = True
            ic.disable()

    if renable:
        ic.enable()
#!/usr/bin/env python3
import feedparser
import youtube_dl
import sys
from pathlib import Path
import argparse
from time import time, mktime
from datetime import datetime
from dateutil.relativedelta import relativedelta
from dateutil.parser import parse as dateparse
from appdirs import AppDirs
from icecream import ic
import json

ic.disable()

if sys.version_info < (3, 6):
    raise Exception("Must be using Python 3.6 or greater")

xdgDirs = AppDirs("yt-dl-subs")

if __name__ == "__main__":
    parser = argparse.ArgumentParser("Download YouTube subscriptions.")
    parser.add_argument(
        "--output-path", "-o",
        default=Path().home() / "Videos" / "YT Subs",
        help=f"The directory to which to save the videos. Default {Path().home() / 'Videos' / 'YT Subs'}",
def main():
    args = sys.argv[1:]
    max_args = 8
    min_args = 2
    if len(args) < min_args or len(args) > max_args:
        print("usage: parse_logs.py <client_logs_folder> <server_logs_folder> [--only-one]"
              " [--print=trades,battles] [--dummy-infos=/tmp/dummy_infos.json]"
              " [--debug] [--output=<output_dir>] [--csvs]")
        sys.exit(1)

    logs_folder = ""
    server_logs_folder = ""
    only_one = False
    print_list = []
    dummy_infos_path = ""
    output_dir = os.path.expanduser('~/plots')
    csvs = False
    debug = False
    min_timestamp = 0

    ic.disable()

    for arg in args:
        if arg == "--only-one":
            only_one = True
        elif "--print" in arg:
            print_list = arg.split("=")[1].split(",")
        elif "--dummy-infos" in arg:
            dummy_infos_path = os.path.expanduser(arg.split("=")[1])
            print(f"dummy_infos set to {dummy_infos_path}")
        elif "--debug" == arg:
            debug = True
            ic.enable()
        elif "--output" in arg:
            output_dir = os.path.expanduser(arg.split('=')[1])
        elif "--csvs" == arg:
            csvs = True
        elif "--ts=" in arg:
            min_timestamp = float(arg.split("=")[1])
        elif logs_folder == "":
            logs_folder = arg
        elif server_logs_folder == "":
            server_logs_folder = arg

    ic('Logging enabled')

    if not os.path.exists(output_dir):
        os.mkdir(output_dir)

    files = get_files_to_parse(logs_folder, only_one)
    dummy_infos = {}
    ips_to_nodes = {}

    if dummy_infos_path == "":
        dummy_infos_path = '/tmp/dummy_infos.json'

    if os.path.exists(dummy_infos_path):
        with open(dummy_infos_path, 'r') as dummy_infos_fp:
            infos = json.load(dummy_infos_fp)
            for info in infos:
                dummy_infos[info["name"]] = info
                ips_to_nodes[info["ip"]] = info["name"]

    results = {}

    print("\n{} PARSING FILES {}\n".format(INFO_HEADER * 2, INFO_HEADER * 2))

    emitted, retries, requests, sent_reqs, got_resps = {}, {}, {}, {}, {}
    for file in files:
        emitted = parse_file_for_emits(
            file, emitted, retries, requests, sent_reqs, got_resps)

    for file in files:
        results[file[CLIENT_ID]] = parse_file_for_results(
            file, print_list, ips_to_nodes, emitted)

    with open(os.path.expanduser('~/logs_results.json'), 'w') as results_fp:
        json.dump(results, results_fp)

    process_results(min_timestamp, results, output_dir, debug, csvs)
    process_requests_retries(requests, retries, output_dir, csvs)
    process_sent_reqs_got_resps(
        sent_reqs, got_resps, csvs, server_logs_folder, output_dir)
from contextlib import contextmanager

@contextmanager
def ic_off():
    """Temporarily silence icecream output inside a `with` block."""
    ic.disable()
    try:
        yield
    finally:
        ic.enable()
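# A minimal usage sketch for the ic_off() context manager above: ic() calls
# inside the `with` block are silenced, and output resumes afterwards.
from icecream import ic

with ic_off():
    ic("this is not printed")
ic("printing is enabled again here")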
def ic_set(debug):
    if debug:
        ic.enable()
    else:
        ic.disable()
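# A hedged example of driving ic_set() from a command-line flag; the argparse
# wiring here is illustrative and not part of the original snippet.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--debug", action="store_true")
args = parser.parse_args()
ic_set(args.debug)  # icecream output is on only when --debug is passed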
def addNewDevices(ip, portOpen, exist):
    try:
        puertoList = []
        for puerto in portOpen:
            try:
                connection = socket(AF_INET, SOCK_STREAM)
                connection.connect((ip, puerto))
                connection.send(b'HEAD / HTTP/1.0\r\n\r\n')
                banner = ""  # initialize banner in case the next step raises an error
                banner = connection.recv(1024)  # at most 1024 bytes of content
                aux = str(banner).replace('\\r\\n', '<br/>')
                # strip the leading and trailing characters we don't care about; now we have the banner
                banner = aux[2:len(aux) - 3]
            except Exception:
                logging.warning(
                    "Al realizar la conexion con el banner, puerto: %s. ", puerto)
                banner = None
            connection.close()

            # add information about the IPv4 address
            obj = pygeoip.GeoIP('Geo/GeoLiteCity.dat')
            location = obj.record_by_addr(str(ip))
            ic.disable()
            ic('location: ', location)
            for key, val in location.items():
                ic.disable()
                ic('%s : %s' % (key, val))

            # take the screenshot
            imagen = capturadepantalla(ip, puerto)

            # store the 'documents' in an array, using append
            puerto = {
                'Puerto': str(puerto),
                'Banner': str(banner),
                'Imagen': str(imagen)
            }
            puertoList.append(puerto)
            ic(puerto)

        # port-related information:
        dominio = getfqdn(ip)  # domain
        whois = IPWhois(ip).lookup_whois()  # whois
        dns = reversename.from_address(ip)  # DNS

        # the machine's date and time
        date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')

        ic.disable()
        ic(banner)
        ic.disable()
        ic(dominio)
        ic.disable()
        ic(whois)
        ic.disable()
        ic(dns)
        ic.disable()
        ic(date)
        ic.disable()
        ic(puertoList)

        # Add the information to the database for the first time.
        # The attributes assigned are: (ip, img, fecha, location, whois, dominio, dns, puerto)
        if exist == 0:
            estado = True
            db = get_db()
            datos = Device(str(ip), estado, date, location, whois,
                           str(dominio), str(dns), puertoList)
            db.Devices.insert_one(datos.toCollection())
            logging.info("Ipv4: %s, Agregada!", ip)
            return "Se agrego correctamente!\n"

        # the established number of days has passed
        if exist == 1:
            db = get_db()
            db.Devices.update_one({"Direccion": str(ip)}, {
                "$set": {
                    "Estado": True,
                    "Fecha": date,
                    "Whois": whois,
                    "Dominio": str(dominio),
                    "Dns": str(dns),
                    "puerto": puertoList
                }
            })
            logging.info("Ipv4: %s, Actualizada!", ip)
            return "Se actualizo correctamente!\n"
    except Exception:
        logging.error(
            "La direccion IPv4: %s no puede agregar o actualizar. Conexion: Fallida! addNewDevices", ip)
        exit(1)
def agregar(repeticiones):
    try:
        PortList = [
            22, 23, 25, 53, 80, 81, 110, 180, 443, 873, 2323, 5000, 5001,
            5094, 5150, 5160, 7547, 8080, 8100, 8443, 8883, 49152, 52869,
            56000, 1728, 3001, 8008, 8009, 10001, 223, 1080, 1935, 2332,
            8888, 9100, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 21,
            554, 888, 1159, 1160, 1161, 1435, 1518, 3389, 4550, 5005, 5400,
            5550, 6550, 7000, 8000, 8081, 8090, 8150, 8866, 9000, 9650,
            9999, 10000, 18004, 25001, 30001, 34567, 37777, 69, 135, 161,
            162, 4786, 5431, 8291, 37215, 53413
        ]  # could be moved into its own function
        # print("repeticiones", repeticiones)
        for contador in range(0, int(repeticiones)):
            # validate the type of search
            ip = Generar_IP_Ecuador_Aleatoria()  # call the function: random IPs
            ic.enable()
            Num = contador + 1
            ic(Num, ip)

            # check whether the IPv4 address is in the MongoAtlas database
            findDeviceBD = find_devices(ip)
            ic.enable()
            ic(findDeviceBD)

            if findDeviceBD == 0 or findDeviceBD == 1:
                portOpen = []
                num = len(PortList)
                with alive_bar(num) as bar:
                    for port in PortList:
                        bar()
                        estadoPort = OpenPort(ip, port)
                        if estadoPort == True:
                            ic.disable()
                            ic(port, estadoPort)
                            portOpen.append(port)
                        else:
                            ic.disable()
                            ic(port, estadoPort)

                portsNumbers = len(portOpen)
                if int(portsNumbers) != 0:
                    ic.enable()
                    ic(portOpen)
                    Estado = addNewDevices(ip, portOpen, findDeviceBD)
                    ic.enable()
                    ic(Estado)
                else:
                    ic.enable()
                    ic(portsNumbers)
                    Estado = EmptyPort(ip, findDeviceBD)
                    ic.enable()
                    ic(Estado)
                    ic.enable()
            else:
                print("La dirección IPv4", ip,
                      " ya existe y es menor a los días establecidos")

        print("\n\nBusqueda Finalizada :) \n\n")
        return final()
    except Exception as e:
        print("Se ha producido un error al agregar o actualizar la dirección IPv4:"
              + bcolors.WARNING + str(e) + bcolors.ENDC)
        exit(1)