def _get_candidates(self) -> typing.List[Span]:
    """
    Return a list of spans with candidate topics, such that the start of
    each candidate is a noun chunk that was trimmed of stopwords or tokens
    with POS tags that we wish to ignore.

    returns:
    list of candidate spans
    """
    candidates: typing.List[Span] = []

    try:
        noun_chunks = list(self.doc.noun_chunks)

        for chunk in noun_chunks:
            for token in chunk:
                if self._keep_token(token):
                    candidates.append(self.doc[token.i:chunk.end])
                    break

    except NotImplementedError as ex:
        # some languages don't have `noun_chunks` support in spaCy models, e.g. "ru"
        ic.disable()
        ic(ex)
        ic.enable()

    return candidates
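# `_keep_token` is referenced above but not shown; a plausible sketch of what it
# is assumed to check, based on the docstring (drop stopwords and unwanted POS
# tags) -- an illustration, not the library's actual implementation:
def _keep_token(self, token) -> bool:
    # keep tokens that are neither stopwords nor tagged with an ignored POS
    return not token.is_stop and token.pos_ not in ("ADP", "DET", "PRON", "PUNCT")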
def DateTime(FechaBD, days):
    try:
        # Validate the date and time parameters
        cadena = datetime.strptime(FechaBD, "%Y-%m-%d %H:%M:%S")
        ahora = datetime.now()  # Get the current system time
        # Set the maximum number of days allowed.
        treintadias = timedelta(days=days)
        fechaacomparar = ahora - treintadias
        ic(cadena, fechaacomparar)
        if cadena < fechaacomparar:
            # The established day limit has been exceeded.
            estadoFecha = 1
        else:
            estadoFecha = -1
        ic.enable()
        ic(estadoFecha)
        return estadoFecha
    except Exception as e:
        logging.error(
            "An error occurred while validating the date. DateTime(): %s", e)
        exit(1)
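# A minimal usage sketch of DateTime() (the timestamp below is made up):
# returns 1 when the stored date is older than `days` days, -1 otherwise.
estado = DateTime("2021-05-01 10:30:00", 30)
ic(estado)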
def __init__(self, lines: list, options, inFilename: str):
    """Initialize a few class variables in preparation for our walk."""
    self.lines = lines
    self.options = options
    self.inFilename = inFilename
    self.docLines = []
    ic.configureOutput(includeContext=True)
    ic.enable() if self.options.debug else ic.disable()
def release_pigpio(self):
    ic()
    if self.__borrow_count <= 0:
        ic.enable()
        ic(sys._getframe().f_code.co_filename,
           sys._getframe().f_code.co_name,
           "ERROR!")
        return
    self.__borrow_count -= 1
    if 0 == self.__borrow_count:
        self.__pi.stop()
    return
def calc_textrank(self) -> typing.List[Phrase]:
    """
    Iterate through each sentence in the doc, constructing a
    [*lemma graph*](https://derwen.ai/docs/ptr/glossary/#lemma-graph)
    then returning the top-ranked phrases.

    This method represents the heart of the *TextRank* algorithm.

    returns:
    list of ranked phrases, in descending order
    """
    t0 = time.time()

    self.reset()
    self.lemma_graph = self._construct_graph()

    # to run the algorithm, we use the NetworkX implementation
    # for PageRank (i.e., based on eigenvector centrality)
    # to calculate a rank for each node in the lemma graph
    self.ranks = nx.pagerank(
        self.lemma_graph,
        personalization=self.get_personalization(),
    )

    # agglomerate the lemmas ranked in the lemma graph into ranked
    # phrases, leveraging information from earlier stages of the
    # pipeline: noun chunks and named entities
    nc_phrases: typing.Dict[Span, float] = {}

    try:
        nc_phrases = self._collect_phrases(self.doc.noun_chunks, self.ranks)
    except NotImplementedError as ex:
        # some languages don't have `noun_chunks` support in spaCy models, e.g. "ru"
        ic.disable()
        ic(ex)
        ic.enable()

    ent_phrases: typing.Dict[Span, float] = self._collect_phrases(
        self.doc.ents, self.ranks)
    all_phrases: typing.Dict[Span, float] = {**nc_phrases, **ent_phrases}

    # since noun chunks can be expressed in different ways (e.g., may
    # have articles or prepositions), we need to find a minimum span
    # for each phrase based on combinations of lemmas
    raw_phrase_list: typing.List[Phrase] = self._get_min_phrases(all_phrases)
    phrase_list: typing.List[Phrase] = sorted(
        raw_phrase_list, key=lambda p: p.rank, reverse=True)

    t1 = time.time()
    self.elapsed_time = (t1 - t0) * 1000.0

    return phrase_list
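# A minimal usage sketch (an assumption, not part of this file): with spaCy and
# pytextrank installed, calc_textrank() runs inside the "textrank" pipeline
# component and its ranked phrases are exposed on `doc._.phrases`.
import spacy
import pytextrank  # noqa: F401  (registers the "textrank" pipeline factory)

nlp = spacy.load("en_core_web_sm")
nlp.add_pipe("textrank")

doc = nlp("Compatibility of systems of linear constraints over the set of natural numbers.")
for phrase in doc._.phrases[:5]:
    print(phrase.rank, phrase.text)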
def find_devices(IPV4):
    try:
        db = get_db()  # Connect to the DB
        valor = 0
        Ipv4Bd = ''
        search = db.Devices.find({'Direccion': IPV4})
        for r in search:
            Ipv4Bd = r['Direccion']
            ic.disable()
            ic(Ipv4Bd)
            estadoBd = r['Estado']
            ic.disable()
            ic(estadoBd)
            fechaBd = r['Fecha']
            ic.disable()
            ic(fechaBd)
        if (Ipv4Bd != ''):  # It exists!
            if (estadoBd == True):  # There are open ports
                ic.disable()
                ic(estadoBd)
                Tiempoconsulta = 30  # Time in days.
                valor = DateTime(fechaBd, Tiempoconsulta)
                ic.enable()
                ic(valor)
            else:
                ic.disable()
                ic(estadoBd)
                Tiempoconsulta = 15  # Time in days.
                valor = DateTime(fechaBd, Tiempoconsulta)
                ic.enable()
                ic(valor)
        else:  # It does not exist!
            valor = 0
            # print("The entered IPv4 address does not exist", band)
        return valor
    except Exception:
        logging.error(
            "Error while looking up the IPv4 address %s in the database. find_devices()",
            IPV4)
        exit(1)
def repeat(repeticiones):
    try:
        # repeticiones = 1  # if the user enters no value, the default is 1 IP address
        # Cap the search at 1000 IPv4 addresses.
        if int(repeticiones) > 1000:
            repeticiones = 1000
        ic.enable()
        ic("Going to examine:", repeticiones)
        return repeticiones
    except Exception:
        logging.error(
            "An error occurred with the number of repetitions.",
        )
        exit(1)
def testEnableDisable(self):
    with disableColoring(), captureStandardStreams() as (out, err):
        assert ic(a) == 1
        assert ic.enabled

        ic.disable()
        assert not ic.enabled
        assert ic(b) == 2

        ic.enable()
        assert ic.enabled
        assert ic(c) == 3

    pairs = parseOutputIntoPairs(out, err, 2)
    assert pairs == [[('a', '1')], [('c', '3')]]
def main():
    args = sys.argv[1:]

    max_num_args = 3
    if len(args) > max_num_args:
        print(
            "Usage: python3 plot_stats.py <experiment_dir> [--ts=min_timestamp] [--debug]"
        )
        exit(1)

    experiment_dir = ""
    min_timestamp = 0
    debug = False

    for arg in args:
        if '--ts=' in arg:
            min_timestamp = float(arg.split('=')[1])
        elif '--debug' == arg:
            debug = True
        elif experiment_dir == "":
            experiment_dir = arg

    if debug:
        ic.enable()
    else:
        ic.disable()

    server_nodes_prefix = 'server_nodes'
    client_nodes_prefix = 'client_nodes'

    with open(f'{experiment_dir}/info.json', 'r') as f:
        info = json.load(f)

    nodes = info['nodes']
    server_nodes = nodes['server']
    client_nodes = nodes['client']

    plot_bandwidths(min_timestamp, experiment_dir, server_nodes, server_nodes_prefix)
    plot_cpu_mem_stats(min_timestamp, experiment_dir, server_nodes, server_nodes_prefix)
    plot_bandwidths(min_timestamp, experiment_dir, client_nodes, client_nodes_prefix)
    plot_cpu_mem_stats(min_timestamp, experiment_dir, client_nodes, client_nodes_prefix)
def capturadepantalla(ip, puerto):
    setdefaulttimeout(30)
    try:
        nombreimagen = "Noimagen.png"
        # browser = ""  # UnboundLocalError: local variable 'browser' referenced before assignment
        optionsChr = webdriver.ChromeOptions()
        optionsChr.add_argument("--headless")
        optionsChr.add_argument('--disable-gpu')
        optionsChr.add_argument('--log-level=3')
        optionsChr.set_capability("acceptInsecureCerts", True)
        optionsChr.add_argument("--incognito")
        optionsChr.add_argument('--ignore-certificate-errors')
        optionsChr.add_argument('--version')
        browser = webdriver.Chrome(
            executable_path=r'C:\\IoT_Divices_ESFOT\\FirefoxDriver\\chromedriver.exe',
            options=optionsChr)
        browser.implicitly_wait(10)
        browser.set_page_load_timeout(10)
        browser.get("http://{0}".format(ip) + ":" + str(puerto))
        nombreimagen = str(ip) + "," + str(puerto) + ".png"  # Image file name.
        sleep(1)
        ic.enable()
        ic(nombreimagen)
        screenshot = browser.get_screenshot_as_file(
            r"C:\\IoT_Divices_ESFOT\\capturas\\" + str(nombreimagen))  # Bool
        ic.disable()
        ic(screenshot)
        state = screenshot
        ic.disable()
        ic("screenshot", state)
        browser.close()
    except Exception:
        state = False
        nombreimagen = "Noimagen.png"
        return nombreimagen
    print("Capture successful!")
    return nombreimagen
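# A minimal usage sketch (the IP and port are made-up examples): returns the
# screenshot file name on success, or "Noimagen.png" if the capture failed.
imagen = capturadepantalla("192.0.2.10", 8080)
ic(imagen)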
def move(cups):
    """
    { Perform one move of the cups by crab's rules.
      "Current" is assumed to be at left side of the deque at function call.}

    :param cups: The cups
    :type cups: typing.Deque
    """
    # Save icecream state?
    restore = ic.enabled
    ic.disable()

    # Get current cup
    current = cups[0]
    ic(cups)
    ic(current)

    # Rotate the current cup to tail, pop the pickup cups
    cups.rotate(-1)
    pickup = (cups.popleft(), cups.popleft(), cups.popleft())
    ic(pickup)

    # Get destination cup
    destination = current - 1
    if destination < min(cups):
        destination = max(cups)
    while destination in pickup:
        destination -= 1
        if destination < min(cups):
            destination = max(cups)
    ic(destination)

    # Rotate cups until destination is at tail
    rotate_to_tail(cups, destination)

    # Insert slice at tail
    cups.extend(pickup)

    # Rotate cups so that the old current cup is at tail,
    # placing the new current cup at head
    rotate_to_tail(cups, current)

    # Restore icecream state?
    if restore:
        ic.enable()
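# `rotate_to_tail` is referenced above but not shown; a minimal sketch of what
# it is assumed to do (rotate the deque in place until `value` sits at the tail):
def rotate_to_tail(cups, value):
    # deque.index() is O(n); rotating by -(index + 1) moves `value` to the tail
    cups.rotate(-(cups.index(value) + 1))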
def plot_aux_1(info):
    client_id, results_per_server, aux_client_dir, client_name, output_dir, min_timestamp = info

    services = {}
    measurements = 0

    if DEBUG:
        ic.enable()
    else:
        ic.disable()

    fig = plt.figure(figsize=(25, 15))

    for _, results in results_per_server.items():
        for result in results:
            service = msg_type_to_service[result[MSG_TYPE]]
            if service in services:
                ic(result[MSG_TYPE], client_id, result[ID], result[TIME_TOOK])
                services[service]['x_axis'].append(
                    normalize_x_axis(result, min_timestamp))
                services[service]['y_axis'].append(result[TIME_TOOK])
            else:
                services[service] = {
                    'x_axis': [normalize_x_axis(result, min_timestamp)],
                    'y_axis': [result[TIME_TOOK]],
                }

    for service in services:
        measurements += len(services[service]['x_axis'])
        new_x, new_y = sort_axis(
            services[service]['x_axis'], services[service]['y_axis'])
        plt.plot(new_x, new_y, '-o', label=service)

    client_dir = f"{output_dir}/{aux_client_dir}/{client_name}"
    if not os.path.exists(client_dir):
        os.mkdir(client_dir)

    ic(f"\tplotting {client_id} ({measurements} measurements)...")

    plot_path = f'{client_dir}/services.png'
    ic(f"Saving image to {plot_path}")

    plt.grid()
    plt.savefig(plot_path)
    plt.clf()
    plt.close(fig)
def cut_intermediate_dirs(self):
    root = self.rootdir
    files = glob_images(root)
    possible_classes = []
    basename = ic(os.path.basename(os.path.normpath(root)))
    for f in files:
        fsplit = f.split(os.sep)
        for i, c in enumerate(fsplit[fsplit.index(basename):-1]):
            try:
                possible_classes[i].add(c)
            except IndexError:
                possible_classes.append(set([c]))

    mx_classes = 0
    for i, pc in enumerate(ic(possible_classes)):
        if mx_classes <= len(pc):
            mx_classes = len(pc)
            class_idx = fsplit.index(basename) + i

    class_labels = ic(possible_classes[class_idx - fsplit.index(basename)])

    renable = False  # initialize so the flag is defined even for short file lists
    for i, f in enumerate(files):
        fsplit = f.split(os.sep)
        label = fsplit[class_idx]
        if label not in class_labels:
            # for those f*****g datasets with weird structures!!!!!
            continue
        # f = os.path.join('/', *fsplit[:fsplit.index(basename)+1], label, fsplit[-1])
        self.filenames.append(ic(f))
        self.classes.append(ic(label))
        if i > 10:
            renable = True
            ic.disable()
    if renable:
        ic.enable()
def trainRL(train_loader, valid_loader, test_loader, model_encoder,
            model_decoder, epoch, args, norm):
    ic(epoch)
    epoch_size = len(valid_loader)
    file_name = "%s_%s" % (args.policy_name, args.env_name)

    if args.save_models and not os.path.exists("./pytorch_models_test"):
        os.makedirs("./pytorch_models_test")

    env = envs(args, model_encoder, model_decoder, epoch_size)

    state_dim = args.state_dim
    # ic(state_dim)
    action_dim = args.z_dim
    # ic(action_dim)
    max_action = args.max_action

    # Initialize policy
    if args.policy_name == "TD3":
        policy = TD3.TD3(state_dim, action_dim, max_action)
    elif args.policy_name == "OurDDPG":
        policy = OurDDPG.DDPG(state_dim, action_dim, max_action)
    elif args.policy_name == "DDPG":
        policy = DDPG.DDPG(state_dim, action_dim, max_action, args.device)

    replay_buffer = utils.ReplayBuffer()

    # evaluations = [evaluate_policy(policy, valid_loader, env, args)]
    evaluations = [
        evaluate_policy(policy, valid_loader, env, args, render=False)
    ]
    ic.enable()

    total_timesteps = 0
    timesteps_since_eval = 0
    episode_num = 0
    done = True
    env.reset(epoch_size=len(train_loader))
    ic.enable()

    while total_timesteps < args.max_timesteps:
        if done:
            try:
                dataloader_iterator = iter(train_loader)
                input = next(dataloader_iterator)
            except StopIteration:
                dataloader_iterator = iter(train_loader)
                input = next(dataloader_iterator)

            if total_timesteps != 0:
                # print("Total T: %d Episode Num: %d Episode T: %d Reward: %f" %
                #       (total_timesteps, episode_num, episode_timesteps, episode_reward))
                if args.policy_name == "TD3":
                    ic("TD3")
                    policy.train(replay_buffer, episode_timesteps,
                                 args.batch_size, args.discount, args.tau,
                                 args.policy_noise, args.noise_clip,
                                 args.policy_freq)
                else:
                    # ic("else")
                    policy.train(replay_buffer, episode_timesteps,
                                 args.batch_size, args.discount, args.tau)

            # Evaluate episode
            if timesteps_since_eval >= args.eval_freq:
                timesteps_since_eval %= args.eval_freq
                # evaluations.append(evaluate_policy(policy, valid_loader, env, args, render=False))
                if args.save_models:
                    policy.save(file_name, directory="./pytorch_models_test")
                env.reset(epoch_size=len(test_loader))
                test_policy(policy, test_loader, env, args, render=True)
                env.reset(epoch_size=len(train_loader))

            # Reset environment
            # obs = env.reset()
            done = False
            episode_reward = 0
            episode_timesteps = 0
            episode_num += 1

        # Select action randomly or according to policy
        obs = env.agent_input(input[0])
        if total_timesteps < args.start_timesteps:
            # action_t = torch.rand(args.batch_size, args.z_dim)  # TODO checked rand instead of randn
            action_t = torch.FloatTensor(args.batch_size, args.z_dim).uniform_(
                -args.max_action, args.max_action)
            action = action_t.detach().cpu().numpy().squeeze(0)
            # obs, _, _, _, _ = env(input, action_t)
        else:
            # action_rand = torch.randn(args.batch_size, args.z_dim)
            # obs, _, _, _, _ = env(input, action_rand)
            action = policy.select_action(np.array(obs))
            if args.expl_noise != 0:
                action = (action + np.random.normal(
                    0, args.expl_noise, size=args.z_dim)).clip(
                        -args.max_action * np.ones(args.z_dim, ),
                        args.max_action * np.ones(args.z_dim, ))
        action = np.float32(action)
        action_t = torch.tensor(action).cuda().unsqueeze(dim=0)

        # Perform action
        # env.render()
        new_obs, _, reward, done, _, _ = env(input, action_t, disp=True)
        # new_obs, reward, done, _ = env.step(action)
        done_bool = 0 if episode_timesteps + 1 == args.max_episodes_steps else float(done)
        episode_reward += reward

        # Store data in replay buffer
        # ic(type(obs))
        # ic(np.shape(obs))
        # ic(type(new_obs))
        # ic(np.shape(new_obs))
        # ic(type(action))
        # ic(np.shape(action))
        # ic(type(reward))
        replay_buffer.add((obs, new_obs, action, reward, done_bool))

        obs = new_obs
        episode_timesteps += 1
        total_timesteps += 1
        timesteps_since_eval += 1
@contextmanager  # requires: from contextlib import contextmanager
def ic_off():
    ic.disable()
    try:
        yield
    finally:
        ic.enable()
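# A minimal usage sketch: icecream output is silenced inside the block and
# re-enabled afterwards, even if the body raises.
with ic_off():
    ic("not printed")
ic("printed again")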
def main():
    args = sys.argv[1:]
    max_args = 8
    min_args = 2
    if len(args) < min_args or len(args) > max_args:
        print("usage: parse_logs.py <client_logs_folder> <server_logs_folder> [--only-one]"
              " [--print=trades,battles] [--dummy-infos=/tmp/dummy_infos.json]"
              " [--debug] [--output=<output_dir>] [--csvs]")
        sys.exit(1)

    logs_folder = ""
    server_logs_folder = ""
    only_one = False
    print_list = []
    dummy_infos_path = ""
    output_dir = os.path.expanduser('~/plots')
    csvs = False
    debug = False
    min_timestamp = 0

    ic.disable()

    for arg in args:
        if arg == "--only-one":
            only_one = True
        elif "--print" in arg:
            print_list = arg.split("=")[1].split(",")
        elif "--dummy-infos" in arg:
            dummy_infos_path = os.path.expanduser(arg.split("=")[1])
            print(f"dummy_infos set to {dummy_infos_path}")
        elif "--debug" == arg:
            debug = True
            ic.enable()
        elif "--output" in arg:
            output_dir = os.path.expanduser(arg.split('=')[1])
        elif "--csvs" == arg:
            csvs = True
        elif "--ts=" in arg:
            min_timestamp = float(arg.split("=")[1])
        elif logs_folder == "":
            logs_folder = arg
        elif server_logs_folder == "":
            server_logs_folder = arg

    ic('Logging enabled')

    if not os.path.exists(output_dir):
        os.mkdir(output_dir)

    files = get_files_to_parse(logs_folder, only_one)

    dummy_infos = {}
    ips_to_nodes = {}

    if dummy_infos_path == "":
        dummy_infos_path = '/tmp/dummy_infos.json'

    if os.path.exists(dummy_infos_path):
        with open(dummy_infos_path, 'r') as dummy_infos_fp:
            infos = json.load(dummy_infos_fp)
            for info in infos:
                dummy_infos[info["name"]] = info
                ips_to_nodes[info["ip"]] = info["name"]

    results = {}

    print("\n{} PARSING FILES {}\n".format(INFO_HEADER * 2, INFO_HEADER * 2))

    emitted, retries, requests, sent_reqs, got_resps = {}, {}, {}, {}, {}
    for file in files:
        emitted = parse_file_for_emits(
            file, emitted, retries, requests, sent_reqs, got_resps)

    for file in files:
        results[file[CLIENT_ID]] = parse_file_for_results(
            file, print_list, ips_to_nodes, emitted)

    with open(os.path.expanduser('~/logs_results.json'), 'w') as results_fp:
        json.dump(results, results_fp)

    process_results(min_timestamp, results, output_dir, debug, csvs)
    process_requests_retries(requests, retries, output_dir, csvs)
    process_sent_reqs_got_resps(
        sent_reqs, got_resps, csvs, server_logs_folder, output_dir)
def ic_set(debug):
    if debug:
        ic.enable()
    else:
        ic.disable()
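# A minimal usage sketch (the --debug flag name is an assumption): toggle
# icecream globally from a command-line flag, then call ic() freely elsewhere.
ic_set("--debug" in sys.argv)
ic("only shown when --debug was passed")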
)
parser.add_argument(
    "--no-download",
    default=False,
    help="Set this to not download any videos.",
    action="store_true",
)
parser.add_argument("--quiet",
                    "-q",
                    default=False,
                    help="Reduce the output.",
                    action="store_true")
args = parser.parse_args()

if args.debug:
    ic.enable()
    ic(args)

if args.retain and args.retain < args.since:
    print(
        "It is not a good idea to remove newer files than what you want to download."
    )
    if input("Continue y/[n]: ").lower() != "y":
        quit()

# The current run time.
scriptStartTime = time()

if isinstance(args.config_path, str):
    confDir = Path(args.config_path)
def agregar(repeticiones):
    try:
        PortList = [
            22, 23, 25, 53, 80, 81, 110, 180, 443, 873, 2323, 5000, 5001,
            5094, 5150, 5160, 7547, 8080, 8100, 8443, 8883, 49152, 52869,
            56000, 1728, 3001, 8008, 8009, 10001, 223, 1080, 1935, 2332,
            8888, 9100, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 21,
            554, 888, 1159, 1160, 1161, 1435, 1518, 3389, 4550, 5005, 5400,
            5550, 6550, 7000, 8000, 8081, 8090, 8150, 8866, 9000, 9650, 9999,
            10000, 18004, 25001, 30001, 34567, 37777, 69, 135, 161, 162,
            4786, 5431, 8291, 37215, 53413
        ]  # move this into its own function
        # print("repeticiones", repeticiones)
        for contador in range(0, int(repeticiones)):
            # validate the search type.
            ip = Generar_IP_Ecuador_Aleatoria()  # call the function that generates random IPs
            ic.enable()
            Num = contador + 1
            ic(Num, ip)
            # Check whether the IPv4 address is in the MongoDB Atlas database
            findDeviceBD = find_devices(ip)
            ic.enable()
            ic(findDeviceBD)
            if (findDeviceBD == 0 or findDeviceBD == 1):
                portOpen = []
                num = len(PortList)
                with alive_bar(num) as bar:
                    for port in PortList:
                        bar()
                        estadoPort = OpenPort(ip, port)
                        if estadoPort == True:
                            ic.disable()
                            ic(port, estadoPort)
                            portOpen.append(port)
                        else:
                            ic.disable()
                            ic(port, estadoPort)
                portsNumbers = len(portOpen)
                if int(portsNumbers) != 0:
                    ic.enable()
                    ic(portOpen)
                    Estado = addNewDevices(ip, portOpen, findDeviceBD)
                    ic.enable()
                    ic(Estado)
                else:
                    ic.enable()
                    ic(portsNumbers)
                    Estado = EmptyPort(ip, findDeviceBD)
                    ic.enable()
                    ic(Estado)
                ic.enable()
            else:
                print("The IPv4 address", ip,
                      "already exists and is within the established number of days")
        print("\n\nSearch finished :) \n\n")
        return final()
    except Exception as e:
        print("An error occurred while adding or updating the IPv4 address: "
              + bcolors.WARNING + str(e) + bcolors.ENDC)
        exit(1)