def _table_response_timing(lineno, total_time, total_queries_time, queries_count):
    """Render a small timing-summary table for one traced line.

    Args:
        lineno: line identifier shown in the table title.
        total_time: total elapsed time in seconds.
        total_queries_time: cumulative database query time in seconds.
        queries_count: number of database queries executed.

    Returns:
        The rendered table as a string.
    """
    rows = [
        [Color("{green}Total time:{/green}"), f"{total_time}s "],
        [Color("{yellow}Database queries time:{/yellow}"), f"{total_queries_time}s "],
        [Color("{cyan}Queries count:{/cyan}"), f"{queries_count} "],
    ]
    timing_table = SingleTable(rows, f" {lineno} time ")
    timing_table.inner_heading_row_border = False
    return timing_table.table
def main():
    """Load scan results from the JSON file named on the CLI and print them as a table."""
    res = arguments()
    # BUG FIX: the original used json.load(open(res.FILE)), leaking the file
    # handle; a context manager closes it deterministically.
    with open(res.FILE) as fp:
        json_content = json.load(fp)
    table_data = [["SOURCE", "TYPE", "DATA"]]
    for result in json_content:
        table_data.append([red(result["source"]), blue(result["type"]), result["data"]])
    table_instance = SingleTable(table_data)
    table_instance.inner_heading_row_border = True
    table_instance.inner_row_border = False
    table_instance.justify_columns = {0: 'left', 1: 'left', 2: 'left'}
    print(f"\n[*] Entries found: {len(json_content)}")
    print(table_instance.table)
    print("")
def list(self, response):
    """ List available stagers Usage: list [-h] """
    rows = [["Name", "Description"]]
    rows.extend(
        [name, fields["description"]]
        for name, fields in response.result.items()
    )
    stager_table = SingleTable(rows, title="Available")
    stager_table.inner_row_border = True
    print(stager_table.table)
def instance_info(self, crawler, **options):
    """Write a crawler summary table, then its start-URL / allowed / blocked
    domain lists rendered side by side."""
    summary = SingleTable([self.header(), self.row(crawler)])
    summary.justify_columns[0] = 'right'
    start = SingleTable([[u] for u in crawler.start_urls.split('\n')], title=' Start URLs ')
    allow = SingleTable([[d] for d in crawler.allowed_domains.split('\n')], title=' Allowed ')
    block = SingleTable([[d] for d in crawler.blocked_domains.split('\n')], title=' Blocked ')
    # NOTE(review): only start/block suppress the heading border; `allow` does
    # not — preserved as-is, but this looks like a possible oversight to confirm.
    start.inner_heading_row_border = False
    block.inner_heading_row_border = False
    self.stdout.write(summary.table)
    self.stdout.write('')
    output = start.table
    if crawler.allowed_domains:
        output = side_by_side(output, allow.table)
    if crawler.blocked_domains:
        output = side_by_side(output, block.table)
    self.stdout.write(output)
    self.stdout.write('')
def __report_summary_labels(self, cumulative):
    """Log a per-label stats table (SingleTable on a TTY, AsciiTable otherwise),
    skipping the empty aggregate label."""
    rows = [("label", "status", "succ", "avg_rt", "error")]
    for label in sorted(cumulative.keys()):
        if label == "":
            continue
        rows.append(self.__get_sample_element(cumulative[label], label))
    table_cls = SingleTable if sys.stdout.isatty() else AsciiTable
    table = table_cls(rows)
    table.justify_columns = {0: "left", 1: "center", 2: "right", 3: "right", 4: "left"}
    self.log.info("Request label stats:\n%s", table.table)
def as_table(tlv, title=None, redact=False):
    """Render a TLV mapping as a Tag/Name/Value table string.

    Args:
        tlv: the TLV instance to render; anything else yields ''.
        title: optional table title.
        redact: passed through to render_element to hide sensitive values.

    Returns:
        The rendered table string, or '' when *tlv* is not a TLV.
    """
    # Idiomatic type check (was: `type(tlv) is not TLV`), also accepting
    # TLV subclasses.
    if not isinstance(tlv, TLV):
        return ''
    res = [['Tag', 'Name', 'Value']]
    for tag, value in tlv.items():
        res.append([
            format_bytes(tag.id),
            tag.name or '',
            # Wrap long rendered values at 80 columns to keep the table readable.
            '\n'.join(
                textwrap.wrap(render_element(tag, value, redact=redact), 80)),
        ])
    table = SingleTable(res)
    if title is not None:
        table.title = title
    return table.table
def summary_eapol(pkts, res):
    """Scan captured packets for EAP identity responses and EAP-MD5 exchanges,
    print a summary table, and optionally append it to res.OUTPUT.

    Args:
        pkts: iterable of scapy packets.
        res: parsed CLI arguments; res.OUTPUT is an optional output file path.
    """
    #table_data = [["BSSID", "AUTH TYPE", "AUTH ID", "USER ID", "MD5"]]
    table_data = [[]]
    network_names = defaultdict(list)
    usernames = list()
    eapol_packets_count = 0
    for pkt in pkts:
        if (pkt.haslayer(Dot11) and pkt.type == 0 and pkt.subtype == 8):
            pass
        if pkt.haslayer(EAP):
            eapol_packets_count += 1
            if (pkt[EAP].type == 1 and pkt[EAP].code == 2):
                # EAP Identity response: record the user identity.
                usr_id = pkt[EAP].id
                usr = pkt[EAP].identity
                table_data.append([f"{green('[+]')} User found: {usr}"])
            elif (pkt[EAP].type == 4 and pkt[EAP].code == 1):  #EAP-MD5
                # 16-byte MD5 challenge follows the length byte.
                # BUG FIX: bytes.encode('hex') is Python 2-only; .hex() is the
                # Python 3 equivalent (f-strings already require Python 3).
                md5_challenge = pkt[EAP].load[1:17]
                table_data.append([
                    f"{green('[+]')} EAP-MD5 : {green('request')} : {md5_challenge.hex()}"
                ])
            elif (pkt[EAP].type == 4 and pkt[EAP].code == 2):
                md5_response = pkt[EAP].load[1:17]
                # BUG FIX: the original printed md5_challenge here (possibly
                # undefined), not the md5_response it just extracted.
                table_data.append([
                    f"{green('[+]')} EAP-MD5 : {red('response')} : {md5_response.hex()}"
                ])
    # TODO: Finish table_data generation
    if len(table_data) == 1:
        table = SingleTable([[red("Nothing found")]])
    elif eapol_packets_count == 0:
        table = SingleTable([[red("No EAP packets found")]])
    else:
        table = SingleTable(table_data[1:])
        table.inner_heading_row_border = False
    print(f"{s} EAPOL analysis {s}")
    print(table.table)
    if res.OUTPUT:
        with open(res.OUTPUT, "a") as out_file:
            out_file.write(f"\n{table.table}\n")
    print("")
def summary_bssids(pkts, res):
    """Summarize Dot11 beacon frames (SSID, BSSID, channel, signal, crypto)
    as a table, print it, and optionally append it to res.OUTPUT.

    Args:
        pkts: iterable of scapy packets.
        res: parsed CLI arguments; res.OUTPUT is an optional output file path.
    """
    table_data = [[
        "SSID", "BSSID", "CHANNEL", "DBM", "ENCRYPTED", "ENCRYPTION TYPE"
    ]]
    dot11beacon_packets_count = 0
    for pkt in pkts:
        if pkt.haslayer(Dot11Beacon):
            dot11beacon_packets_count += 1
            # BUG FIX: the loop variable is `pkt`; the original referenced
            # undefined names `packet` and `p` throughout this branch,
            # raising NameError on the first beacon frame.
            stats = pkt[Dot11Beacon].network_stats()
            try:
                dbm_signal = pkt.dBm_AntSignal
            except AttributeError:
                dbm_signal = "N/A"
            enc = red("x")
            ssid = pkt[Dot11Elt].info.decode()
            bssid = pkt[Dot11].addr3
            try:
                # DS Parameter Set element carries the channel as one byte.
                channel = int(ord(pkt[Dot11Elt:3].info))
            except Exception:
                channel = stats.get("channel")
            capability = pkt.sprintf(
                "{Dot11Beacon:%Dot11Beacon.cap%}"
                "{Dot11ProbeResp:%Dot11ProbeResp.cap%}")
            if re.search("privacy", capability):
                enc = green("+")
            enc_type = stats.get("crypto")
            table_data.append(
                [ssid, bssid, channel, dbm_signal, enc, enc_type])
    print(f"{s} AP discovery {s}")
    if len(pkts) == 0:
        table = SingleTable([[red("No packets captured")]])
    elif dot11beacon_packets_count == 0:
        table = SingleTable([[red("No Dot11Beacon packets found")]])
    else:
        table = SingleTable(table_data)
    print(table.table)
    if res.OUTPUT:
        with open(res.OUTPUT, "a") as out_file:
            out_file.write(f"\n{table.table}\n")
    print("")
def hinton_diagram(arr, max_arr=None):
    """Render *arr* as a borderless Hinton-diagram string.

    Cell glyphs are scaled against the largest absolute value found in
    *max_arr* (defaults to *arr* itself).
    """
    if max_arr is None:
        max_arr = arr
    max_val = max(abs(np.max(max_arr)), abs(np.min(max_arr)))
    cells = [
        [hinton_diagram_value(value, max_val) for value in row]
        for row in arr
    ]
    table = SingleTable(cells)
    table.inner_heading_row_border = False
    table.inner_footing_row_border = False
    table.inner_column_border = False
    table.inner_row_border = False
    table.column_max_width = 1
    return table.table
def show(self):
    """Print every key/value pair of the backing service as a two-column table.

    Non-scalar values are rendered as block-style YAML.
    """
    table_data = []
    # BUG FIX: the original used the Python 2 print statement (a SyntaxError
    # on Python 3) and the py2-only `keys = d.keys(); keys.sort()` pattern;
    # sorted() works on both dict views and lists.
    for key in sorted(self._service.keys()):
        value = self._service.get(key)
        if isinstance(value, (str, int, bool)) is False:
            value = yaml.dump(value, default_flow_style=False).strip('\n')
        table_data.append([key, value])
    table_instance = SingleTable(table_data, self.id)
    table_instance.inner_heading_row_border = False
    table_instance.inner_row_border = True
    print(table_instance.table)
def format(self, title="", text=None):
    """
    text = [ [ row 1 [ line1, line2 ] ], [row 2 [ lines]] ]
    if text is a string, then convert to single row, but respect the \n
    """
    # BUG FIX: the original declared `text=[]`, a shared mutable default
    # argument; None + local initialization is behaviorally identical for
    # all callers and avoids the shared-state pitfall.
    if text is None:
        text = []
    if isinstance(text, str):
        text = [[i for i in text.split('\n')]]
    tw = TextWrapper(fix_sentence_endings=True, width=self.col - 4)
    # One table row per input row; each row's lines are wrapped then joined.
    table_data = [[self.color("\n".join(tw.fill(j, ) for j in i))] for i in text]
    table = SingleTable(table_data, self.color(title))
    table.inner_row_border = True
    return table.table
def ls():
    """Print every permitted configuration setting with its value and source."""
    rows = [("key", "value", "source")]
    config = Config()
    for config_name in PERMITTED_SETTINGS:
        full_name = f"{APP_ENV_PREFIX}_{config_name.upper()}"
        rows.append(
            (full_name, getattr(config, config_name), config.getsource(config_name))
        )
    table = SingleTable(rows)
    table.inner_row_border = True
    click.echo(table.table)
def draw_user_history_table(json_response):
    """Build the 'SM.MS User History' table (image URL / delete URL) from an
    API response dict and return the rendered string."""
    rows = [['Image URL', 'Delete URL']]
    for item in json_response['data']:
        rows.append([item['url'], item['delete']])
    history_table = SingleTable(rows, 'SM.MS User History')
    history_table.inner_row_border = True
    return history_table.table
def ask_stash(player):
    """Prompt until the player gives a yes/no answer about stashing drugs;
    on yes, open the stash screen."""
    clear()
    while True:
        print(SingleTable([['Would you like to stash any drugs?']]).table)
        answer = check_ans_yn(input("\n> "))
        if answer == 1:
            clear()
            visit_stash(player)
            break
        if answer == 2:
            clear()
            break
def users(accountable, query):
    """
    Executes a user search for the given query.
    """
    found = accountable.users(query)
    if not found:
        click.secho('No users found for query {}'.format(query), fg='red')
        return
    headers = ['display_name', 'key']
    rows = [headers]
    for user in found:
        rows.append([v for k, v in sorted(user.items()) if k in headers])
    print_table(SingleTable(rows))
def run_diagnostic(config):
    """Run every registered check against *config* and print a results table,
    skipping checks whose status is EMPTY."""
    rows = [["Name", "Result", "Infos"]]
    for runner in checks.check_runners:
        result = runner.run_check(config)
        if result["status"] == "EMPTY":
            continue
        print(runner.name + ": done")
        rows.append(build_check_result_row(runner.name, result))
    diagnostic_table = SingleTable(rows, title="Your personal server diagnostic")
    print("")
    print(diagnostic_table.table)
def ask_bank(player):
    """Prompt until the player gives a yes/no answer about visiting the bank;
    on yes, open the bank screen."""
    clear()
    while True:
        print(SingleTable([['Would you like to visit the bank?']]).table)
        choice = check_ans_yn(input("\n> "))
        if choice == 1:
            clear()
            visit_bank(player)
            break
        if choice == 2:
            clear()
            break
def print_telemetry_reporting_info() -> None:
    """Print telemetry information to std out."""
    message = textwrap.dedent(
        f"""
        Rasa Open Source reports anonymous usage telemetry to help improve the product
        for all its users.

        If you'd like to opt-out, you can use `rasa telemetry disable`.
        To learn more, check out {DOCS_URL_TELEMETRY}."""
    ).strip()
    print(SingleTable([[message]]).table)
def __dump_locations_if_needed(self):
    """If the 'dump-locations' setting is enabled, list the available
    workspace locations and abort the run via NormalShutdown."""
    if not self.settings.get("dump-locations", False):
        return
    locations = {
        loc['id']: loc
        for loc in self._workspaces.locations(include_private=True)
    }
    rows = [("ID", "Name")]
    for location_id in sorted(locations):
        rows.append((location_id, locations[location_id]['title']))
    table_cls = SingleTable if (sys.stdout and sys.stdout.isatty()) else AsciiTable
    self.log.warning("Dumping available locations instead of running the test:\n%s",
                     table_cls(rows).table)
    raise NormalShutdown("Done listing locations")
def print_table(data: object, ENABLED_ENCODERS: List[BaseEncoder],
                ENABLED_COMPRESSORS: List[BaseCompressor],
                BANDWIDTH: float) -> None:
    """Benchmark each encoder/compressor combination on *data* and print a
    comparison table of sizes, encode times, simulated network transfer times
    and totals.

    Args:
        data: payload passed to every encoder.
        ENABLED_ENCODERS: encoder classes to benchmark (one table section each).
        ENABLED_COMPRESSORS: compressor classes applied to each encoding.
        BANDWIDTH: simulated link speed in bytes per second.
    """
    table_data = [[f'Network: {naturalsize(BANDWIDTH)}/s', 'Encoding phase']]
    for compressor in ENABLED_COMPRESSORS:
        table_data[0].append(compressor.__name__)
    for Encoder in ENABLED_ENCODERS:
        encoder = Encoder()
        head_line = [Encoder.__name__]
        size_line = [' size']
        time_line = [' time']
        network_line = [' network']
        total_time_line = [' total time']
        encoded_data = encoder.encode(data)
        size_line.append(encoder.size)
        # Baseline: uncompressed transfer and total times.
        base_network_time = encoder._size / BANDWIDTH
        base_total_time = encoder._time + base_network_time
        time_line.append(f"{encoder.time}")
        network_line.append(f"{base_network_time * 1000:4.0f} ms")
        total_time_line.append(f"{base_total_time * 1000:4.0f} ms")
        for Compressor in ENABLED_COMPRESSORS:
            compressor = Compressor()
            compressor.compress(encoded_data)
            # FIX: the original passed unused 'reduction'/'ratio' keyword
            # arguments to .format() calls whose format strings never
            # referenced them; the dead computations are dropped (output is
            # byte-identical).
            size_line.append(f"{compressor.size}")
            time_line.append(f"{compressor._time * 1000:=+5.0f} ms")
            network_time = compressor._size / BANDWIDTH
            total_time = encoder._time + compressor._time + network_time
            network_line.append(
                f"{(network_time - base_network_time) * 1000:=+5.0f} ms")
            total_time_line.append(f"{total_time * 1000:4.0f} ms")
        table_data.append(head_line)
        table_data.append(size_line)
        table_data.append(time_line)
        table_data.append(network_line)
        table_data.append(total_time_line)
    table = SingleTable(table_data)
    table.justify_columns[0] = 'left'
    for ind in range(1, len(table_data[0])):
        table.justify_columns[ind] = 'right'
    print(table.table)
def exp_stats(projects):
    """Aggregate per-error-type repair percentages for each tool across *projects*.

    For every project, runs ``experiment`` on its corpus, adds an 'all_tools'
    pseudo-tool (union of the three tools' repaired-file ids), then counts the
    checkstyle error types of every repaired file's recorded errors.  Prints a
    table of per-tool repair percentages relative to the 'out_of' totals.

    Returns:
        dict: absolute per-tool error-type counts (``repaired_error_types_count``).
    """
    exp_result = {}
    all_tools = ('naturalize', 'codebuff', 'styler')
    for name in projects:
        # print(name)
        exp_result[name] = experiment(name, f'./styler/{name}-corpus')
        # Union of all three tools' repaired-file id sets.
        exp_result[name]['all_tools'] = list(
            reduce(lambda a, b: a | b,
                   [set(exp_result[name][tool]) for tool in all_tools]))
    tools = ('naturalize', 'codebuff', 'styler', 'all_tools')
    # 'out_of' accumulates the denominator: every error type seen, per project.
    repaired_error_types = {tool: [] for tool in (*tools, 'out_of')}
    for project, project_result in exp_result.items():
        experiment_dir = get_experiment_dir(project)
        errored_dir = os.path.join(experiment_dir, f'./errored/1')
        for tool, repaired_files in project_result.items():
            # repaired_files = project_result[tool]
            # Flatten the error types of every repaired file's metadata.
            error_types = reduce(list.__add__, [[
                checkstyle_source_to_error_type(error['source'])
                for error in open_json(
                    os.path.join(errored_dir, f'{id}/metadata.json'))['errors']
            ] for id in repaired_files])
            repaired_error_types[tool] += error_types
    repaired_error_types_count = {
        tool: dict_count(values)
        for tool, values in repaired_error_types.items()
    }
    # Percentage of each error type repaired, relative to the 'out_of' totals.
    repaired_error_types_count_relative = {
        tool: {
            error_type:
            (float(count) / repaired_error_types_count['out_of'][error_type] *
             100.)
            for error_type, count in repaired_error_types_count[tool].items()
        }
        for tool in tools
    }
    # json_pp(repaired_error_types_count_relative)
    keys = list(repaired_error_types_count['out_of'].keys())
    # result = {project:{tool:len(repair) for tool, repair in p_results.items()} for project, p_results in result.items()}
    # result['total'] = { key:sum([e[key] for e in result.values()]) for key in keys }
    #json_pp(total)
    # Header row shows each error type with its total occurrence count.
    table_data = [[''] + [
        f'{key}\n( /{repaired_error_types_count["out_of"][key]})'
        for key in keys
    ]]
    table_data += [[tool] + [
        f'{repaired_error_types_count_relative[tool].get(error_type, 0):.1f}%'
        for error_type in keys
    ] for tool in tools]
    table = SingleTable(table_data)
    print(table.table)
    return repaired_error_types_count
def main():
    """List installed plugins — as a table by default, one line per plugin
    with --raw, or as JSON with --json."""
    arg_parser = argparse.ArgumentParser(description=DESCRIPTION)
    arg_parser.add_argument("--raw", action="store_true", help="raw mode")
    arg_parser.add_argument("--json", action="store_true",
                            help="json mode "
                                 "(not compatible with raw mode)")
    arg_parser.add_argument("--plugins-base-dir", type=str, default=None,
                            help="can be use to set an alternate "
                                 "plugins-base-dir, if not set the value of "
                                 "MFMODULE_PLUGINS_BASE_DIR env var is used (or a "
                                 "hardcoded standard value).")
    args = arg_parser.parse_args()
    if args.json and args.raw:
        print("ERROR: json and raw options are mutually exclusives")
        sys.exit(1)
    if not is_plugins_base_initialized(args.plugins_base_dir):
        echo_bold("ERROR: the module is not initialized")
        echo_bold(" => start it once before installing your plugin")
        print()
        print("hint: you can use %s.start to do that" % MFMODULE_LOWERCASE)
        print()
        sys.exit(3)
    plugins = get_installed_plugins(plugins_base_dir=args.plugins_base_dir)
    json_output = []
    table_data = [["Name", "Version", "Release", "Home"]]
    for plugin in plugins:
        name = plugin['name']
        version = plugin['version']
        release = plugin['release']
        home = plugin['home']
        if args.raw:
            print("%s~~~%s~~~%s~~~%s" % (name, version, release, home))
        elif args.json:
            json_output.append({
                "name": name,
                "release": release,
                "version": version,
                "home": home
            })
        else:
            table_data.append([name, version, release, home])
    if not args.raw and not args.json:
        plugin_table = SingleTable(title="Installed plugins (%i)" % len(plugins),
                                   table_data=table_data)
        print(plugin_table.table)
    elif args.json:
        print(json.dumps(json_output, indent=4))
def options(self, response):
    """ Show selected module options Usage: options [-h] """
    rows = [["Option Name", "Required", "Value", "Description"]]
    rows.extend(
        [name, opt["Required"], opt["Value"], opt["Description"]]
        for name, opt in response.result.items()
    )
    options_table = SingleTable(rows, title=self.selected['name'])
    options_table.inner_row_border = True
    print(options_table.table)
def palabras_mezcladas_game(palabras_mezcladas, player):
    """Run the mechanics of the scrambled-words game.

    Takes the words and places them in a terminaltables table; each word is
    scrambled by converting it to a character list, shuffling it with
    random.shuffle, and joining it back into a string.

    Args:
        palabras_mezcladas: Normal_Game_NR instance (normal game, no
            requirements) holding all of the game's data.
        player: the player's information (lives, remaining time).

    Returns:
        bool: True if the player guesses all 5 words.
        NOTE(review): when the loop ends by time/lives running out the
        function falls through and returns None — confirm callers treat
        None as a loss.
    """
    print(palabras_mezcladas.get_name().capitalize())
    print(palabras_mezcladas.get_rules().capitalize())
    # Pick one of the three available questions at random.
    question_n = random.randint(0, 2)
    info_question = palabras_mezcladas.send_question(question_n)
    words = info_question["words"]
    # First table row is the category header; following rows are the words.
    words_mix = [[f"""Categoría: {info_question["category"]}"""]]
    words_mix_aux = []
    for i in range(len(words)):
        word = words[i]
        char_list = list(word)
        random.shuffle(char_list)
        word_mix = ''.join(char_list)
        words_mix_aux.append(word_mix)
        # Unguessed words are rendered in red (fg(1)).
        word_mix_list = [f"""%s{word_mix}%s""" % (fg(1), attr(0))]
        words_mix.append(word_mix_list)
    guessed_count = 0
    # Keep playing until all 5 words are guessed, time runs out, or lives hit 0.
    while guessed_count < 5 and player.get_time_left() > datetime.timedelta(
    ) and player.get_lives() > 0:
        table = SingleTable(words_mix)
        print(table.table)
        guess = input(f"""{info_question["question"]}\n==> """)
        if guess in words:
            print('%s ¡Correcto! %s' % (fg(2), attr(0)))
            index = words.index(guess)
            # Recolor the guessed word green (fg(2)); +1 skips the header row.
            words_mix[index + 1] = [
                f"""%s{words_mix_aux[index]}%s""" % (fg(2), attr(0))
            ]
            guessed_count += 1
        else:
            # Wrong guess costs half a life.
            print(f'%s {guess} no está en las palabras :/ %s' % (fg(1), attr(0)))
            print("Pierdes media vida")
            player.set_lives(player.get_lives() - 0.5)
    # NOTE(review): if the while loop never executes, `table` is undefined here.
    print(table.table)
    if guessed_count == 5:
        return True
def info(self, guid: str, response):
    """ Get info of a specified session Usage: info [-h] <guid> """
    rows = [["Name", "Value"]]
    for key, value in response.result['info'].items():
        rows.append([key, value])
    info_table = SingleTable(rows, title="Session Info")
    info_table.inner_row_border = True
    print(info_table.table)
def run():
    """Print the help menu listing every framework command."""
    from terminaltables import SingleTable
    rows = [
        ['Command', 'Description'],
        ['help or ?', 'Display this menu'],
        ['info', 'Display current configuration options'],
        ['search', 'Search exploitdb for exploits and get link'],
        ['tools', 'Display available tools'],
        ['set <key name>', 'Set configuration key'],
        ['reset', 'Reset configuration to default'],
        ['update', 'Check for updates and update thes framework'],
        ['tutorial', 'Run the tutorial wizard'],
        ['exit', 'Exit framework'],
    ]
    print(SingleTable(rows).table)
def create_garden_table(garden):
    """Return a SingleTable whose cells are the rendered per-box tables of
    each row in *garden* (note: returns the table object, not its string)."""
    rendered_rows = [
        [create_box_table(box).table for box in row]
        for row in garden
    ]
    garden_tbl = SingleTable(rendered_rows)
    garden_tbl.inner_heading_row_border = False
    garden_tbl.inner_row_border = True
    return garden_tbl
def main():
    """Scrape the PLDT Home WiFi load-balance dashboard and print each
    balance entry (name, value, expiration) as a table."""
    # Endpoint serving the load-balance fragment.
    URI = "http://dashboard.pldthome.com/HomeWifiLoad/Availment/_LoadBalance"
    soup = BeautifulSoup(request.urlopen(URI).read(), "html.parser")
    # Page heading, reused as the table title.
    main_title = soup.find("h2", {"class": "hwd"}).getText()
    # Container holding the individual balance rows.
    parent_element = soup.find("div", {"class": ["balance", "prepaid"]})
    general_data = parent_element.findAll("div", {"class": ["row", "prepaid"]})
    rows = list()
    for row in general_data:
        title = row.find("p", {"class": "family-pack"}).getText().title()
        value = row.find("p", {"class": ["value", "bal-info"]}).getText()
        # Strip the "Expires on " prefix from the expiration text.
        tmp_exp = row.find("p", {"class": "expiration"}).getText()
        if tmp_exp.startswith("Expires on "):
            exp = tmp_exp[11:]
        # NOTE(review): if the prefix is absent, `exp` keeps (or lacks) the
        # previous iteration's value — preserved as-is; confirm intended.
        rows.append([title, value, exp])
    rows.insert(0, ["Name", "Value", "Expiration"])
    table = SingleTable(rows)
    table.title = chalk.yellow(main_title)
    print(table.table)
def do_qq(self, arg):
    'Get quote for stock q <symbol> or option q <symbol> <call/put> <strike> <(optional) YYYY-mm-dd>'
    arg = arg.strip().split()
    try:
        symbol = arg[0].upper()
    except IndexError:
        print("Please check arguments again. Format: ")
        print("Stock: q <symbol>")
        print(
            "Option: q <symbol> <call/put> <strike> <(optional) YYYY-mm-dd>"
        )
        # BUG FIX: the original fell through with `symbol` undefined and
        # raised NameError below; bail out after printing usage.
        return
    type = strike = expiry = None
    if len(arg) > 1:
        # Option quote path: need at least type and strike.
        try:
            type = arg[1]
            strike = arg[2]
        except IndexError:
            print("Please check arguments again. Format: ")
            print("q <symbol> <call/put> <strike> <(optional) YYYY-mm-dd>")
            # BUG FIX: same fall-through problem — strike would be None and
            # the quote request nonsensical; stop here.
            return
        try:
            expiry = arg[3]
        except IndexError:
            expiry = None  # expiry is optional
        arg_dict = {
            'symbol': symbol,
            'type': type,
            'expiration_dates': expiry,
            'strike_price': strike,
            'state': 'active',
            'tradability': 'tradable'
        }
        quotes = self.trader.get_option_quote(arg_dict)
        qquote_t_data = []
        qquote_table = SingleTable(qquote_t_data, 'Quote List')
        qquote_table.inner_row_border = True
        qquote_table.justify_columns = {0: 'center', 1: 'center'}
        qquote_t_data.append(['expiry', 'price'])
        for row in quotes:
            qquote_t_data.append(row)
        print((qquote_table.table))
    else:
        # Stock quote path.
        try:
            self.trader.print_quote(symbol)
        except Exception:
            print("Error getting quote for:", symbol)
def warn():
    """Print a deprecation notice for the old 'linode-beta' entry point,
    then delegate to main()."""
    heading = Color('{yellow}WARNING{/yellow}')
    body = ("The 'linode-beta' command has been deprecated and renamed to "
            "'linode-cli'. Please invoke with that command to hide this warning.")
    print(SingleTable([[heading], [body]]).table)
    main()