def list(self, name: str, running: bool, available: bool):
    """
    Get running/available listeners

    Usage: list [<name>] [--running] [--available] [-h]

    Arguments:
        name   filter by listener name

    Options:
        -h, --help        Show dis
        -r, --running     List running listeners
        -a, --available   List available listeners
    """
    # First table: listener types that can be started.
    loaded_rows = [["Name", "Description"]]
    loaded_rows.extend([entry.name, entry.description] for entry in self.loaded)
    loaded_table = AsciiTable(loaded_rows, title="Available")
    loaded_table.inner_row_border = True
    print(loaded_table.table)

    # Second table: listeners that are currently running.
    running_rows = [["Type", "Name", "URL"]]
    running_rows.extend(
        [entry.name, entry["Name"], f"https://{entry['BindIP']}:{entry['Port']}"]
        for entry in self.listeners
    )
    running_table = AsciiTable(running_rows, title="Running")
    running_table.inner_row_border = True
    print(running_table.table)
def output_ascii_table_list(table_title=None,
                            table_data=None,
                            table_header=None,
                            inner_heading_row_border=False,
                            inner_row_border=False):
    """Print table_data as an AsciiTable, paginating when it would not fit
    on the current console.

    @type table_title: unicode
    @type table_data: list
    @type table_header: list
    @type inner_heading_row_border: bool
    @type inner_row_border: bool
    """
    console_rows, _ = get_console_dimensions()
    console_rows = int(console_rows)
    # 7 rows of chrome (borders, header, prompt) are reserved on each page.
    full_display_length = len(table_data) + 7
    items_per_page = console_rows - 7
    num_pages = 0
    if full_display_length > console_rows:
        try:
            num_pages = int(math.ceil(float(len(table_data)) / float(items_per_page)))
        except ZeroDivisionError:
            # items_per_page == 0: the terminal is shorter than the chrome.
            exit('Console too small to display.')
    if num_pages:
        # Paged mode: emit one screenful at a time, waiting for Enter
        # between pages and clearing the screen before the next page.
        running_count = 0
        for page in range(1, num_pages + 1):
            page_table_output = list()
            page_table_output.insert(0, table_header)
            # Index of the last row (exclusive) that fits on this page.
            upper = (console_rows + running_count) - 7
            if upper > len(table_data):
                upper = len(table_data)
            for x in range(running_count, upper):
                page_table_output.append(table_data[x])
                running_count += 1
            table = AsciiTable(page_table_output)
            table.inner_heading_row_border = inner_heading_row_border
            table.inner_row_border = inner_row_border
            table.title = table_title
            if page != 1:
                print('')
            print(table.table)
            if page < num_pages:
                input("Press Enter to continue...")
                os.system('clear')
    else:
        # Everything fits on one screen: print the whole table at once.
        # NOTE(review): mutates the caller's table_data by inserting the header.
        table_data.insert(0, table_header)
        table = AsciiTable(table_data)
        table.inner_heading_row_border = inner_heading_row_border
        table.inner_row_border = inner_row_border
        table.title = table_title
        print(table.table)
async def _print_hosts(self, hosts, table_title=None):
    """Render a summary table of discovered hosts with service counts and
    the set of signature names matched on each host."""
    rows = [["Id", "IP", "Hostname", "Discovered Services", "Matched Signature(s)"]]
    async with ScanDatabase(connection=self.db) as db:
        for host_id, hostname, ip in hosts:
            service_count = await db.get_service_count_on_host(host_id)
            raw_sigs = await db.get_matched_sigs_on_host(host_id)
            # Each row holds a comma-joined signature string (or None).
            split_sigs = (row[0].split(',') for row in raw_sigs if row[0] is not None)
            unique_names = set(name for group in split_sigs for name in group)
            rows.append([host_id, ip, hostname, service_count, ','.join(unique_names)])
    host_table = AsciiTable(rows)
    host_table.inner_row_border = True
    host_table.title = table_title
    print(host_table.table)
def __str__(self):
    """Render the dataset as a borderless-row terminal table."""
    rendered = TermTable(self._table_data())
    rendered.inner_row_border = False
    rendered.title = "Dataset " + self.id
    return rendered.table
def get_order(self, option):
    """Fetch the current cart from the Dominos endpoint.

    When ``option`` is truthy the cart (with net/total price) is printed;
    otherwise the item rows (without the header) are returned.
    """
    url = 'https://pizzaonline.dominos.co.in/view/cart'
    payload = {'isAjaxRequest': 'json'}
    response = self.session.post(url, data=payload).json()

    rows = [['BASKET ID', 'NAME', 'QUANTITY', 'PRICE']]
    for item in response['customer_basket_info']:
        rows.append([
            item['customers_basket_id'],
            item['products_name'],
            item['customers_basket_quantity'],
            item['final_price'],
        ])

    if option:
        rows.append(['NET PRICE', response['net_price'],
                     'TOTAL PRICE', response['total_price']])
        cart_table = AsciiTable(rows)
        cart_table.inner_row_border = True
        print()
        print(cart_table.table)
    else:
        return rows[1:]
def show_workflow(ctx, name):
    """Display details of a workflow"""
    try:
        workflow = cel_workflows.get_by_name(name)
    except WorkflowNotFound as e:
        click.echo(f"Error: {e}")
        return

    # Build the table: name, tasks (one per line), periodic schedule, payload.
    rows = [["Name", name]]
    rows.append(["Tasks", "\n".join(f"{task}" for task in workflow["tasks"])])
    periodic_conf = workflow.get("periodic", {})
    rows.append(["Periodic", periodic_conf.get("schedule", "--")])
    rows.append(["Payload", periodic_conf.get("payload", {})])

    detail_table = AsciiTable(rows)
    detail_table.inner_heading_row_border = False
    detail_table.inner_row_border = True
    click.echo(detail_table.table)
def __str__(self):
    """Render the table data with a border between every row."""
    rendered = TermTable(self._table_data())
    rendered.inner_row_border = True
    return rendered.table
def show_missing_documentation(package, device): missingDocs = Experiments.getMissingDocumentation(package, device) missingDocs.insert(0, ["test case", "test steps", "#"]) table = AsciiTable(missingDocs) table.inner_row_border = True print table.table raw_input("TO CONTINUE PRESS ENTER...")
def display_tabled_gads(authors, rendered_gad_months, title, border, width=0):
    """Display a table of gads per author according to gads_render_func.

    Args:
        authors: list of author-name strings used as column headers.
        rendered_gad_months: pre-rendered gad strings, parallel to authors.
        title: table title.
        border: "ascii", "single" or "double"; anything else exits.
        width: columns per row; 0 means auto-fit to MAX_WIDTH.
    """
    if len(authors) <= 1:
        width = 1
    elif width == 0:
        # Auto width: the widest ANSI-stripped line of a rendered month vs the
        # widest author name decides how many columns fit inside MAX_WIDTH.
        gad_width = max(
            [non_ansi_len(l) for l in rendered_gad_months[1].splitlines()])
        author_width = max([non_ansi_len(a) for a in authors])
        auto_width = (MAX_WIDTH - 1) / (max(gad_width, author_width) + 3)
        width = max(1, auto_width)
    # Interleave author-name rows with their corresponding gad rows,
    # chunked `width` columns at a time (Python 2: izip/xrange).
    table_data = list(
        chain.from_iterable(
            izip(
                [authors[i:i + width] for i in xrange(0, len(authors), width)],
                [
                    rendered_gad_months[i:i + width]
                    for i in xrange(0, len(rendered_gad_months), width)
                ])))
    if border == "ascii":
        display_table = AsciiTable(table_data)
    elif border == "single":
        display_table = SingleTable(table_data)
    elif border == "double":
        display_table = DoubleTable(table_data)
    else:
        exit(1)
    display_table.inner_row_border = True
    display_table.inner_column_border = True
    display_table.title = title
    sys.stdout.write(fg(DEFAULT_COLOR))
    print display_table.table.encode('utf-8'), attr(0).encode('utf-8')
def pass_terminal(self):
    """Interactive prompt loop: dispatch exit/run/help/list commands,
    letting switch_module consume anything it recognizes first."""
    while True:
        text = self.cmd_session.prompt(
            bottom_toolbar=bottom_toolbar(self.passwords))
        if text == "exit":
            self.exit()
            sys.exit()
        # A module switch that succeeds swallows the input entirely.
        if self.switch_module(text) != False:
            continue
        command = text.lower()
        if command == "run":
            self.current_module.run(self.passwords)
        elif command == "help":
            help_table = AsciiTable(self.table_help)
            help_table.inner_row_border = True
            print(help_table.table)
            print("")
        elif command == "list":
            self.list_accts()
        else:
            print("[!] Command not found.")
def answer(self):
    """Print the solved 9x9 grid (zeros shown as blanks) and return the raw answer."""
    grid = np.array(self._answer.reshape((9, 9)), dtype='str')
    grid[grid == '0'] = ' '
    board = AsciiTable(grid.tolist())
    board.inner_row_border = True
    print(board.table)
    return self._answer
def main_help():
    """Build the top-level help text listing every mode command.

    Returns:
        str: a formatted "Core commands" section containing a borderless
        command/description table.
    """
    commands = ['database', 'sniffing', 'exploit', 'modelling', 'exit']
    description = [
        'Use database mode.',
        'Use sniffing mode.',
        'Use exploit mode.',
        'Use modelling mode.',
        'Quit this program'
    ]
    table_data = [['Commands', 'Description']]
    # BUG FIX: the old loop iterated range(len(commands) - 1) and silently
    # dropped the last entry ('exit'); zip pairs every command with its text.
    for command, desc in zip(commands, description):
        table_data.append([command, desc])
    table = AsciiTable(table_data)
    table.inner_column_border = False
    table.inner_footing_row_border = False
    table.inner_heading_row_border = True
    table.inner_row_border = False
    table.outer_border = False
    msg = f"""
    Core commands
    =============

{table.table}\n\n"""
    return msg
def option(self):
    """
    Print the options required by the module

    Usage: options [-h]

    Options:
        -h, --help   print this help menu
    """
    rows = [["Name", "Current Settings", "Required", "Description"]]
    rows.extend(
        [opt_name, opt["Current Settings"], opt["Require"], opt["Description"]]
        for opt_name, opt in self.options.items()
    )
    opts_table = AsciiTable(rows)
    opts_table.inner_column_border = False
    opts_table.inner_footing_row_border = False
    opts_table.inner_heading_row_border = True
    opts_table.inner_row_border = False
    opts_table.outer_border = False
    print(f'\nModule Options ({self.name}):\n\n{opts_table.table}\n')
def run(self) -> AnyStr:
    """Do the run."""
    rows = []
    for attr_name in reversed(dir(self._data)):
        # don't show dunders if marked by _show_dunders
        if self._is_dunder(attr_name) and not self._show_dunders:
            continue
        attribute = getattr(self._data, attr_name)
        styled_name = self._export.coloring.style(
            attr_name, fg=self._type_color(attr_name))
        styled_type = self._export.coloring.style(
            str(type(attribute)), fg='blue', bold=True)
        rows.append([styled_name, styled_type,
                     self._arguments(attribute),
                     get_description(attribute, attr_name)])

    listing = AsciiTable([['name', 'type', 'arguments', 'description']] + rows)
    listing.inner_row_border = True
    # save the table
    self._export.store(listing.table)
    # print to standard output or return
    return self._export.export()
def view_table(self, dict_name, func_list):
    """Print one row per source file listing its called functions
    (tagged [M] when locally defined, [O] otherwise) and defined functions."""
    # Normalize func_list entries to "name()" form for membership tests.
    defined_names = []
    for group in func_list:
        for entry in group:
            joined = "".join(list(entry)[1:])
            defined_names.append(joined.split("(")[0] + "()")

    # Tag each called function in place, mirroring the original mutation.
    for source, value in dict_name.items():
        tagged = []
        for called in value['Call_Func']:
            suffix = " [M]" if called in defined_names else " [O]"
            tagged.append(called + suffix)
        dict_name[source]['Call_Func'] = tagged

    rows = []
    for source_name, values in dict_name.items():
        calls = "\n".join(list(set(values['Call_Func'])))
        defs = "\n".join(list(set(values['Define_Func'])))
        rows.append([source_name, calls, defs])

    report = AsciiTable(rows)
    report.inner_row_border = True
    print(report.table)
def print_download_item(self, item, ascii_table):
    """Append the item's wrapped title/summary to ascii_table, creating the
    table (and printing it whole) on first use; returns the table."""
    dimensions = get_max_dimensions(ascii_table)
    wrap_kwargs = dict(initial_indent=' ', subsequent_indent=' ')
    title = "".join(line + "\n" for line in textwrap.wrap(
        item.podcast.title, dimensions[0][0], **wrap_kwargs))
    summ = "".join(line + "\n" for line in textwrap.wrap(
        item.summary, dimensions[0][1], **wrap_kwargs))
    if not ascii_table:
        ascii_table = AsciiTable([['title', 'summary'], [title, summ]])
        ascii_table.inner_row_border = True
        print(ascii_table.table)
        return ascii_table
    ascii_table.table_data.append([title, summ])
    print(ascii_table_last(ascii_table))
    return ascii_table
def get_exposure_matrix(rosters, exclude=None):
    """Build an ASCII matrix counting, per player pair, how many rosters contain both.

    Args:
        rosters: iterable of roster objects exposing ``players`` and
            supporting ``short_name in roster`` membership tests.
        exclude: optional collection of players to omit (default: none).

    Returns:
        str: the rendered AsciiTable of pairwise exposure counts.
    """
    # BUG FIX: the previous default `exclude=[]` was a shared mutable default
    # argument; use None and normalize inside the function.
    if exclude is None:
        exclude = []
    players = set()
    for r in rosters:
        for p in r.players:
            if p in exclude:
                continue
            players.add(p)
    sorted_names = sorted([p.short_name for p in players])
    player_matrix = np.zeros((len(players), len(players)), dtype=int)
    for r in rosters:
        for i, p1 in enumerate(sorted_names):
            for j, p2 in enumerate(sorted_names):
                if p1 in r and p2 in r:
                    player_matrix[i, j] += 1
    # First row/column carry the player names; the body is the count matrix.
    rows = [[''] + sorted_names]
    for i, p in enumerate(sorted_names):
        rows.append([p] + list(player_matrix[i, :]))
    table = AsciiTable(rows)
    table.inner_row_border = True
    table.justify_columns = {i + 1: 'center' for i in range(len(sorted_names))}
    return table.table
def list_workflow(ctx):
    """List the workflows"""
    # Sort workflows by name for stable display.
    ordered = dict(sorted(cel_workflows.workflows.items(), key=lambda item: item[0]))
    rows = [[f"Workflows ({len(ordered)})", "Periodic", "Tasks"]]
    for wf_name, conf in ordered.items():
        schedule = conf.get("periodic", {}).get("schedule", "--")
        tasks = "\n".join(f"{task}" for task in conf["tasks"])
        rows.append([wf_name, schedule, tasks])
    listing = AsciiTable(rows)
    listing.inner_row_border = True
    listing.justify_columns[1] = "center"
    click.echo(listing.table)
def write_out_results(self):
    """Print a summary table of benchmark statistics followed by a
    per-status-code count table."""
    stats = self._calc_stats()
    rps = stats.rps
    # Typo fixes in user-facing labels/titles: Fatest->Fastest,
    # Rsults->Results, StausCode->StatusCode.
    results_table_data = [
        ['Item', 'Value', 'Info'],
        ['Successful calls', '%r' % stats.count, '测试成功的连接数'],
        ['Total time', '%.4f' % stats.total_time, '总耗时'],
        ['Average Time', '%.4f' % stats.avg, '每个连接的平均耗时'],
        ['Fastest Time', '%.4f' % stats.min, '最小耗时'],
        ['Slowest Time', '%.4f' % stats.max, '最大耗时'],
        ['Amplitude', '%4f' % stats.amp, '最大耗时和最小耗时之差'],
        ['Stand deviation', '%.6f' % stats.stdev, '耗时标准差'],
        ['Request Per Second', '%d' % rps, '每秒的访问量']
    ]
    results_table = AsciiTable(results_table_data, 'Results')
    results_table.inner_row_border = True
    print('\n')
    print(results_table.table)
    print('\r\r\n\n')
    status_table_data = [['Status Code', 'Items']]
    for code, items in self.status_code_counter.items():
        status_table_data.append(['%d' % code, '%d' % len(items)])
    status_table = AsciiTable(status_table_data, 'StatusCode')
    print(status_table.table)
def view_services(filterquery=None):
    """Prints out list of services and its relevant information

    Args:
        filterquery: optional pre-built query object; when omitted every
            Service in the session is listed.
    """
    table = []
    table.append(["Service Name", "Stacks", "Containers", "Parent S",
                  "Child S", "Endpoints"])
    if filterquery:
        services = filterquery.all()
    else:
        services = session.query(Service).all()
    if not services:
        print "No services met the search"
        return
    for service in services:
        state = service.get_state()
        parents = [p['parent'] for p in state['parent']]
        children = [c['child'] for c in state['childs']]
        cs = []
        # Describe each container as name:version[:endpoint] where an
        # endpoint is attached at that stack position.
        for stack in state['stacks']:
            for i, container in enumerate(stack['container']):
                endpoint = service.tree_on_stack_pointer(i)
                if endpoint:
                    cs.append("%s:%s:%s" % (container['name'],
                                            container['version'],
                                            endpoint.name))
                else:
                    cs.append("%s:%s" % (container['name'],
                                         container['version']))
        table.append([str(state['name']),
                      "\n".join([s['name'] for s in state['stacks'] if s]),
                      str("\n".join(cs)),
                      "\n".join(parents),
                      "\n".join(children),
                      "\n".join(state['endpoints'])])
    t = AsciiTable(table)
    t.inner_row_border = True
    print t.table
def print_download_item(self, item, ascii_table):
    """Add the item's wrapped title and summary to the table; build and
    print the whole table when none exists yet. Returns the table."""
    dims = get_max_dimensions(ascii_table)

    def _wrapped(text, width):
        # Each wrapped line keeps a trailing newline, matching the
        # line-by-line accumulation this function always produced.
        lines = textwrap.wrap(text, width,
                              initial_indent=' ', subsequent_indent=' ')
        return "\n".join(lines) + "\n" if lines else ""

    title = _wrapped(item.podcast.title, dims[0][0])
    summ = _wrapped(item.summary, dims[0][1])
    if ascii_table:
        ascii_table.table_data.append([title, summ])
        print(ascii_table_last(ascii_table))
    else:
        ascii_table = AsciiTable([['title', 'summary']] + [[title, summ]])
        ascii_table.inner_row_border = True
        print(ascii_table.table)
    return ascii_table
def review_ghost():
    """Interactively select a saved ghost file and run a review pass over it.

    Returns:
        0 when the user chooses -1 (exit) without reviewing.
    """
    ghost_dir = os.path.join(this_dir, 'user_data/ghost')
    names = sorted(os.listdir(ghost_dir))
    offset = 9
    data = [[str(i + 1).rjust(2), names[i].center(BANNER_BODY_WIDTH - offset)]
            for i in range(len(names))]
    data.append(['-1', 'Exit'.center(BANNER_BODY_WIDTH - offset)])
    tb = AsciiTable(data, 'Selected to review')
    tb.inner_row_border = True
    print(tb.table)
    while True:
        inp = input('Your Input:').strip()
        # BUG FIX: the old bare `except:` also swallowed KeyboardInterrupt /
        # SystemExit; only non-numeric input should be retried.
        try:
            inpp = int(inp)
        except ValueError:
            continue
        if inpp == -1:
            return 0
        if inpp in range(1, len(names) + 1):
            break
        print('{} is out of bound.'.format(inpp))
    ob_content = np.load(os.path.join(ghost_dir, names[inpp - 1]))
    ob_content, recall_bitmap = pipeline_util.query_list(ob_content)
    recall_summary(recall_bitmap)
    # Only re-review items that were not recalled (bitmap == 0).
    if len(np.where(recall_bitmap == 0)[0]) != 0:
        pipeline_util.review_oblivious(ob_content, recall_bitmap)
def get_puzzle_as_str(puzzle):
    """Render the puzzle grid as an ASCII table with the first two columns centered."""
    grid = AsciiTable(puzzle)
    grid.inner_heading_row_border = False
    grid.inner_row_border = True
    grid.justify_columns[0] = "center"
    grid.justify_columns[1] = "center"
    return grid.table
def to_readable_output(self, serialized_packet):
    """Converts the decoded, but serialized packet to a clean, readable
    output. Intended for human readability.

    Args:
        serialized_packet: The raw, decoded APRS packet string.
    """
    try:
        packet = aprslib.parse(serialized_packet)
        headers = ["From ", "To ", "Lat ", "Long ", "Alt ", "Comment ", "Text "]
        fields = [
            self.__get_formatted(packet, 'from', 9),
            self.__get_formatted(packet, 'to', 9),
            self.__get_formatted(packet, 'latitude', 8),
            self.__get_formatted(packet, 'longitude', 8),
            self.__get_formatted(packet, 'altitude', 8),
            self.__get_formatted(packet, 'comment', 27),
            self.__get_formatted(packet, 'text', 27),
        ]
        table_instance = AsciiTable([headers, fields], ' Packet ')
        table_instance.inner_heading_row_border = False
        table_instance.inner_row_border = True
        return '\n' + table_instance.table
    except (aprslib.ParseError, aprslib.UnknownFormat):
        # Unparseable packets are passed through untouched.
        return serialized_packet
def display_results(results, save, out=None):
    """Print issue id/description/comments as a wrapped table; optionally
    write the same report to the file named by ``out`` when ``save`` is set."""
    table = AsciiTable([['Issue ID', 'Description', 'Comments']])
    # Wrap long cells to half the maximum width of the description column.
    align_width = int(table.column_max_width(1) / 2)
    for issue_id in results:
        description = results[issue_id]["description"]
        comments = results[issue_id]["comments"]
        if not description and not comments:
            continue
        description = description if description else "--"
        comments = comments if comments else "--"
        if len(str(description)) > align_width:
            description = '\n'.join(wrap(str(description), align_width))
        if len(str(comments)) > align_width:
            comments = '\n'.join(wrap(str(comments), align_width))
        table.table_data.append([issue_id, description, comments])
    table.inner_row_border = True
    print(table.table)
    item_count = len(table.table_data) - 1
    print("[+] Returned " + str(item_count) + " items\n")
    if save:
        output = ("\n[+] Jecretz Results\n\n" + table.table +
                  "\n\n[+] Returned " + str(item_count) + " items\n\n")
        with open(out, "w") as file:
            file.write(output)
def block_header(cur):
    """ block portion of header """
    def _borderless(rows):
        # All the sub-tables share the same "no borders at all" styling.
        sub = AsciiTable(rows)
        sub.inner_heading_row_border = False
        sub.outer_border = False
        sub.inner_row_border = False
        sub.inner_column_border = False
        return sub

    block_data = [
        ["Total :",
         get(cur, "total_adlist_enabled") + "/" + get(cur, "total_adlist")],
        ["Our Lists :",
         get(cur, "our_adlist_enabled") + "/" + get(cur, "our_adlist")],
        ["Others :",
         get(cur, "other_adlist_enabled") + "/" + get(cur, "other_adlist")],
    ]
    block_table = _borderless(block_data)

    t3_block_data = [[row[0], row[1]] for row in adlist_top3_by_comment(cur)]
    t3_block_table = _borderless(t3_block_data)

    # Outer layout: the two sub-tables rendered side by side.
    table_data = [
        ["Ad/Blocklist Stats", "Top 3 by Comment"],
        [block_table.table, t3_block_table.table],
        [],
    ]
    layout = SingleTable(table_data)
    layout.padding_left = 2
    layout.outer_border = False
    utils.info(layout.table)
def print_table(table_data):
    """Print table_data as an ASCII table; ratings tables get a centered
    second column."""
    rendered = AsciiTable(table_data)
    rendered.inner_row_border = True
    rating_headers = ([['TITLE', 'IMDB RATING']], [['TITLE', 'TOMATO RATING']])
    if table_data[:1] in rating_headers:
        rendered.justify_columns[1] = 'center'
    print("\n")
    print(rendered.table)
def print_table(table_data):
    """Render and print the given rows; center the rating column when the
    header is one of the known rating headers."""
    header = table_data[:1]
    movie_table = AsciiTable(table_data)
    movie_table.inner_row_border = True
    if header == [['TITLE', 'IMDB RATING']] or header == [['TITLE', 'TOMATO RATING']]:
        movie_table.justify_columns[1] = 'center'
    print("\n")
    print(movie_table.table)
def render_basic(data, title=None):
    """Print data as an ASCII table with wide padding and a footing border."""
    basic = AsciiTable(data, title=title)
    basic.inner_row_border = True
    basic.inner_footing_row_border = True
    basic.padding_left = 5
    basic.padding_right = 5
    print(basic.table)
    print("\n")
def show_status(self, services=None):
    """Print a status table for the given services (defaults to everything
    check_service returns).

    Columns: service id, host IP, container status, image status, dependency
    list, published ports, network mode and basic runtime stats. Host and
    status cells are colorized with each host's/container's assigned color.
    """
    services = self.check_service(services, msg='In SHOW STATUS:')
    table_data = []
    table_data.append([
        'Service', 'Host', 'Service-Status', 'Image-Status', 'Depends-On',
        'Ports', 'Network-Mode', 'Stats'
    ])
    # Default network driver from the compose stream; fall back to 'bridge'
    # when the stream has no default network section.
    # NOTE(review): bare except kept as-is to preserve behavior.
    try:
        default_network = self.stream['networks']['default']['driver']
    except:
        default_network = 'bridge'
    for service in services:
        container = self.get_container_instance_by_service_name(service)
        host = self.get_host_instance_by_container_id(service)
        image_status = ''
        # colorclass-style markup: white background, per-host/container color.
        host_color = "{autobgwhite}{%s}%s{/%s}{/autobgwhite}" % (
            host.color, container.hostip, host.color)
        container_color = "{autobgwhite}{%s}%s{/%s}{/autobgwhite}" % (
            container.color, container.status, container.color)
        if container._image_status == 'changed':
            image_status = container._image_status
        # One "- name" line per dependency, each in that dependency's color.
        depends = ''
        for depend in container.s_depends_on:
            depend_container = self.get_container_instance_by_service_name(
                depend)
            depend_container_color = "- {autobgwhite}{%s}%s{/%s}{/autobgwhite}\n" % (
                depend_container.color, depend, depend_container.color)
            depends += (Color(depend_container_color))
        depends = depends.strip('\n')
        ports = ''
        for port in container.ports:
            ports += "- %s\n" % port
        ports = ports.strip('\n')
        nm = default_network if container.network_mode == '' else container.network_mode
        stats = ''
        for s in [
                'cpu:' + str(container.cpu_utilization) + '%' + '\n',
                'mem:' + str(container.mem_usage) + 'm' + '\n',
                'check:' + str(container.exec_time) + 'ms'
        ]:
            stats += s
        table_data.append([
            container.id, Color(host_color), Color(container_color),
            image_status, depends, ports, nm, stats
        ])
    table_instance = AsciiTable(table_data)
    table_instance.inner_heading_row_border = False
    table_instance.inner_row_border = True
    print table_instance.table
def _create_table(data):
    """Creates a table from the given data."""
    plain = AsciiTable(data)
    plain.inner_column_border = False
    plain.inner_row_border = False
    plain.outer_border = False
    plain.inner_heading_row_border = False
    plain.padding_right = 4
    return str(plain.table)
def print_results(self, headers, rows):
    """Print matching rows as a bordered table, capped at self.args.limit.

    Args:
        headers: attribute names to show, in column order.
        rows: objects whose matching attributes fill the table cells.
    """
    limit = self.args.limit if self.args.limit < len(rows) else len(rows)
    # BUG FIX: `limit` was computed but never applied; cap the rows shown.
    data = [headers] + [[getattr(row, x) for x in headers if hasattr(row, x)]
                        for row in rows[:limit]]
    table = AsciiTable(data)
    table.title = "Possible Matche(s)"
    table.inner_row_border = True
    print(table.table)
    output = 'Total : {0}'.format(len(data) - 1)
    print(output)
def display_account_summary(self):
    """Log in, parse the account page, and print a two-column summary table."""
    parsed = self.parse_response(self.login())
    rows = [(field, chalk.blue(value)) for field, value in parsed]
    summary = AsciiTable(rows)
    summary.inner_heading_row_border = False
    summary.inner_row_border = True
    print(chalk.blue("\nAccount Summary".upper()))
    print(summary.table)
async def _print_services(self, services, table_title=None):
    """Print a table of discovered services and their matched signatures."""
    rows = [["Id", "URL", "Title", "Server", "Matched Signature(s)"]]
    for entry in services:
        # Only a subset of the 11-column service record is displayed.
        service_id, url, _, _, _, title, server, _, _, matched_sigs, _ = entry
        rows.append([service_id, url, title, server, matched_sigs])
    svc_table = AsciiTable(rows)
    svc_table.inner_row_border = True
    svc_table.title = table_title
    print(svc_table.table)
def get_puzzle_as_str(puzzle):
    """Return the puzzle rendered as an ASCII table, first two columns centered."""
    rendered = AsciiTable(puzzle)
    rendered.inner_heading_row_border = False
    rendered.inner_row_border = True
    for col in (0, 1):
        rendered.justify_columns[col] = "center"
    return rendered.table
def group_minterms(self, mts=None):
    """
    Groups the elements in minterms according to the number of ones in
    their binary representation.

    Args:
        mts: A non empty list of minterms in binary form.

    Returns:
        A list of groups (lists) of minterms, ordered by the number of
        ones in their binary representation. Also appends a rendered
        grouping table to self.procedure as a side effect.
    """
    # BUG FIX: `mts=[]` was a shared mutable default, and the in-place
    # mts.sort() also mutated the caller's list; sort a copy instead
    # (sorting was only for presentation purposes).
    mts = sorted(mts) if mts is not None else []
    grps = []
    # maximum possible number of groups
    num_groups = len(mts[0])
    # ---- table shown in the procedure log ----
    table_data = [[
        Color('{autogreen}Group{/autogreen}'),
        Color('{autogreen}Minterms (decimal){/autogreen}'),
        Color('{autogreen}Minterms (binary){/autogreen}')
    ]]
    table = AsciiTable(table_data)
    table.inner_row_border = True
    self.procedure += Color(
        '{autoblue}==========================\nStep 1 : Grouping Minterms\n==========================\n{/autoblue}\n'
    )
    # ------------------------------------------
    for i in range(num_groups + 1):
        grp = [x for x in mts if x.count('1') == i]
        if grp:
            grps.append(grp)
    for i in range(len(grps)):
        # One table row per group; extra members share the cell via newlines.
        num_ones = grps[i][0].count('1')
        grp = grps[i]
        table_data.append([num_ones, str(int(grp[0], 2)), grp[0]])
        for j in range(1, len(grp)):
            table.table_data[i + 1][1] += "\n" + str(int(grp[j], 2))
            table.table_data[i + 1][2] += "\n" + grp[j]
    self.procedure += str(table.table)
    return grps
def print_matches(matches):
    """Print columns 2-5 of every match row as a headerless bordered table."""
    rows = [[str(m[2]), str(m[3]), str(m[4]), str(m[5])] for m in matches]
    report = AsciiTable(rows)
    report.inner_heading_row_border = False
    report.inner_row_border = True
    print(report.table)
def view_endpoints(): """Lists the endpoints defined Arguments: *sort by service *sort by stage """ table_data = [['Endpoint name', 'ip', 'pubport', 'url', 'mainservice', 'stackpointer', 'tree']] for endpoint in session.query(Endpoint).all(): subtree = view_endpoint_tree(endpoint) table_data.append([str(endpoint.name), str(endpoint.ip), str(endpoint.pubport), str(endpoint.url), str(endpoint.service.name), str(endpoint.stackpointer), subtree]) table = AsciiTable(table_data) table.inner_row_border = True print table.table
def test_multi_line():
    """Test multi-lined cells."""
    table_data = [
        ['Show', 'Characters'],
        ['Rugrats', 'Tommy Pickles, Chuckie Finster, Phillip DeVille, Lillian DeVille, Angelica Pickles,\nDil Pickles'],
        ['South Park', 'Stan Marsh, Kyle Broflovski, Eric Cartman, Kenny McCormick']
    ]
    table = AsciiTable(table_data)

    # Test defaults.
    actual = table.table
    expected = (
        '+------------+-------------------------------------------------------------------------------------+\n'
        '| Show       | Characters                                                                          |\n'
        '+------------+-------------------------------------------------------------------------------------+\n'
        '| Rugrats    | Tommy Pickles, Chuckie Finster, Phillip DeVille, Lillian DeVille, Angelica Pickles, |\n'
        '|            | Dil Pickles                                                                         |\n'
        '| South Park | Stan Marsh, Kyle Broflovski, Eric Cartman, Kenny McCormick                          |\n'
        '+------------+-------------------------------------------------------------------------------------+'
    )
    assert actual == expected

    # Test inner row border.
    table.inner_row_border = True
    actual = table.table
    expected = (
        '+------------+-------------------------------------------------------------------------------------+\n'
        '| Show       | Characters                                                                          |\n'
        '+------------+-------------------------------------------------------------------------------------+\n'
        '| Rugrats    | Tommy Pickles, Chuckie Finster, Phillip DeVille, Lillian DeVille, Angelica Pickles, |\n'
        '|            | Dil Pickles                                                                         |\n'
        '+------------+-------------------------------------------------------------------------------------+\n'
        '| South Park | Stan Marsh, Kyle Broflovski, Eric Cartman, Kenny McCormick                          |\n'
        '+------------+-------------------------------------------------------------------------------------+'
    )
    assert actual == expected

    # Justify right.
    # NOTE(review): cell-interior whitespace was reconstructed from a
    # whitespace-collapsed source; right alignment inferred from the
    # justify_columns setting — confirm against the library's own test suite.
    table.justify_columns = {1: 'right'}
    actual = table.table
    expected = (
        '+------------+-------------------------------------------------------------------------------------+\n'
        '| Show       |                                                                          Characters |\n'
        '+------------+-------------------------------------------------------------------------------------+\n'
        '| Rugrats    | Tommy Pickles, Chuckie Finster, Phillip DeVille, Lillian DeVille, Angelica Pickles, |\n'
        '|            |                                                                         Dil Pickles |\n'
        '+------------+-------------------------------------------------------------------------------------+\n'
        '| South Park |                          Stan Marsh, Kyle Broflovski, Eric Cartman, Kenny McCormick |\n'
        '+------------+-------------------------------------------------------------------------------------+'
    )
    assert actual == expected
def list(self, which):
    """Show a summary of new episodes ('new') or all podcasts ('pod')."""
    if which == 'new':
        self.print_summary_table()
    if which == 'pod':
        rows = [['id', 'title']]
        for podcast in PodcastTable.select():
            rows.append([str(podcast.id), podcast.title])
        podcast_table = AsciiTable(rows)
        podcast_table.inner_row_border = True
        print(podcast_table.table)
def view_stack(service, stackname=None): """View the stack container version and position and tree points Arguments: service: service object stackname: Name of stack if only one stack should be viewed """ table_data = [['Stackname', 'host', 'image', 'conatiners']] for stack in service.stacks: table_data.append([str(stack.name), str(stack.host), str(stack.image), "\n".join([c.name for c in stack.container])]) print table_data table = AsciiTable(table_data) table.inner_row_border = True print table.table
def view_containers(): services = session.query(Service).all() table = [] table.append(['Name', 'image', 'version', 'port', 'containerid']) for service in services: for stack in service.stacks: for container in stack.container: st = container.get_state() table.append([str(st['name']), str(st['image']), str(st['version']), str(st['port']), str(st['containerid'])[0:15]]) t = AsciiTable(table) t.inner_row_border = True print t.table
def list(self):
    """
    Get available stagers

    Usage: list [-h]
    """
    rows = [["Name", "Description"]]
    for stager in self.loaded:
        rows.append([stager.name, stager.description])
    stager_table = AsciiTable(rows, title="Available")
    stager_table.inner_row_border = True
    print(stager_table.table)
def show_qualitative(baseline, predicted, ignore_types=False):
    """Print a per-item qualitative comparison of baseline vs predicted changes.

    Each writing with a prediction gets a row (id, text, baseline changes,
    predicted changes, precision, recall), sorted by precision descending.
    Aggregate precision/recall means and standard deviations are printed at
    the end. Per-level and per-nationality breakdowns are accumulated but
    only the overall averages are reported here.

    NOTE(review): the `baseline`/`predicted` parameters are rebound inside
    the loop body; kept as-is to preserve behavior.
    """
    data = []
    baseline_dict = map_id_to_field(baseline, "changes")
    predicted_dict = map_id_to_field(predicted, "changes")
    writings_dict = map_id_to_field(baseline, "text")
    level_pred_dict = map_id_to_field(predicted, "level")
    level_base_dict = map_id_to_field(baseline, "level")
    nat_pred_dict = map_id_to_field(predicted, "nationality")
    nat_base_dict = map_id_to_field(baseline, "nationality")
    total_items = 0
    precisions = collections.defaultdict(lambda: [])
    recalls = collections.defaultdict(lambda: [])
    precisions_per_nat = collections.defaultdict(lambda: [])
    recalls_per_nat = collections.defaultdict(lambda: [])
    precision_list = []
    recall_list = []
    for id_, text in writings_dict.items():
        # Only score items that actually have a prediction.
        if id_ in predicted_dict:
            text = format_text(text)
            baseline = format_changes(baseline_dict[id_])
            predicted = format_changes(predicted_dict.get(id_, []))
            base = flatten2(id_, baseline_dict[id_], ignore_types)
            prediction = flatten2(id_, predicted_dict.get(id_, []), ignore_types)
            prec = "{}".format(precision(base, prediction))
            rec = "{}".format(recall(base, prediction))
            row = [id_, text, baseline, predicted, prec, rec]
            data.append(row)
            precision_list.append(float(prec))
            recall_list.append(float(rec))
            # Prefer the predicted item's level/nationality; fall back to
            # the baseline's, then to 0 / ''.
            level = int(level_pred_dict.get(id_, level_base_dict.get(id_, 0)))
            precisions[level].append(float(prec))
            recalls[level].append(float(rec))
            nat = nat_pred_dict.get(id_, nat_base_dict.get(id_, ''))
            precisions_per_nat[nat].append(float(prec))
            recalls_per_nat[nat].append(float(rec))
            total_items += 1
    # Sort rows best-precision first (column -2 is the precision string).
    data = sorted(data, key=lambda row: float(row[-2]), reverse=True)
    headers = ["id", "text", "baseline", "predicted", "precision", "recall"]
    data.insert(0, headers)
    table = AsciiTable(data)
    table.inner_row_border = True
    print(table.table)
    print("total items: ", total_items)
    print("average precision: {} (std: {})".format(mean(precision_list),
                                                   stdev(precision_list)))
    print("average recall: {} (std: {})".format(mean(recall_list),
                                                stdev(recall_list)))
def view_endpoint(endpointname, obj=None): """Prints out a single endpoint""" table_data = [['Endpoint name', 'ip', 'pubport', 'url', 'mainservice', 'stackpointer', 'tree']] if not obj: endpoint = session.query(Endpoint).filter(Endpoint.name.like(endpointname)).first() else: endpoint = obj if endpoint: print endpoint.get_state() subtree = view_endpoint_tree(endpoint) table_data.append([str(endpoint.name), str(endpoint.ip), str(endpoint.pubport), str(endpoint.url), str(endpoint.service.name), str(endpoint.stackpointer), subtree]) tree = AsciiTable(table_data) tree.inner_row_border = True print tree.table else: print "Endpoint not found"
def show_quantitative(annotated, predicted, ignore_types=False):
    """Print overall precision/recall of predicted changes against annotations,
    considering only items that were actually predicted."""
    annotated = map_id_to_field(annotated, "changes")
    predicted = map_id_to_field(predicted, "changes")
    # Drop annotated items with no matching prediction.
    for key in list(annotated.keys()):
        if key not in predicted:
            del annotated[key]
    #top_missed_symbols(annotated, predicted)
    annotated = flatten(annotated, ignore_types)
    predicted = flatten(predicted, ignore_types)
    metrics = [
        ["precision", "{}".format(precision(annotated, predicted))],
        ["recall", "{}".format(recall(annotated, predicted))],
    ]
    summary = AsciiTable(metrics)
    summary.inner_row_border = True
    print(summary.table)
def output_ascii_table(table_title=None, table_data=None, inner_heading_row_border=False, inner_footing_row_border=False, inner_row_border=False): """ @type table_title: unicode @type table_data: list @type inner_heading_row_border: bool @type inner_footing_row_border: bool @type inner_row_border: bool """ table = AsciiTable(table_data) table.inner_heading_row_border = inner_heading_row_border table.inner_row_border = inner_row_border table.inner_footing_row_border = inner_footing_row_border table.title = table_title print(table.table)
def test_multi_line():
    """Test multi-line tables."""
    # A cell containing an embedded newline must be rendered on two lines.
    table_data = [
        ['Show', 'Characters'],
        ['Rugrats', dedent('Tommy Pickles, Chuckie Finster, Phillip DeVille, Lillian DeVille, Angelica Pickles,\n'
                           'Susie Carmichael, Dil Pickles, Kimi Finster, Spike')],
        ['South Park', 'Stan Marsh, Kyle Broflovski, Eric Cartman, Kenny McCormick']
    ]
    table = AsciiTable(table_data)
    # Default rendering: no separator between data rows.
    expected = dedent("""\
        +------------+-------------------------------------------------------------------------------------+
        | Show       | Characters                                                                          |
        +------------+-------------------------------------------------------------------------------------+
        | Rugrats    | Tommy Pickles, Chuckie Finster, Phillip DeVille, Lillian DeVille, Angelica Pickles, |
        |            | Susie Carmichael, Dil Pickles, Kimi Finster, Spike                                  |
        | South Park | Stan Marsh, Kyle Broflovski, Eric Cartman, Kenny McCormick                          |
        +------------+-------------------------------------------------------------------------------------+""")
    assert expected == table.table

    # With inner_row_border, a separator appears between the two data rows
    # (but not between the two wrapped lines of a single multi-line cell).
    table.inner_row_border = True
    expected = dedent("""\
        +------------+-------------------------------------------------------------------------------------+
        | Show       | Characters                                                                          |
        +------------+-------------------------------------------------------------------------------------+
        | Rugrats    | Tommy Pickles, Chuckie Finster, Phillip DeVille, Lillian DeVille, Angelica Pickles, |
        |            | Susie Carmichael, Dil Pickles, Kimi Finster, Spike                                  |
        +------------+-------------------------------------------------------------------------------------+
        | South Park | Stan Marsh, Kyle Broflovski, Eric Cartman, Kenny McCormick                          |
        +------------+-------------------------------------------------------------------------------------+""")
    assert expected == table.table

    # Right-justifying the second column right-aligns every wrapped line.
    table.justify_columns = {1: 'right'}
    expected = dedent("""\
        +------------+-------------------------------------------------------------------------------------+
        | Show       |                                                                          Characters |
        +------------+-------------------------------------------------------------------------------------+
        | Rugrats    | Tommy Pickles, Chuckie Finster, Phillip DeVille, Lillian DeVille, Angelica Pickles, |
        |            |                                  Susie Carmichael, Dil Pickles, Kimi Finster, Spike |
        +------------+-------------------------------------------------------------------------------------+
        | South Park |                          Stan Marsh, Kyle Broflovski, Eric Cartman, Kenny McCormick |
        +------------+-------------------------------------------------------------------------------------+""")
    assert expected == table.table
def create_table(res_map): table_data = [['function', 'path_params', 'methods', 'query_params / body']] max_width = 30 _data = [] for key, value in res_map.items(): _function = key pp = re.findall(r"\{(\w+)\}", value['resource']) _path_params = '' if pp: _path_params = ",\n".join(pp) _methods = ",\n".join(value['methods']) _remarks = '\n'.join(wrap(value['remarks'], 30)) _data.append([_function, _path_params, _methods, _remarks]) table_data.extend(_data) table_instance = AsciiTable(table_data) table_instance.inner_heading_row_border = True table_instance.inner_row_border = True return table_instance
def options(self): """ Show selected stager options Usage: options [-h] """ if self.selected: table_data = [ ["Option Name", "Required", "Value", "Description"] ] for k, v in self.selected.options.items(): table_data.append([k, v["Required"], v["Value"], v["Description"]]) table = AsciiTable(table_data) table.inner_row_border = True print(table.table) else: print_bad("No stager selected")
def print_summary_table(self, items=None): table_headers = [['title', 'summary']] table_data = [] ascii_table = None if not items: items = EpisodeTable.select().where(EpisodeTable.new) for item in items: term = Terminal() summ = "" for line in textwrap.wrap( item.summary, term.width * 0.7, initial_indent=' ', subsequent_indent=' '): summ += line + "\n" table_data.append([item.podcast.title, summ]) ascii_table = AsciiTable(table_headers + table_data) ascii_table.inner_row_border = True if ascii_table: print(ascii_table.table) else: "Nothing to show"
def list(self, name: str): """ Show available modules Usage: list [<name>] [-h] Arguments: name filter by module name Options: -h, --help Show dis """ table_data = [ ["Name", "Description"] ] for m in self.loaded: table_data.append([m.name, m.description]) table = AsciiTable(table_data, title="Modules") table.inner_row_border = True print(table.table)
def errors_table(summary): """Create a formatted table of source file names and the number of build errors reported for that file. Parameters ---------- summary: Dictionary of source file names and number of build errors. Returns ------- Formatted table string. """ file_table = [['Errors in file:', 'Count']] for file in summary.keys(): file_table.append([file, summary[file]]) table = AsciiTable(file_table) table.inner_row_border = True return table.table + '\n'
def table(header, rows): if not HAVE_TERMTAB: print_error("Missing dependency, install terminaltables (`pip install terminaltables`)") return # TODO: Refactor this function, it is some serious ugly code. content = [] for l in [header] + rows: to_append = [] for a in l: if isinstance(a, bytes): if sys.version_info < (3, 4): a = a.decode('utf-8', 'ignore') else: a = a.decode('utf-8', 'backslashreplace') if not isinstance(a, six.text_type): a = six.text_type(a) to_append.append(a.replace('\t', ' ').replace('\v', '\\v')) content.append(to_append) t = AsciiTable(content) if not t.ok: t.inner_row_border = True longest_col = t.column_widths.index(max(t.column_widths)) max_length_col = t.column_max_width(longest_col) if max_length_col > 0: for i, content in enumerate(t.table_data): if len(content[longest_col]) > max_length_col: temp = '' for l in content[longest_col].splitlines(): if len(l) > max_length_col: temp += '\n'.join(textwrap.wrap(l, max_length_col)) + '\n' else: temp += l + '\n' content[longest_col] = temp.strip() t.table_data[i] = content return t.table
def get(self): loader = BookLogLoader(use_dropbox=True) accumulator = BookLogAccumulator() for log in loader.logs: accumulator.handle_log(log) def sanitize(value, width): rows = [] for row in value.split('\n'): row = '\n'.join(wrap(row, width)) rows.append(row) return '\n'.join(rows) def timestamp(x): book_id, record = x return datetime.strptime(record.last_updated, '%m/%d/%y, %I:%M %p') items = [['', 'ID', 'title', 'author', 'progress', 'notes']] widths = [17, 30, 30, 30, 30, 70] for book_id, book_record in sorted(accumulator.book_records.items(), key=timestamp, reverse=True): item = [ book_record.last_updated or '', book_id, book_record.title or '', book_record.author or '', ', '.join(book_record.progress), '\n'.join("{:>17s}: {}".format(note[0], note[1]) for note in book_record.notes), ] item = [sanitize(value, width) for value, width in zip(item, widths)] items.append(item) table = AsciiTable(items) table.inner_row_border = True self.writeln('<pre>') self.writeln( table.table ) self.writeln('</pre>')
def printable_summary(list_of_changed_files, status_cmake_configure,
                      status_make, status_make_install,
                      status_amazon_s3_upload, status_tests, summary_vera,
                      summary_cppcheck, summary_format, summary_pep8,
                      summary_errors, summary_warnings, number_of_errors,
                      number_of_warnings, number_of_tests_total,
                      number_of_tests_failed, ignore_vera, ignore_cppcheck,
                      ignore_format, ignore_pep8, exit_code):
    """Create an overall build summary in a printable format.

    Parameters
    ----------
    list_of_changed_files: List of changed source files.
    status_cmake_configure: Status of the 'CMake configure': True, False or None
    status_make: Status of the 'make': True, False or None
    status_make_install: Status of the 'make install': True, False or None
    status_amazon_s3_upload: Status of the Amazon S3 upload: True, False
    status_tests: Status of the test suite run: True, False or None
    summary_vera: Dictionary of dictionaries of VERA++ messages per file.
    summary_cppcheck: Dictionary of dictionaries of cppcheck messages per file.
    summary_format: Dictionary of dictionaries of clang-format messages
                    per file.
    summary_pep8: Dictionary of dictionaries of PEP8 messages per file.
    summary_errors: Dictionary of build error messages.
    summary_warnings: Dictionary of build warning messages.
    number_of_errors: Number of errors.
    number_of_warnings: Number of warnings.
    number_of_tests_total: Number of tests total.
    number_of_tests_failed: Number of tests failed.
    ignore_vera: Whether VERA++ messages are ignored for the status.
    ignore_cppcheck: Whether cppcheck messages are ignored for the status.
    ignore_format: Whether clang-format messages are ignored for the status.
    ignore_pep8: Whether PEP8 messages are ignored for the status.
    exit_code: Build exit code: 0 or 1.

    Returns
    -------
    Formatted build summary string.
    """
    header = """
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+          N E S T   T r a v i s   C I   B u i l d   S u m m a r y             +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
\n\n"""
    build_summary = header
    # Static-analysis section is emitted only when at least one analyser
    # produced a message.
    if get_num_msgs(summary_vera) > 0 or \
       get_num_msgs(summary_cppcheck) > 0 or \
       get_num_msgs(summary_format) > 0 or \
       get_num_msgs(summary_pep8) > 0:
        build_summary += '  S T A T I C   C O D E   A N A L Y S I S\n'

        # Create formatted per-file-tables of VERA++, Cppcheck, clang-format
        # and PEP8 messages.
        build_summary += code_analysis_per_file_tables(summary_vera,
                                                       summary_cppcheck,
                                                       summary_format,
                                                       summary_pep8)

    if number_of_warnings > 0:
        build_summary += '\n  W A R N I N G S\n'
        build_summary += warnings_table(summary_warnings)

    if number_of_errors > 0:
        build_summary += '\n  E R R O R S\n'
        build_summary += errors_table(summary_errors)

    build_summary += '\n\n  B U I L D   R E P O R T\n'
    # Row [1][1] is a placeholder — it is overwritten with the wrapped list
    # of changed files after the column width is known (see below).
    summary_table = [
        ['Changed Files :', ''],
        ['', 'No files have been changed.'],
        ['Static Code Analysis :', ''],
        ['VERA++',
         convert_summary_to_status_string(summary_vera, ignore_vera) + '\n' +
         '\nNumber of messages (MSGBLD0135): ' +
         str(get_num_msgs(summary_vera))],
        ['Cppcheck',
         convert_summary_to_status_string(summary_cppcheck, ignore_cppcheck) +
         '\n' + '\nNumber of messages (MSGBLD0155): ' +
         str(get_num_msgs(summary_cppcheck))],
        ['clang-format',
         convert_summary_to_status_string(summary_format, ignore_format) +
         '\n' + '\nNumber of messages (MSGBLD0175): ' +
         str(get_num_msgs(summary_format))],
        ['PEP8',
         convert_summary_to_status_string(summary_pep8, ignore_pep8) + '\n' +
         '\nNumber of messages (MSGBLD0195): ' +
         str(get_num_msgs(summary_pep8))],
        ['NEST Build :', ''],
        ['CMake configure',
         convert_bool_value_to_status_string(status_cmake_configure)],
        ['Make',
         convert_bool_value_to_status_string(status_make) + '\n' +
         '\nErrors  : ' + str(number_of_errors) +
         '\nWarnings: ' + str(number_of_warnings)],
        ['Make install',
         convert_bool_value_to_status_string(status_make_install)],
        ['Make installcheck',
         convert_bool_value_to_status_string(status_tests) + '\n' +
         '\nTotal number of tests : ' + str(number_of_tests_total) +
         '\nNumber of tests failed: ' + str(number_of_tests_failed)],
        ['Artifacts :', ''],
        ['Amazon S3 upload',
         convert_bool_value_to_yes_no_string(status_amazon_s3_upload)]
    ]
    table = AsciiTable(summary_table)
    table.inner_row_border = True
    max_width = table.column_max_width(1)

    # Bypass Travis issue:  ValueError: invalid width -29 (must be > 0)
    # (in the wrap() below max_width must be > 0)
    # The calculation of column_max_width is based on the returned terminal
    # width which sometimes seems to be zero resulting in a negative value.
    if max_width < 0:
        max_width = 70

    table.table_data[1][1] = '\n'.join(wrap(', '.join(list_of_changed_files),
                                            max_width))
    build_summary += table.table + '\n'

    if exit_code == 0:
        build_summary += '\nBUILD TERMINATED SUCCESSFULLY'
    else:
        build_summary += '\nBUILD FAILED'

    return build_summary
def code_analysis_per_file_tables(summary_vera, summary_cppcheck,
                                  summary_format, summary_pep8):
    """Create formatted per-file-tables of VERA++, Cppcheck, clang-format
    and PEP8 messages. Concatenate and return them.

    Parameters
    ----------
    summary_vera: Dictionary of dictionaries of VERA++ messages per file.
    summary_cppcheck: Dictionary of dictionaries of cppcheck messages per file.
    summary_format: Dictionary of dictionaries of clang-format messages
                    per file.
    summary_pep8: Dictionary of dictionaries of PEP8 messages per file.

    Returns
    -------
    Formatted tables string.
    """
    all_tables = ''

    # VERA++, cppcheck, clang-format
    if summary_vera is not None and summary_cppcheck is not None and \
       summary_format is not None:
        # Keys, i.e. file names, are identical in these dictionaries.
        # If this assertion raises an exception, please check travis_build.sh
        # which runs the Travis CI build.
        assert (summary_format.keys() == summary_cppcheck.keys())
        assert (summary_format.keys() == summary_vera.keys())

        # Again: Identical keys for clang-format, cppcheck and VERA++.
        for file in summary_format.keys():
            # '' sentinel: files with no messages contribute nothing to the
            # concatenated output.
            file_table = ''
            num_msgs_vera = get_num_msgs_for_file(file, summary_vera)
            num_msgs_cppcheck = get_num_msgs_for_file(file, summary_cppcheck)
            num_msgs_format = get_num_msgs_for_file(file, summary_format)
            if num_msgs_vera > 0 or \
               num_msgs_cppcheck > 0 or \
               num_msgs_format > 0:
                file_table = [['+ + + ' + file + ' + + +', '']]

                if num_msgs_vera > 0:
                    file_table.append(['VERA++ (MSGBLD0135):', 'Count'])
                    for message, count in summary_vera[file].items():
                        file_table.append([str(message), str(count)])

                if num_msgs_cppcheck > 0:
                    file_table.append(['Cppcheck (MSGBLD0155):', 'Count'])
                    for message, count in summary_cppcheck[file].items():
                        file_table.append([str(message), str(count)])

                if num_msgs_format > 0:
                    file_table.append(['clang-format (MSGBLD0175):', 'Count'])
                    for message, count in summary_format[file].items():
                        file_table.append([str(message), str(count)])

                # Render the accumulated rows to text for this file.
                table = AsciiTable(file_table)
                table.inner_row_border = True
                file_table = table.table + '\n'

            all_tables += file_table

    # PEP8
    if summary_pep8 is not None:
        for file in summary_pep8.keys():
            file_table = ''
            if get_num_msgs_for_file(file, summary_pep8) > 0:
                file_table = [['+ + + ' + file + ' + + +', '']]
                file_table.append(['PEP8 (MSGBLD0195):', 'Count'])
                for message, count in summary_pep8[file].items():
                    file_table.append([str(message), str(count)])

                table = AsciiTable(file_table)
                table.inner_row_border = True
                file_table = table.table + '\n'

            all_tables += file_table

    return all_tables
def do_it(pp): ids = [] table_data = [] headers = [] no_section = [] for h in pp["columns"]: headers.append(str(h)) table_data.append(headers) SOURCEKEY = pp["columns"].index("SOURCE") DESTKEY = pp["columns"].index("DESTINATION") SERVICEKEY = pp["columns"].index("SERVICE") for id in pp["ruleSections"]: ids.append(id[1]) for id in pp["ruleSections"]: for rule in pp["rules"]: rule = clean_rule(rule) if u"Any" in rule[SOURCEKEY] or u"Any" in rule[DESTKEY] or u"Any" in rule[SERVICEKEY] or u"Disabled" in rule[0]: if rule[len(rule)-1] == id[1]: table_data.append(rule) pp["rules"].remove(rule) elif rule[len(rule)-1] not in ids: if rule not in no_section: no_section.append(rule) pp["rules"].remove(rule) else: pass if len(table_data) > 1: new_table_data = clean_td(table_data) ascii = AsciiTable(table_data) single = SingleTable(table_data) ascii.inner_row_border = True single.inner_row_border = True a = ascii.table s = single.table write_output(a, id[0]) if (verbose): print "--- SECTION: %s ---" % id[0] print s table_data = [] table_data.append(headers) if len(no_section) > 0: table_data = [] headers = [] for h in pp["columns"]: headers.append(str(h)) table_data.append(headers) for rule in no_section: table_data.append(rule) table_data = clean_td(table_data) ascii = AsciiTable(table_data) single = SingleTable(table_data) single.inner_row_border = True ascii.inner_row_border = True a = ascii.table s = single.table write_output(a, "(NO SECTION DEFINED)") if (verbose): print "--- NO SECTION DEFINED (THESE ARE USUALLY AT THE TOP) ---" print s n = name + "-myCVT-output.txt" print "\033[1;32m[+] Written output to file ./%s\n" % n
myScoreB.logger.debug('Row: %i :: Test Cap: %s :: Col: %i :: Ground Truth Label: %s' % (idx_r, row_test_cap.test_sample_pcap_name, idx_c, col_grnd.ground_truth_label)) if col_grnd.ground_truth_label not in header_row: header_row.append(col_grnd.ground_truth_label) score_string = '' for idx_3, dim3 in enumerate(col_grnd.stat_scores): myScoreB.logger.debug('Row: %i :: Test Cap: %s :: Col: %i :: Ground Truth Label: %s ::' ' Stat: %i :: %s : %8.5f' % (idx_r, row_test_cap.test_sample_pcap_name, idx_c, col_grnd.ground_truth_label, idx_3, dim3.stat_name, dim3.score)) score_string += str(dim3.stat_name + ' : ' + str(dim3.score) + '\n') myScoreB.logger.debug('Score String: %s' % score_string.replace('\n', ':::')) single_row.append(score_string) myScoreB.logger.debug('Current Length of Row: %i' % len(single_row)) myScoreB.logger.debug('Row item 1: %s' % single_row[0]) myScoreB.logger.debug('Row item 2: %s' % single_row[1]) table_data.append(single_row) myScoreB.logger.debug("Header Row Len : %i" % len(header_row)) table_data.append(header_row) print('SCORES table:') myTable = AsciiTable(table_data) myTable.inner_row_border = True print(myTable.table) print('PREDICTIONS table:')
from terminaltables import AsciiTable, SingleTable, DoubleTable, GithubFlavoredMarkdownTable table_data = [["Name", "Size"], ["test1", "512M"], ["test2", "1G"]] ascii_table = AsciiTable(table_data) print ascii_table.table ascii_table.inner_row_border = True print ascii_table.table print single_table = SingleTable(table_data) print single_table.table single_table.inner_row_border = True print single_table.table single_table.title = "List" print single_table.table print gfw_table = GithubFlavoredMarkdownTable(table_data) print gfw_table.table """ +-------+------+ | Name | Size | +-------+------+