def allMode(self, inputt):
    TABLE_DATA = [['All Alice intents']]
    table_instance = DoubleTable(TABLE_DATA)
    self.write('\n' + table_instance.table + '\n', 'yellow')

    TABLE_DATA = [['Intent', 'D', 'Description', 'Example']]
    table_instance = DoubleTable(TABLE_DATA)
    table_instance.justify_columns[1] = 'center'

    for dtIntentName in self._intentNameSkillMatching:
        tDesc = self._intentsModulesValues[dtIntentName]['__otherattributes__']['description']
        tEnabledByDefault = self._intentsModulesValues[dtIntentName]['__otherattributes__']['enabledByDefault']
        tUtterance = random.choice(list(self._intentsModulesValues[dtIntentName]['utterances']))

        if not inputt.getOption('full'):
            tDesc = (tDesc[:self.DESCRIPTION_MAX] + '..') if len(tDesc) > self.DESCRIPTION_MAX else tDesc
            tUtterance = (tUtterance[:self.DESCRIPTION_MAX] + '..') if len(tUtterance) > self.DESCRIPTION_MAX else tUtterance

        TABLE_DATA.append([
            dtIntentName,
            'X' if tEnabledByDefault else '',
            '-' if not tDesc else tDesc,
            '-' if not tUtterance else tUtterance
        ])

    self.write(table_instance.table)
def execute(self, inputt):
    TABLE_DATA = [['Authors List']]
    table_instance = DoubleTable(TABLE_DATA)
    self.write('\n' + table_instance.table + '\n', 'magenta')

    TABLE_DATA = [['Name']]
    table_instance = DoubleTable(TABLE_DATA)

    try:
        req = requests.get('https://api.github.com/' + ModuleManager.GITHUB_API_BASE_URL)

        if req.status_code == 403:
            self.write('<bg:red> Github API quota limitations reached<bg:reset>\n')
            return

        result = req.content
        authors = json.loads(result.decode())

        for author in authors:
            TABLE_DATA.append([author['name']])
    except Exception:
        self.write('Error listing authors', 'red')
        raise

    self.write(table_instance.table)
def execute(self, inputt):
    TABLE_DATA = [['Modules created by ' + inputt.getArgument('authorName')]]
    table_instance = DoubleTable(TABLE_DATA)
    self.write('\n' + table_instance.table + '\n', 'yellow')

    TABLE_DATA = [['Name', 'Version', 'Langs', 'Description']]
    table_instance = DoubleTable(TABLE_DATA)

    try:
        req = requests.get('https://api.github.com/' + ModuleManager.GITHUB_API_BASE_URL + '/' + inputt.getArgument('authorName'))

        if req.status_code == 403:
            self.write('<bg:red> Github API quota limitations reached<bg:reset>\n')
            return
        elif req.status_code // 100 == 4:
            self.write('> Unknown author <fg:red>' + inputt.getArgument('authorName') + '<fg:reset>')
            self.write('- You can use <fg:yellow>author:list<fg:reset> to list all authors\n')
            return

        result = req.content
        modules = json.loads(result.decode())

        for module in modules:
            moduleInstallFile = module['html_url'] \
                .replace('github.com', 'raw.githubusercontent.com') \
                .replace('/blob', '') \
                .replace('/tree', '') \
                + '/' + module['name'] + '.install'

            try:
                req = requests.get(moduleInstallFile)
                result = req.content
                moduleDetails = json.loads(result.decode())
                tLangs = '|'.join(moduleDetails['conditions']['lang']) if 'lang' in moduleDetails['conditions'] else '-'
                tDesc = moduleDetails['desc']

                if not inputt.getOption('full'):
                    tDesc = (tDesc[:self.DESCRIPTION_MAX] + '..') if len(tDesc) > self.DESCRIPTION_MAX else tDesc

                TABLE_DATA.append([
                    moduleDetails['name'],
                    moduleDetails['version'],
                    tLangs,
                    tDesc
                ])
            except Exception:
                self.write('Error getting module {}'.format(module['name']), 'red')
                raise
    except Exception:
        self.write('Error listing modules', 'red')
        raise

    self.write(table_instance.table)
def print_probabilities(self):
    """
    Prints a copy of the current probabilities. Used for convenient checking in a
    command line environment. For dictionaries containing the raw values, use the
    `p_*` attributes.

    :return:
    """
    # create copies to avoid editing
    p_initial = copy.deepcopy(self.p_initial)
    p_emission = copy.deepcopy(self.p_emission)
    p_transition = copy.deepcopy(self.p_transition)

    # convert to nested lists for clean printing
    p_initial = [[str(s)] + [str(round(p_initial[s], 3))] for s in self.states]
    p_emission = [
        [str(s)] + [str(round(p_emission[s][e], 3)) for e in self.emissions]
        for s in self.states
    ]
    p_transition = [
        [str(s1)] + [str(round(p_transition[s1][s2], 3)) for s2 in self.states]
        for s1 in self.states
    ]
    p_initial.insert(0, ["S_i", "Y_0"])
    p_emission.insert(0, ["S_i \\ E_j"] + [str(e) for e in self.emissions])
    p_transition.insert(0, ["S_i \\ S_j"] + [str(s) for s in self.states])

    # format tables
    ti = DoubleTable(p_initial, "Starting state probabilities")
    te = DoubleTable(p_emission, "Emission probabilities")
    tt = DoubleTable(p_transition, "Transition probabilities")
    te.padding_left = 1
    te.padding_right = 1
    tt.padding_left = 1
    tt.padding_right = 1
    te.justify_columns[0] = "right"
    tt.justify_columns[0] = "right"

    # print tables
    print("\n")
    print(ti.table)
    print("\n")
    print(te.table)
    print("\n")
    print(tt.table)
    print("\n")

    # return None
def print_fit_parameters(self):
    """
    Prints a copy of the current state counts. Used for convenient checking in a
    command line environment. For dictionaries containing the raw values, use the
    `n_*` attributes.

    :return:
    """
    # create copies to avoid editing
    n_initial = copy.deepcopy(self.n_initial)
    n_emission = copy.deepcopy(self.n_emission)
    n_transition = copy.deepcopy(self.n_transition)

    # make nested lists for clean printing
    initial = [[str(s)] + [str(n_initial[s])] for s in self.states]
    initial.insert(0, ["S_i", "Y_0"])
    emissions = [[str(s)] + [str(n_emission[s][e]) for e in self.emissions] for s in self.states]
    emissions.insert(0, ["S_i \\ E_j"] + list(map(str, self.emissions)))
    transitions = [[str(s1)] + [str(n_transition[s1][s2]) for s2 in self.states] for s1 in self.states]
    transitions.insert(0, ["S_i \\ S_j"] + list(map(str, self.states)))

    # format tables
    ti = DoubleTable(initial, "Starting state counts")
    te = DoubleTable(emissions, "Emission counts")
    tt = DoubleTable(transitions, "Transition counts")
    ti.padding_left = 1
    ti.padding_right = 1
    te.padding_left = 1
    te.padding_right = 1
    tt.padding_left = 1
    tt.padding_right = 1
    ti.justify_columns[0] = "right"
    te.justify_columns[0] = "right"
    tt.justify_columns[0] = "right"

    # print tables
    print("\n")
    print(ti.table)
    print("\n")
    print(te.table)
    print("\n")
    print(tt.table)
    print("\n")

    # return None
def execute(self, inputt):
    TABLE_DATA = [['Assistant Downloader']]
    table_instance = DoubleTable(TABLE_DATA)
    self.write('\n' + table_instance.table + '\n', 'green')

    languageManager = LanguageManager()
    languageManager.onStart()

    threadManager = ThreadManager()
    threadManager.onStart()

    protectedIntentManager = ProtectedIntentManager()
    protectedIntentManager.onStart()

    databaseManager = DatabaseManager()
    databaseManager.onStart()

    userManager = UserManager()
    userManager.onStart()

    moduleManager = ModuleManager()
    moduleManager.onStart()

    snipsConsoleManager = SnipsConsoleManager()
    snipsConsoleManager.onStart()

    self.write('It may take some time...')
    snipsConsoleManager.download(languageManager.activeSnipsProjectId)

    self.nl()
    self.nl()
    self.write('Assistant <fg:green>downloaded!<fg:reset>')
    self.nl()
def home():
    # Configure the network interface and gateway...
    global up_interface
    up_interface = open('/opt/dscan/tools/files/iface.txt', 'r').read()
    up_interface = up_interface.replace("\n", "")
    if up_interface == "0":
        up_interface = os.popen("route | awk '/Iface/{getline; print $8}'").read()
        up_interface = up_interface.replace("\n", "")

    global gateway
    gateway = open('/opt/dscan/tools/files/gateway.txt', 'r').read()
    gateway = gateway.replace("\n", "")
    if gateway == "0":
        gateway = os.popen("ip route show | grep -i 'default via'| awk '{print $3 }'").read()
        gateway = gateway.replace("\n", "")

    n_name = os.popen('iwgetid -r').read()  # Get wireless network name
    n_mac = os.popen("ip addr | grep 'state UP' -A1 | tail -n1 | awk '{print $2}' | cut -f1 -d'/'").read()  # Get network MAC
    n_ip = os.popen("hostname -I").read()  # Local IP address
    n_host = os.popen("hostname").read()  # Hostname

    # -----------------------------------------------------------#
    if not os.geteuid() == 0:
        sys.exit("\n[!] DonkeyScanner must be run as root. ¯\\_(ツ)_/¯ \n")

    author = "CR4CKB0X (9R4M4N K4SL1W4L)"
    print(xe_header())
    print("\t\t\t\t\t\t A complete Network-MAP-per\n")
    print("Created By: " + author + "\nGit Account: https://github.com/Praman1997")
    print("--------------------------------------------------------------------------")
    print("\n\n")

    # Printing the network configuration
    table = [["IP Address", "MAC Address", "Gateway", "Iface", "Hostname"],
             ["", "", "", "", ""],
             [n_ip, n_mac.upper(), gateway, up_interface, n_host]]
    table = DoubleTable(table)
    print(table.table)
    print("----------------------------\n\n")
def seleccionar_ciudades_validas(self, term, tipo_ciudad):
    """Validate the city name based on the user's input."""
    ciudades_posibles = self.motor.obtener_ciudades_posibles(term)
    posicion = -1

    table_data = [["Opcion", "Ciudades"]]
    for id, ciudad in sorted(ciudades_posibles.items()):
        table_data.append([id, ciudad])

    table = DoubleTable(table_data, "Seleccion de ciudades disponibles")
    table.justify_columns = {0: 'center', 1: 'left'}
    print(table.table, "\n")

    while posicion == -1:
        try:
            print('Confirme la opción de la ciudad ingresada. Oprima 0 (Cero) para ingresarla nuevamente.')
            numero = int(input("Número de opción correcta: "))
            if numero == 0:
                return self.obtener_nombre_de_ciudad(tipo_ciudad)
            elif len(ciudades_posibles) >= numero >= 1:
                posicion = numero
            else:
                print("Debe ingresar un numero entre 1 y " + str(len(ciudades_posibles)))
        except ValueError:
            print("Debe ingresar un numero entre 1 y " + str(len(ciudades_posibles)))

    return ciudades_posibles[posicion]
def print_table(title, data):
    # Column headers (Russian): programming language, vacancies found, vacancies processed, average salary
    header = ('Язык программирования', 'Вакансий найдено',
              'Вакансий обработано', 'Средняя зарплата')
    data.insert(0, header)
    table_instance = DoubleTable(data, title)
    table_instance.justify_columns[2] = 'right'
    print(table_instance.table)
    print()
def get_lab_info(self, lab_hash=None, machine_name=None, all_users=False):
    if all_users:
        raise NotSupportedError("Cannot use `--all` flag.")

    if lab_hash:
        lab_hash = lab_hash.lower()

    table_header = ["LAB HASH", "DEVICE NAME", "STATUS", "ASSIGNED NODE"]
    stats_table = DoubleTable([])
    stats_table.inner_row_border = True

    while True:
        machines_stats = self.k8s_machine.get_machines_info(lab_hash=lab_hash, machine_filter=machine_name)

        machines_data = [table_header]
        for machine_stats in machines_stats:
            machines_data.append([machine_stats["real_lab_hash"],
                                  machine_stats["name"],
                                  machine_stats["status"],
                                  machine_stats["assigned_node"]
                                  ])

        stats_table.table_data = machines_data

        yield "TIMESTAMP: %s" % datetime.now() + "\n\n" + stats_table.table
def assistant():
    """Update the voice assistant by retraining"""
    _logger = logging.getLogger('ProjectAlice')
    _logger.setLevel(logging.INFO)
    _logger.addHandler(logging.StreamHandler())

    TABLE_DATA = [['Assistant Downloader']]
    table_instance = DoubleTable(TABLE_DATA)
    click.secho(f'\n{table_instance.table}\n', fg='green')

    superManager = SuperManager(None)
    superManager.initManagers()
    superManager.onStart()

    snipsConsoleManager = superManager.getManager('SnipsConsoleManager')
    languageManager = superManager.getManager('LanguageManager')

    downloaded = snipsConsoleManager.download(languageManager.activeSnipsProjectId)

    if downloaded:
        click.echo(f"\n\nAssistant {click.style('downloaded!', fg='green')}\n")
    else:
        click.echo(f"\n\nAssistant {click.style('download failed', fg='red')}\n", err=True)
def main():
    """Main function."""
    title = 'Jetta SportWagen'

    # AsciiTable.
    table_instance = AsciiTable(TABLE_DATA, title)
    table_instance.justify_columns[2] = 'right'
    print(table_instance.table)
    print()

    # SingleTable.
    table_instance = SingleTable(TABLE_DATA, title)
    table_instance.justify_columns[2] = 'right'
    print(table_instance.table)
    print()

    # DoubleTable.
    table_instance = DoubleTable(TABLE_DATA, title)
    table_instance.justify_columns[2] = 'right'
    print(table_instance.table)
    print()

    # BorderlessTable.
    table_instance = BorderlessTable(TABLE_DATA, title)
    table_instance.justify_columns[2] = 'right'
    print(table_instance.table)
    print()
def get_host_reputation_table(response_list):
    data_list = []
    header = [
        'Verdict',
        'Threat Status',
        'Threat Name',
        'Threat Type',
        'First Seen',
        'Last Seen',
    ]
    data_list.append(header)

    response = response_list[0]
    threat_data = response.get('threatData')
    data = [
        threat_data.get('verdict'),
        threat_data.get('threatStatus'),
        threat_data.get('threatName'),
        threat_data.get('threatType'),
        threat_data.get('firstSeen'),
        threat_data.get('lastSeen'),
    ]
    data_list.append(data)

    host_reputation = DoubleTable(data_list)
    host_reputation.padding_left = 1
    host_reputation.padding_right = 1
    host_reputation.inner_column_border = True
    host_reputation.inner_row_border = True

    return host_reputation.table
def archs(self):
    filtered = []
    table = []
    archs = sorted(list(self.executor.get_archs()))
    cur = [archs[0]]
    loc_max = MN_INF

    # Group adjacent, similar architecture names into columns
    for pos in range(1, len(archs)):
        if self.__is_similar(archs[pos], archs[pos - 1]):
            cur.append(archs[pos])
        else:
            loc_max = max(loc_max, len(cur))
            filtered.append(['<cyan>{}</>'.format(x) for x in cur])
            cur = [archs[pos]]
    filtered.append(['<cyan>{}</>'.format(x) for x in cur])
    loc_max = max(loc_max, len(cur))

    table.append(['\r'] * len(filtered))

    # Build the table row by row, padding shorter columns with empty cells
    for i in range(loc_max):
        cur_row = []
        for j in range(len(filtered)):
            cur_row.append('' if i >= len(filtered[j]) else make_colors(filtered[j][i]))
        table.append(cur_row)

    rtable = DoubleTable(table)
    rtable.inner_heading_row_border = False
    return rtable.table
def scan(ip):
    print("[+] Mapping your network ...")
    scan = os.popen("nmap " + ip + " -n -sP ").read()
    output = "nmap-scan-ip.txt"
    with open(output, 'w') as f:
        f.write(scan)

    devices = os.popen(" grep report " + output + " | awk '{print $5}'").read()
    my_mac = os.popen(
        "ip addr | grep 'state UP' -A1 | tail -n1 | awk '{print $2}' | cut -f1 -d'/'"
    ).read().upper() if system() != 'Windows' else 'Unknown'
    devices_mac = os.popen("grep MAC " + output + " | awk '{print $3}'").read() + my_mac
    devices_name = os.popen("grep MAC " + output + " | awk '{print $4 ,S$5 $6}'").read() + "\033[1;32m(This device)\033[1;m"

    table_data = [['IP Address', 'Mac Address', 'Manufacturer'],
                  [devices, devices_mac, devices_name]]
    table = DoubleTable(table_data)

    print("\033[1;95m[+]===========[ Devices found on your network ]==========[+]\n\033[1;m")
    print(table.table)
def test_single_line():
    """Test single-lined cells."""
    table_data = [
        ['Name', 'Color', 'Type'],
        ['Avocado', 'green', 'nut'],
        ['Tomato', 'red', 'fruit'],
        ['Lettuce', 'green', 'vegetable'],
        ['Watermelon', 'green'],
        [],
    ]
    table = DoubleTable(table_data, 'Example')
    table.inner_footing_row_border = True
    table.justify_columns[0] = 'left'
    table.justify_columns[1] = 'center'
    table.justify_columns[2] = 'right'
    actual = table.table

    expected = (
        u'\u2554Example\u2550\u2550\u2550\u2550\u2550\u2566\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2566\u2550\u2550'
        u'\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2557\n'
        u'\u2551 Name        \u2551 Color \u2551      Type \u2551\n'
        u'\u2560\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u256c\u2550\u2550\u2550\u2550'
        u'\u2550\u2550\u2550\u256c\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2563\n'
        u'\u2551 Avocado     \u2551 green \u2551       nut \u2551\n'
        u'\u2551 Tomato      \u2551  red  \u2551     fruit \u2551\n'
        u'\u2551 Lettuce     \u2551 green \u2551 vegetable \u2551\n'
        u'\u2551 Watermelon  \u2551 green \u2551           \u2551\n'
        u'\u2560\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u256c\u2550\u2550\u2550\u2550'
        u'\u2550\u2550\u2550\u256c\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2563\n'
        u'\u2551             \u2551       \u2551           \u2551\n'
        u'\u255a\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2569\u2550\u2550\u2550\u2550'
        u'\u2550\u2550\u2550\u2569\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u255d'
    )
    assert actual == expected
def get_lab_info(self, lab_hash=None, machine_name=None, all_users=False):
    user_name = utils.get_current_user_name() if not all_users else None

    machine_streams = self.docker_machine.get_machines_info(lab_hash, machine_filter=machine_name, user=user_name)

    table_header = ["LAB HASH", "USER", "DEVICE NAME", "STATUS", "CPU %", "MEM USAGE / LIMIT", "MEM %", "NET I/O"]
    stats_table = DoubleTable([])
    stats_table.inner_row_border = True

    while True:
        machines_data = [table_header]

        try:
            result = next(machine_streams)
        except StopIteration:
            return

        if not result:
            return

        for machine_stats in result:
            machines_data.append([machine_stats['real_lab_hash'],
                                  machine_stats['user'],
                                  machine_stats['name'],
                                  machine_stats['status'],
                                  machine_stats['cpu_usage'],
                                  machine_stats['mem_usage'],
                                  machine_stats['mem_percent'],
                                  machine_stats['net_usage']
                                  ])

        stats_table.table_data = machines_data

        yield "TIMESTAMP: %s" % datetime.now() + "\n\n" + stats_table.table
def execute(self, inputt):
    TABLE_DATA = [['Assistant Dialog Templates Sync']]
    table_instance = DoubleTable(TABLE_DATA)
    self.write('\n' + table_instance.table + '\n', 'green')

    languageManager = LanguageManager()
    languageManager.onStart()

    threadManager = ThreadManager()
    threadManager.onStart()

    snipsConsoleManager = SnipsConsoleManager()
    snipsConsoleManager.onStart()

    samkillaManager = SamkillaManager()

    self.write('It may take some time...')
    changes = samkillaManager.sync(download=False)
    self.nl()

    if changes:
        self.write('There are <fg:green>changes<fg:reset>')
    else:
        self.write('There are no <fg:red>changes<fg:reset>')

    self.nl()
    self.write('All dialog templates <fg:green>synced!<fg:reset>')
    self.nl()

    if inputt.getOption('download'):
        snipsConsoleManager.download(languageManager.activeSnipsProjectId)
        self.write('Downloading assistant...')
        self.nl()
        self.write('Assistant <fg:green>downloaded!<fg:reset>')
        self.nl()
def main(path_name):
    performance = dict()

    path = Path(path_name)
    for summary_path in path.glob('*/summary.csv'):
        name = summary_path.parent.name
        match = re.search('^(?P<suite_name>.*Town.*-v[0-9]+.*)_seed(?P<seed>[0-9]+)', name)
        suite_name = match.group('suite_name')
        seed = match.group('seed')

        summary = pd.read_csv(summary_path)

        if suite_name not in performance:
            performance[suite_name] = dict()

        performance[suite_name][seed] = (summary['success'].sum(), len(summary))

    table_data = []
    for suite_name, seeds in performance.items():
        successes, totals = np.array(list(zip(*seeds.values())))
        rates = successes / totals * 100

        if len(seeds) > 1:
            table_data.append([
                suite_name,
                "%.1f ± %.1f" % (np.mean(rates), np.std(rates, ddof=1)),
                "%d/%d" % (sum(successes), sum(totals)),
                ','.join(sorted(seeds.keys()))
            ])
        else:
            table_data.append([
                suite_name,
                "%d" % np.mean(rates),
                "%d/%d" % (sum(successes), sum(totals)),
                ','.join(sorted(seeds.keys()))
            ])

    table_data = sorted(table_data, key=lambda row: row[0])
    table_data = [('Suite Name', 'Success Rate', 'Total', 'Seeds')] + table_data

    table = DoubleTable(table_data, "Performance of %s" % path.name)
    print(table.table)
def print_vulnerability(cls, vulnerability: Vulnerabilities):
    """
    print_vulnerability takes a vulnerability, and well, it prints it
    """
    cvss_score = vulnerability.get_cvss_score()

    table_data = [
        ["ID", vulnerability.get_id()],
        ["Title", vulnerability.get_title()],
        ["Description", '\n'.join(wrap(vulnerability.get_description(), 100))],
        ["CVSS Score", f"{vulnerability.get_cvss_score()} - {cls.get_cvss_severity(cvss_score)}"],
    ]

    if vulnerability.get_cvss_vector():
        table_data.append(["CVSS Vector", vulnerability.get_cvss_vector()])

    table_data.extend([
        ["CVE", vulnerability.get_cve()],
        ["Reference", vulnerability.get_reference()]
    ])

    table_instance = DoubleTable(table_data)
    table_instance.inner_heading_row_border = False
    table_instance.inner_row_border = True

    cls.do_print(table_instance.table, cvss_score)
    print("----------------------------------------------------")
def scan():
    config0()
    scan = os.popen("nmap " + gateway + "/24 -n -sP ").read()
    f = open('/opt/xerosploit/tools/log/scan.txt', 'w')
    f.write(scan)
    f.close()

    devices = os.popen(" grep report /opt/xerosploit/tools/log/scan.txt | awk '{print $5}'").read()

    # Get device MAC addresses plus the localhost MAC address
    devices_mac = os.popen("grep MAC /opt/xerosploit/tools/log/scan.txt | awk '{print $3}'").read() + os.popen("ip addr | grep 'state UP' -A1 | tail -n1 | awk '{print $2}' | cut -f1 -d'/'").read().upper()
    devices_name = os.popen("grep MAC /opt/xerosploit/tools/log/scan.txt | awk '{print $4 ,S$5 $6}'").read() + "\033[1;32m(This device)\033[1;m"

    table_data = [
        ['IP Address', 'Mac Address', 'Manufacturer'],
        [devices, devices_mac, devices_name]
    ]
    table = DoubleTable(table_data)

    # Show devices found on your network
    print("\033[1;36m[+]═══════════[ Devices found on your network ]═══════════[+]\n\033[1;m")
    print(table.table)

    target_ip()
def get_table(self, arch, pattern, colored=False, verbose=False):
    '''
    Used by the sys command (when the user wants to find a specific syscall).

    :param arch: architecture for the syscall table
    :param pattern: search pattern
    :param colored: flag for colored output
    :param verbose: flag for verbose output
    :return: a printable table of matched syscalls
    '''
    if pattern is not None and pattern != '':
        rawtable = self.search(arch, pattern)
    else:
        rawtable = self.tables[arch]

    if len(rawtable) == 0:
        return None

    used_hd = self.__fetch_used_headers(rawtable, verbose)
    table = [self.__make_colored_row(used_hd, 'yellow,bold', upper=True) if colored else used_hd]

    for command in rawtable:
        cur_tb_field = []
        for hd in used_hd:
            value = command[hd]
            cur_tb_field.append(self.__make_colored_field(value, hd, verbose=verbose))
        table.append(cur_tb_field)

    return DoubleTable(table)
def display_tabled_gads(authors, rendered_gad_months, title, border, width=0):
    """Display a table of gads per author according to gads_render_func."""
    # Note: this snippet is Python 2 (izip, xrange, print statement).
    if len(authors) <= 1:
        width = 1
    elif width == 0:
        gad_width = max([non_ansi_len(l) for l in rendered_gad_months[1].splitlines()])
        author_width = max([non_ansi_len(a) for a in authors])
        auto_width = (MAX_WIDTH - 1) / (max(gad_width, author_width) + 3)
        width = max(1, auto_width)

    table_data = list(
        chain.from_iterable(
            izip(
                [authors[i:i + width] for i in xrange(0, len(authors), width)],
                [rendered_gad_months[i:i + width] for i in xrange(0, len(rendered_gad_months), width)]
            )))

    if border == "ascii":
        display_table = AsciiTable(table_data)
    elif border == "single":
        display_table = SingleTable(table_data)
    elif border == "double":
        display_table = DoubleTable(table_data)
    else:
        exit(1)

    display_table.inner_row_border = True
    display_table.inner_column_border = True
    display_table.title = title

    sys.stdout.write(fg(DEFAULT_COLOR))
    print display_table.table.encode('utf-8'), attr(0).encode('utf-8')
def terminal_result(summary):
    def ident(value):
        return str(value)

    def pct_format(value):
        return str(round(value * 100, 2)) + ' %'

    projections = [
        ('Attacks', lambda s: s.attack_runs, ident),
        ('Attacker', None, ident),
        ('Casualties', lambda s: s.attacker_casualties.units, ident),
        ('Casualties %', lambda s: s.attacker_casualties.ratio, pct_format),
        ('Survivors', lambda s: s.attacker_casualties.survivors, ident),
        ('Defender', None, ident),
        ('Casualties', lambda s: s.defender_casualties.units, ident),
        ('Casualties %', lambda s: s.defender_casualties.ratio, pct_format),
        ('Survivors', lambda s: s.defender_casualties.survivors, ident),
    ]

    table_data = [['Metric'] + [x for x in Stats.selector]]
    for name, p, f in projections:
        table_data.append([name] + [
            f(m(p(summary))) if p is not None else '--'
            for m in Stats.selector.values()
        ])

    return DoubleTable(
        table_data,
        'Success Rate: {} %'.format(round(summary.victorious.average * 100, 2))
    ).table
def get_lab_info(self, recursive, lab_hash=None, machine_name=None, all_users=False):
    user_name = utils.get_current_user_name() if not all_users else None

    if not recursive:
        machines = self.docker_machine.get_machines_by_filters(lab_hash=lab_hash,
                                                               machine_name=machine_name,
                                                               user=user_name
                                                               )
    else:
        machines = self.docker_machine.get_machines_by_filters_rec(lab_hash=lab_hash,
                                                                   machine_name=machine_name,
                                                                   user=user_name
                                                                   )

    if not machines:
        if not lab_hash:
            raise Exception("No machines running.")
        else:
            raise Exception("Lab is not started.")

    machines = sorted(machines, key=lambda x: x.name)

    machine_streams = {}
    for machine in machines:
        machine_streams[machine] = machine.stats(stream=True, decode=True)

    table_header = ["LAB HASH", "USER", "MACHINE NAME", "STATUS", "CPU %", "MEM USAGE / LIMIT", "MEM %", "NET I/O"]
    stats_table = DoubleTable([])
    stats_table.inner_row_border = True

    while True:
        machines_data = [table_header]

        for (machine, machine_stats) in machine_streams.items():
            real_name = machine.labels['name']
            if recursive:
                path = machine.exec_run('hostname')[1].decode('utf-8')
                real_name_split = path.split('.')
                real_name = '.'.join(real_name_split[:-1])

            try:
                result = next(machine_stats)
            except StopIteration:
                continue

            stats = self._get_aggregate_machine_info(result)
            machines_data.append([machine.labels['lab_hash'],
                                  machine.labels['user'],
                                  real_name,
                                  machine.status,
                                  stats["cpu_usage"],
                                  stats["mem_usage"],
                                  stats["mem_percent"],
                                  stats["net_usage"]
                                  ])

        stats_table.table_data = machines_data

        yield "TIMESTAMP: %s" % datetime.now() + "\n\n" + stats_table.table
def echo_table(table, **kwargs):
    t = DoubleTable([[safe_str(cell) for cell in row] for row in table], **kwargs)
    t.inner_row_border = True
    click.echo(t.table)
def __repr__(self):
    list_data = [('Key ID', 'User Name', 'User Email', 'User Comment')]
    for k in self._store:
        for i in k.userids:
            list_data.append((k.fingerprint.keyid, i.name, i.email, i.comment))
    table = DoubleTable(list_data)
    return f'''\
def print_result(result):
    tableData = []
    tableHead = ["Title(name)", "City", "Link / Meetup / Date"]
    tableData.append(tableHead)

    prevTitle = None
    for row in result:
        name = "(" + row[0] + ")"
        title = colored(row[1], 'green')
        city = colored(row[2], 'cyan')
        try:
            info = row[3:]
        except BaseException:
            info = None

        if len(info) == 0:
            tableData.append([title, city, ''])
            tableData.append([name, "", ''])
        else:
            meetup = info[0]
            date = info[1]
            link = info[2]
            date = colored(date, 'yellow')

            if title == prevTitle:
                tableData.append(['', '', date])
                tableData.append([name, '', meetup])
                tableData.append(['', '', link])
            else:
                tableData.append([title, city, date])
                tableData.append([name, '', meetup])
                tableData.append(['', '', link])

        prevTitle = title

    print(DoubleTable(tableData).table)
def _print_scores(live_feeds, args):
    if len(live_feeds) == 0:
        print('No live matches at this time')
        return

    if args.refresh > 0:
        os.system('clear')

    for feed in live_feeds:
        live_scores = []

        # Add the team scores to the display object
        live_scores.append(['Current time',
                            "{} ({})".format(datetime.now().strftime('%H:%M:%S'),
                                             datetime.utcnow().strftime('%H:%M:%S'))])
        live_scores.append(['Match',
                            "{}, {} v {} at {}, {}".format(feed.series[0]['series_name'],
                                                           feed.details['team1_name'],
                                                           feed.details['team2_name'],
                                                           feed.details['ground_name'],
                                                           feed.details['start_date'])])
        # if feed.details['present_datetime_gmt'] <= feed.details['start_datetime_gmt']:
        #     live_scores.append(
        #         [
        #             'Start',
        #             "{} in {}".format(
        #                 feed.details['start_time_gmt'],
        #                 (
        #                     datetime.strptime(feed.details['start_datetime_gmt'], "%Y-%m-%d %H:%M:%S") - datetime.utcnow()
        #                 )
        #             )
        #         ]
        #     )
        live_scores.append(['Status', feed.status()])
        if feed.details['present_datetime_gmt'] >= feed.details['start_datetime_gmt']:
            live_scores.append(['Summary', feed.summary()])

        table = DoubleTable(live_scores)
        table.inner_row_border = True
        table.justify_columns = {0: 'center', 1: 'center', 2: 'center'}
        print(table.table)
def table(items, title):
    """Generate a table."""
    table_instance = DoubleTable(items, "SM.MS - {}".format(title))
    table_instance.inner_row_border = True
    return table_instance.table