def long_format(self, records):
    """Format records in long format.

    Args:
        records: Controlled records to format.

    Returns:
        list: Lines of record data in long format — a title line, then for
            each record a key header followed by a rendered table and a
            trailing blank line.
    """
    title = util.hline(self.title_fmt % {'model_name': records[0].name.capitalize(),
                                         'storage_path': records[0].storage}, 'cyan')
    retval = [title]
    for record in records:
        rows = [['Attribute', 'Value', 'Command Flag', 'Description']]
        populated = record.populate()
        # NOTE(review): dict.iteritems() is Python 2 only.
        for key, val in sorted(populated.iteritems()):
            # The key attribute is shown as the section header, not as a row.
            if key != self.model.key_attribute:
                rows.append(self._format_long_item(key, val))
        table = Texttable(logger.LINE_WIDTH)
        table.set_cols_align(['r', 'c', 'l', 'l'])
        table.set_deco(Texttable.HEADER | Texttable.VLINES)
        table.add_rows(rows)
        retval.append(util.hline(populated[self.model.key_attribute], 'cyan'))
        retval.extend([table.draw(), ''])
    return retval
def do_list(self, args):
    """List all members of an organization, sorted by login name.

    Returns 0 on success; argument errors print help instead.
    """
    try:
        doParser = self.arg_list()
        doArgs = doParser.parse_args(shlex.split(args))
        org = org_utils.org_get(self.api, doArgs.org)
        printer.out("Getting user list for ["+org.name+"] . . .")
        allUsers = self.api.Orgs(org.dbId).Members.Getall()
        allUsers = order_list_object_by(allUsers.users.user, "loginName")
        table = Texttable(200)
        table.set_cols_align(["l", "l", "c"])
        table.header(["Login", "Email", "Active"])
        for item in allUsers:
            if item.active:
                active = "X"
            else:
                active = ""
            table.add_row([item.loginName, item.email, active])
        print table.draw() + "\n"  # Python 2 print statement
        return 0
    except ArgumentParserError as e:
        printer.out("In Arguments: "+str(e), printer.ERROR)
        self.help_list()
    except Exception as e:
        return handle_uforge_exception(e)
def load_fs():
    """Render every featureset0*.yml under config/general_config/ as a table,
    recording each featureset number in the module-level ``fs_list``."""
    # Column headers: fixed first column plus one label per mapped feature,
    # ordered by the features_map display position.
    headers = ['Fset#']
    headers.extend(entry[1] for _, entry in
                   sorted(features_map.items(), key=lambda kv: kv[1][1]))
    width = len(headers)

    table = Texttable()
    table.set_cols_width([5] * width)
    table.set_cols_align(['c'] * width)
    table.set_cols_valign(['m'] * width)
    table.set_cols_dtype(['t'] * width)

    here = os.path.dirname(os.path.realpath(__file__))
    config_dir = os.path.join(here, 'config/general_config/')
    for name in sorted(os.listdir(config_dir)):
        if not (name.startswith('featureset0') and name.endswith('.yml')):
            continue
        with open(os.path.join(config_dir, name)) as handle:
            parsed = yaml.load(handle, yaml.SafeLoader)
        table.add_rows([headers, get_data_from_yaml(parsed, name)])
        # Characters 10:13 of the filename carry the featureset number.
        fs_list.append(name[10:13])
    print(table.draw())
    print('\n')
class tabela_ambiente():
    """Renders the state of an ``ambiente`` (environment) object as a
    two-column text table."""

    def __init__(self, ambiente):
        self.ambiente = ambiente
        self.table = Texttable()

    def print_table(self, global_time):
        """Print the current environment snapshot.

        Args:
            global_time: Elapsed simulation time, in seconds.
        """
        env = self.ambiente
        self.table.reset()
        self.table.set_deco(Texttable.HEADER)
        self.table.set_cols_dtype(['t', 't'])  # both columns rendered as text
        self.table.set_cols_align(["l", "c"])
        rows = [
            ["Informações do ambiente", ""],
            ["Hora: ", str(datetime.timedelta(seconds=global_time))],
            ["Temperatura: ", str(round(env.temperatura, 2))],
            ["Chuva: ", str(env.chuva)],
            ["Estado Atmosférico: ", str(env.estado_atmosferico)],
            ["Sujeira: ", str(env.sujeira)],
            [" ", " "],
            ["Último movimento foi há: ", str(datetime.timedelta(seconds=env.mov_count))],
            ["Ar-condicionado: ", str(env.ar_condicionado)],
            ["Aquecedor: ", str(env.aquecedor)],
            ["Lâmpada: ", str(env.lampada)],
            ["Porta: ", str(env.porta)],
            ["Janela: ", str(env.janela)],
            ["Televisão: ", str(env.televisão)],
            ["Aspirador de pó: ", str(env.aspirador)],
        ]
        self.table.add_rows(rows)
        print(self.table.draw())
def do_info(self, args):
    """Show account details (login, email, names, status codes) for a user."""
    try:
        doParser = self.arg_info()
        doArgs = doParser.parse_args(shlex.split(args))
        printer.out("Getting user ["+doArgs.account+"] ...")
        user = self.api.Users(doArgs.account).Get()
        if user is None:
            printer.out("user "+ doArgs.account +" does not exist", printer.ERROR)
        else:
            if user.active:
                active = "X"
            else:
                active = ""
            printer.out("Informations about " + doArgs.account + ":",)
            table = Texttable(200)
            table.set_cols_align(["c", "l", "c", "c", "c", "c", "c", "c"])
            table.header(["Login", "Email", "Lastname", "Firstname", "Created", "Active", "Promo Code", "Creation Code"])
            table.add_row([user.loginName, user.email, user.surname, user.firstName,
                           user.created.strftime("%Y-%m-%d %H:%M:%S"), active,
                           user.promoCode, user.creationCode])
            print table.draw() + "\n"  # Python 2 print statement
        return 0
    except ArgumentParserError as e:
        printer.out("In Arguments: "+str(e), printer.ERROR)
        self.help_info()
    except Exception as e:
        return handle_uforge_exception(e)
def do_list(self, args):
    """List the generation (target) formats available to the current user."""
    try:
        # call UForge API
        printer.out("Getting generation formats for ["+self.login+"] ...")
        targetFormatsUser = self.api.Users(self.login).Targetformats.Getall()
        if targetFormatsUser is None or len(targetFormatsUser.targetFormats.targetFormat) == 0:
            printer.out("No generation formats available")
            return 0
        else:
            targetFormatsUser = generics_utils.order_list_object_by(targetFormatsUser.targetFormats.targetFormat, "name")
            table = Texttable(200)
            table.set_cols_align(["l", "l", "l", "l", "l", "c"])
            table.header(["Name", "Format", "Category", "Type", "CredAccountType", "Access"])
            for item in targetFormatsUser:
                if item.access:
                    access = "X"
                else:
                    access = ""
                if item.credAccountType is None:
                    credAccountType = ""
                else:
                    credAccountType = item.credAccountType
                table.add_row([item.name, item.format.name, item.category.name,
                               item.type, credAccountType, access])
            print table.draw() + "\n"  # Python 2 print statement
            return 0
    except ArgumentParserError as e:
        printer.out("In Arguments: " + str(e), printer.ERROR)
        self.help_list()
    except Exception as e:
        return handle_uforge_exception(e)
def test_texttable_header():
    """Exercise per-column dtypes with a header-only deco (no borders)."""
    table = Texttable()
    table.set_deco(Texttable.HEADER)
    table.set_cols_dtype([
        't',  # text
        'f',  # float (decimal)
        'e',  # float (exponent)
        'i',  # integer
        'a',  # automatic
    ])
    table.set_cols_align(["l", "r", "r", "r", "l"])
    table.add_rows([
        ["text", "float", "exp", "int", "auto"],
        ["abcd", "67", 654, 89, 128.001],
        ["efghijk", 67.5434, .654, 89.6, 12800000000000000000000.00023],
        ["lmn", 5e-78, 5e-78, 89.4, .000000000000128],
        ["opqrstu", .023, 5e+78, 92., 12800000000000000000000],
    ])
    # NOTE(review): the expected literal below was reconstructed from a
    # whitespace-collapsed copy of this test — verify the exact column
    # padding against texttable's actual output before relying on it.
    assert clean(table.draw()) == dedent('''\
        text     float       exp      int     auto
        ==============================================
        abcd     67.000   6.540e+02    89   128.001
        efghijk  67.543   6.540e-01    90   1.280e+22
        lmn       0.000   5.000e-78    89   0.000
        opqrstu   0.023   5.000e+78    92   1.280e+22
    ''')
def list(self):
    """List the configured storage backends.

    Returns:
        str: ASCII table with one row per storage backend (name, type,
        location, nodes, shared flag, free space, ID).
    """
    # Set permissions as having been checked, as listing storage backends
    # does not require permissions.
    # (Docstring/comments previously copy-pasted from the Drbd volume
    # lister; corrected to describe storage backends.)
    self._get_registered_object('auth').set_permission_asserted()

    # Create table and add headers
    table = Texttable()
    table.set_deco(Texttable.HEADER | Texttable.VLINES)
    table.header(('Name', 'Type', 'Location', 'Nodes', 'Shared',
                  'Free Space', 'ID'))

    # Set column alignment and widths
    table.set_cols_width((15, 5, 30, 70, 6, 15, 50))
    table.set_cols_align(('l', 'l', 'l', 'l', 'l', 'l', 'l'))

    for storage_backend in self.get_all():
        table.add_row((
            storage_backend.name,
            storage_backend.storage_type,
            storage_backend.get_location(),
            ', '.join(storage_backend.nodes),
            str(storage_backend.shared),
            SizeConverter(storage_backend.get_free_space()).to_string(),
            storage_backend.id_
        ))
    return table.draw()
def find_commands(db, *filters):
    """Print a table of logged commands whose user string matches *filters*.

    Filters are joined into a single regex with whitespace-run separators
    and evaluated by the database's REGEXP hook.
    """
    # NOTE(review): '\s+' should be a raw string (r'\s+') to avoid an
    # invalid-escape warning on Python 3; left unchanged here.
    user_filter = '\s+'.join(filters)
    user_re = re.compile(user_filter)
    # Cache the compiled pattern for the sqlite REGEXP function.
    RE_CACHE[user_filter] = user_re
    query = '''
    SELECT hostname, timestamp, duration, user_string
    FROM commands
    WHERE timestamp > ?
    AND user_string REGEXP ?
    ORDER BY timestamp
    '''
    table = Texttable()
    table.set_deco(Texttable.HEADER)
    table.set_cols_align(('l', 'r', 'r', 'l'))
    table.header(('host', 'date', 'duration', 'command'))
    host_width = 6
    max_command_width = 9
    now = time.time()
    for row in db.execute(query, (TIMESTAMP, user_filter)):
        # Track the widest host / command seen so the column widths can be
        # fitted after all rows are added.
        host_width = max(host_width, len(row[0]))
        max_command_width = max(max_command_width, len(row[3]))
        table.add_row((
            row[0],
            format_time(row[1], now),
            format_duration(row[2]) if row[2] > 0 else '',
            highlight(row[3], user_re)))
    table.set_cols_width((host_width, 30, 10, max_command_width + 2))
    print table.draw()  # Python 2 print statement
def test_texttable():
    """Default deco (full borders) with mixed horizontal/vertical alignment."""
    table = Texttable()
    table.set_cols_align(["l", "r", "c"])
    table.set_cols_valign(["t", "m", "b"])
    table.add_rows([
        ["Name", "Age", "Nickname"],
        ["Mr\nXavier\nHuon", 32, "Xav'"],
        ["Mr\nBaptiste\nClement", 1, "Baby"],
        ["Mme\nLouise\nBourgeau", 28, "Lou\n \nLoue"],
    ])
    # NOTE(review): the expected literal below was reconstructed from a
    # whitespace-collapsed copy of this test — verify the exact padding
    # against texttable's actual output before relying on it.
    assert clean(table.draw()) == dedent('''\
        +----------+-----+----------+
        |   Name   | Age | Nickname |
        +==========+=====+==========+
        | Mr       |     |          |
        | Xavier   |  32 |          |
        | Huon     |     |   Xav'   |
        +----------+-----+----------+
        | Mr       |     |          |
        | Baptiste |   1 |          |
        | Clement  |     |   Baby   |
        +----------+-----+----------+
        | Mme      |     |   Lou    |
        | Louise   |  28 |          |
        | Bourgeau |     |   Loue   |
        +----------+-----+----------+
    ''')
def do_list(self, args):
    """List every entitlement defined on the UForge server."""
    try:
        doParser = self.arg_list()
        doArgs = doParser.parse_args(shlex.split(args))
        printer.out("Getting entitlements list of the UForge :")
        entList = self.api.Entitlements.Getall()
        if entList is None:
            printer.out("No entitlements found.", printer.OK)
        else:
            entList = generics_utils.order_list_object_by(entList.entitlements.entitlement, "name")
            printer.out("Entitlement list for the UForge :")
            table = Texttable(200)
            table.set_cols_align(["l", "l"])
            table.header(["Name", "Description"])
            table.set_cols_width([30, 60])
            for item in entList:
                table.add_row([item.name, item.description])
            print table.draw() + "\n"  # Python 2 print statement
        return 0
    except ArgumentParserError as e:
        printer.out("ERROR: In Arguments: " + str(e), printer.ERROR)
        self.help_list()
    except Exception as e:
        return handle_uforge_exception(e)
def do_list(self, args):
    """List all roles defined for an organization (default org when none given)."""
    try:
        org_name = None
        if args:
            do_parser = self.arg_list()
            try:
                do_args = do_parser.parse_args(shlex.split(args))
            except SystemExit as e:
                # argparse exits on bad input; abort quietly.
                return
            org_name = do_args.org
        # call UForge API
        printer.out("Getting all the roles for the organization...")
        org = org_utils.org_get(self.api, org_name)
        all_roles = self.api.Orgs(org.dbId).Roles().Getall(None)
        table = Texttable(200)
        table.set_cols_align(["c", "c"])
        table.header(["Name", "Description"])
        for role in all_roles.roles.role:
            table.add_row([role.name, role.description])
        print table.draw() + "\n"  # Python 2 print statement
        return 0
    except ArgumentParserError as e:
        printer.out("ERROR: In Arguments: " + str(e), printer.ERROR)
        self.help_list()
    except Exception as e:
        return marketplace_utils.handle_uforge_exception(e)
def do_list_changesets(self, arg, opts=None):
    """Show changesets needing review."""
    changesets = requests.get(
        "http://%s/api/v1/changeset/" % self.site,
        params={"review_status": "needs"},
        auth=self.api_auth
    )
    objects = changesets.json().get("objects")
    table = Texttable()
    table.set_deco(Texttable.HEADER)
    table.set_cols_align(["c", "c", "c", "c", "c"])
    table.set_cols_width([5, 20, 15, 15, 10])
    rows = [["ID", "Type", "Classification", "Version Control URL", "Submitted By"]]
    for cs in objects:
        # Resolve the submitting user's display name via a second API call.
        user = requests.get("http://%s%s" % (self.site, cs.get("submitted_by")), auth=self.api_auth)
        user_detail = user.json()
        rows.append(
            [
                cs.get("id"),
                cs.get("type"),
                cs.get("classification"),
                cs.get("version_control_url"),
                user_detail.get("name"),
            ]
        )
    table.add_rows(rows)
    print "Changesets That Need To Be Reviewed:"  # Python 2 print statements
    print table.draw()
def containers_to_ascii_table(containers):
    """Format containers into ASCII tables, one table per host.

    Expects a dictionary ``{host: [containers]}`` and returns the
    concatenated tables as a single string.
    """
    with closing(StringIO()) as out:
        # iteritems(): Python 2 only.
        for host, values in containers.iteritems():
            out.write("[" + str(host) + "] \n")
            t = TextTable(max_width=400)
            t.set_deco(TextTable.HEADER)
            t.set_cols_dtype(['t'] * 6)
            t.set_cols_align(["l"] * 6)
            t.set_cols_width([12, 25, 25, 15, 20, 15])
            rows = []
            rows.append(['Id', 'Image', 'Command', 'Created', 'Status', 'Ports'])
            for container in values:
                rows.append([
                    container.id[:12],      # short id, docker-CLI style
                    container.image,
                    container.command[:20],
                    time_ago(container.created),
                    container.status,
                    container.ports
                ])
            t.add_rows(rows)
            out.write(t.draw() + "\n\n")
        return out.getvalue()
def do_info_draw_general(self, info_image):
    """Print the general-information table for a generated image."""
    table = Texttable(0)
    table.set_cols_dtype(["a", "t"])
    table.set_cols_align(["l", "l"])
    table.add_row(["Name", info_image.name])
    table.add_row(["Format", info_image.targetFormat.name])
    table.add_row(["Id", info_image.dbId])
    table.add_row(["Version", info_image.version])
    table.add_row(["Revision", info_image.revision])
    table.add_row(["Uri", info_image.uri])
    self.do_info_draw_source(info_image.parentUri, table)
    table.add_row(["Created", info_image.created.strftime("%Y-%m-%d %H:%M:%S")])
    table.add_row(["Size", size(info_image.fileSize)])
    table.add_row(["Compressed", "Yes" if info_image.compress else "No"])
    # Docker-based formats additionally expose registering name + entrypoint.
    if self.is_docker_based(info_image.targetFormat.format.name):
        # Registering name is only present once generation has completed.
        registring_name = None
        if info_image.status.complete:
            registring_name = info_image.registeringName
        table.add_row(["RegisteringName", registring_name])
        table.add_row(["Entrypoint", info_image.entrypoint.replace("\\", "")])
    self.do_info_draw_generation(info_image, table)
    print table.draw() + "\n"  # Python 2 print statement
def render_instruments_as_table(instruments, display_heading=True):
    """
    Returns ASCII table view of instruments.

    :param instruments: The instruments to be rendered.
    :type instruments: :class:`mytardisclient.models.resultset.ResultSet`
    :param display_heading: When True, prefix the table with a heading that
        summarizes the query's meta information (useful to spot results
        truncated by pagination).
    """
    if display_heading:
        heading = (
            "\n"
            "Model: Instrument\n"
            "Query: %s\n"
            "Total Count: %s\n"
            "Limit: %s\n"
            "Offset: %s\n\n"
            % (instruments.url, instruments.total_count,
               instruments.limit, instruments.offset))
    else:
        heading = ""

    table = Texttable(max_width=0)
    table.set_cols_align(["r", 'l', 'l'])
    table.set_cols_valign(['m', 'm', 'm'])
    table.header(["ID", "Name", "Facility"])
    for instrument in instruments:
        table.add_row([instrument.id, instrument.name, instrument.facility])
    return heading + table.draw() + "\n"
def do_info_draw_publication(self, info_image):
    """Print publication status (and cloud id) for an image, if published."""
    printer.out("Information about publications:")
    pimages = self.api.Users(self.login).Pimages.Getall()
    table = Texttable(0)
    table.set_cols_align(["l", "l"])
    has_pimage = False
    for pimage in pimages.publishImages.publishImage:
        # Only publications belonging to this image are listed.
        if pimage.imageUri == info_image.uri:
            has_pimage = True
            cloud_id = None
            publish_status = image_utils.get_message_from_status(pimage.status)
            if not publish_status:
                publish_status = "Publishing"
            if publish_status == "Done":
                cloud_id = pimage.cloudId
                # Docker-style targets identify the publication by
                # namespace/repository:tag instead of a cloud id.
                format_name = info_image.targetFormat.format.name
                if format_name == "docker" or format_name == "openshift":
                    cloud_id = pimage.namespace + "/" + pimage.repositoryName + ":" + pimage.tagName
            table.add_row([publish_status, cloud_id])
    if has_pimage:
        table.header(["Status", "Cloud Id"])
        print table.draw() + "\n"  # Python 2 print statement
    else:
        printer.out("No publication")
def top(db):
    """Print the 20 most frequently logged commands since TIMESTAMP."""
    count_query = '''
    SELECT count(*)
    FROM commands
    WHERE timestamp > ?
    '''
    # Percentage weight of a single command occurrence
    # (Python 2: float() forces true division).
    percentage = 100 / float(execute_scalar(db, count_query, TIMESTAMP))
    query = '''
    SELECT count(*) AS counts, command
    FROM commands
    WHERE timestamp > ?
    GROUP BY command
    ORDER BY counts DESC
    LIMIT 20
    '''
    table = Texttable()
    table.set_deco(Texttable.HEADER)
    table.set_cols_align(('r', 'r', 'l'))
    table.header(('count', '%', 'command'))
    for row in db.execute(query, (TIMESTAMP,)):
        table.add_row((row[0], int(row[0]) * percentage, row[1]))
    print table.draw()  # Python 2 print statement
def print_steps(self, show_result=True):
    """Print the recorded evaluation history as a table, or a short summary
    when history saving is disabled.

    Args:
        show_result: Also print the final term (summary mode only).
    """
    def max_len_of_list_of_str(s):
        # Longest line of the (possibly multi-line) string form of s.
        return max(len(line) for line in str(s).split('\n'))

    def autodetect_width(d):
        # Per-column width: the widest cell line in each column of d.
        widths = [0] * len(d[0])
        for line in d:
            for _i in range(len(line)):
                widths[_i] = max(widths[_i], max_len_of_list_of_str(line[_i]))
        return widths

    if self.save_history:
        if self.errors:
            # Drop the last (failed) step from the output.
            self.history = self.history[:-1]
        t = Texttable()
        # Parallel mode omits the trailing Stack column.
        header = ['№', 'Term', 'Code'] if self.parallel else ['№', 'Term', 'Code', 'Stack']
        data = [header] + [[repr(i) for i in item][:-1] if self.parallel
                           else [repr(i) for i in item]
                           for item in self.history]
        t.add_rows(data)
        t.set_cols_align(['l'] + ['r'] * (len(header) - 1))
        t.set_cols_valign(['m'] + ['m'] * (len(header) - 1))
        t.set_cols_width(autodetect_width(data))
        print t.draw()  # Python 2 print statements
    else:
        if not self.errors:
            print ' Steps: %10s' % self.iteration
            if show_result:
                print 'Result: %10s' % repr(self.term)
def main(args):
    """Process each argument: resize every input video and print a summary
    table plus totals for size and conversion time."""
    table = Texttable()
    table.set_cols_align(["r", "r", "r", "r", "r"])
    rows = [["Number", "File Name", "File Size", "Video Duration (H:MM:SS)", "Conversion Time"]]
    total_time = 0.0
    total_file_size = 0
    for index, arg in enumerate(args, start=1):
        timer = utils.Timer()
        with timer:
            result = resize(arg, (index, len(args)))
        # result.elapsed_time = timer.elapsed_time()
        rows.append([index,
                     result.file_name,
                     utils.sizeof_fmt(result.file_size),
                     utils.sec_to_hh_mm_ss(utils.get_video_length(result.file_name)) if result.file_name else "--",
                     "{0:.1f} sec.".format(result.elapsed_time) if result.status else FAILED])
        # if rows[-1][-1] != FAILED:
        # NOTE(review): totals accumulate even for failed conversions — the
        # guard above is commented out; confirm this is intentional.
        total_time += result.elapsed_time
        total_file_size += result.file_size
    table.add_rows(rows)
    print table.draw()  # Python 2 print statements
    print 'Total file size:', utils.sizeof_fmt(total_file_size)
    print 'Total time: {0} (H:MM:SS)'.format(utils.sec_to_hh_mm_ss(total_time))
    print utils.get_unix_date()
def realDataInfo():
    """Tabulate the dimensions (m rows, v columns) of each real dataset used
    in the study and write the table as both LaTeX and plain text files."""
    file_names = ['X50sites', 'Xpitprops', 'wdbc', 'frogs']
    data_names = ['Wave Sites', 'Pitprops', 'Breast Cancer Diagnosis',
                  'Anuran Frog Calls']

    rows = [['Dataset', 'm', 'v']]
    for fname, label in zip(file_names, data_names):
        matrix = read_matrix_from_file('data/realData/{0}.txt'.format(fname))
        n_rows, n_cols = matrix.shape
        rows.append([label, n_rows, n_cols])

    table = Texttable()
    table.set_cols_align(["c"] * 3)
    table.set_deco(Texttable.HEADER | Texttable.VLINES)
    table.add_rows(rows)

    outputLatex = latextable.draw_latex(
        table, caption="Overview of the real data used in this study")

    # Persist both a LaTeX and a plain-text rendering of the table.
    with open('output/real/data_dimensions.tex', 'w') as out:
        out.write(outputLatex)
    with open('output/notLatex/real/data_dimensions.txt', 'w') as out:
        out.write(table.draw())
def sg_varEx_table(ds, varEx):
    """Tabulate variance-explained results for the stochastic greedy
    algorithms on one dataset and write them as LaTeX and plain text.

    Args:
        ds (String): Dataset feature selection was performed on.
        varEx (Dictionary): Key - Algorithm type, Value - variance explained
            by the features selected by that algorithm.
    """
    # Header row: 'K' followed by 1..len(varEx)+1, matching the original
    # layout of one numbered column per algorithm plus the leading 1.
    rows = [['K'] + list(range(1, len(varEx) + 2))]
    for algorithm in varEx.keys():
        data_row = [algorithm]
        for value in varEx[algorithm]:
            data_row.append(value)
        rows.append(data_row)

    table = Texttable()
    table.set_cols_align(["c"] * len(rows[0]))
    table.set_deco(Texttable.HEADER)
    table.add_rows(rows)

    outputLatex = latextable.draw_latex(
        table,
        caption="Variance explained by variables selected by the stochastic greedy implementations for the {0} dataset and for k = 1,..,6 the kth selected variable is indicated using the default percentage for random sampling".format(ds))

    # Persist both a LaTeX and a plain-text rendering of the table.
    with open('output/real/{0}/sgVarEx.tex'.format(ds), 'w') as out:
        out.write(outputLatex)
    with open('output/notLatex/real/{0}/sgVarEx.txt'.format(ds), 'w') as out:
        out.write(table.draw())
def display_departures(station_name, limit=10, mode=None):
    """Print upcoming departures for a station as a colored table.

    Args:
        station_name: Station to resolve and query.
        limit: Maximum number of departures to display.
        mode: Optional transport-mode filter, matched case-insensitively
            against each departure's 'product' field.
    """
    station_name = get_station_name(station_name)
    departures_json = get_departures_by_name(station_name)
    if mode is None:
        departures = [Departure(entry) for entry in departures_json]
    else:
        departures = [Departure(entry) for entry in departures_json
                      if mode.upper() in entry['product']]
    departures = departures[:limit]

    print('\nStation: ' + station_name + '\n')

    table = Texttable()
    table.set_deco(Texttable.HEADER)
    table.set_cols_dtype(['t', 't', 'i'])
    table.set_cols_align(['l', 'l', 'r'])

    # Header row; the 'line' label carries its own ANSI color codes.
    rows = [[
        '\x1b[38;5;231m\x1b[48;5;23mline\x1b[0m',
        'destination',
        'departure (min)'
    ]]
    rows.extend([dep.get_label_colored(), dep.destination,
                 dep.departure_time_minutes] for dep in departures)
    table.add_rows(rows)
    print(color(table.draw(), fore=MVG_FG, back=MVG_BG))
def test_table_update_deaths_down_multi(table_updater_service, stub_bno_dataframe):
    """Death counts dropping in several countries yield '(-1)' annotations."""
    columns = ["Location", "Cases", "Deaths", "Serious", "Critical",
               "Recovered", "Notes"]
    updated_rows = [
        ["Australia", "2", "0", "0", "0", "0", "1 serious"],
        ["Sweden", "5", "1", "0", "0", "0", ""],
    ]
    data_after = pd.DataFrame(updated_rows, columns=columns)
    joined_data = stub_bno_dataframe.append(data_after)

    messages = table_updater_service._make_update_message(joined_data)

    # Build the table we expect the service to have produced.
    expected = Texttable()
    expected.set_cols_align(["c"] * 7)
    expected.set_cols_valign(["m"] * 7)
    expected.add_rows([
        ["Location", "Cases", "Deaths", "Serious", "Critical", "Recovered", "Notes"],
        ["Australia", "2", "0 (-1)", "0", "0", "0", "1 serious"],
        ["Sweden", "5", "1 (-1)", "0", "0", "0", ""],
    ])
    assert [f"```{expected.draw()}```"] == messages
def _dataframe_to_texttable(df, align=None):
    """Convert data frame to texttable. Sets column widths to the widest
    entry in each column.

    Args:
        df: pandas DataFrame to render.
        align: Optional per-column alignment class names; when given, every
            cell is prefixed with a ``.. class:: <align>`` directive.

    Returns:
        Texttable: populated table ready to draw.
    """
    ttab = Texttable()
    ttab.set_precision(1)
    # h: header row (column names) followed by the data records.
    h = [[x for x in df]]
    h.extend([x for x in df.to_records(index=False)])
    if align:
        # izip(): Python 2 only.
        colWidths = [max(len(x), len(".. class:: {}".format(y)))
                     for x, y in izip(df.columns, align)]
    else:
        colWidths = [len(x) for x in df.columns]
    for row in h:
        for i in range(0, len(row)):
            if type(row[i]) == str:
                # Multi-line cells: width is the longest individual line.
                colWidths[i] = max([len(str(x)) for x in row[i].split("\n")] + [colWidths[i]])
            colWidths[i] = max(len(str(row[i])), colWidths[i])
    table_data = []
    if align:
        for row in h:
            table_row = []
            i = 0
            for col, aln in izip(row, align):
                table_row.append(".. class:: {}".format(aln) + " " * colWidths[i] + "{}".format(col))
                i = i + 1
            table_data.append(table_row)
    else:
        table_data = h
    ttab.add_rows(table_data)
    ttab.set_cols_width(colWidths)
    # Note: this does not affect the final pdf output
    ttab.set_cols_align(["r"] * len(colWidths))
    return ttab
def show_interfaces(self):
    """Return an ASCII table describing every discovered SNMP interface."""
    table = Texttable()
    table.set_cols_align(["c", "l", "l", "l", "l", "l"])
    data = [["ifIndex", "ifDescr", "ifAlias", "IPadd", "AS", "OS"]]
    interfaces = self.snmpDevice[self.host]['interfaces']
    for key in interfaces:
        iface = interfaces[key]
        # Interfaces without an IP address get a placeholder in that column.
        ip_addr = str(iface['ipAdEntAddr']) if iface['ipAdEntAddr'] else "--"
        data.append([
            str(iface['ifIndex']),
            minimize(str(iface['ifDescr'])),
            str(iface['ifAlias']),
            ip_addr,
            str(iface['ifAdminStatus']),
            str(iface['ifOperStatus']),
        ])
    table.add_rows(data, header=True)
    return table.draw()
def list(self):
    """List the Drbd volumes and statuses"""
    # Table skeleton: header rule plus vertical separators only.
    table = Texttable()
    table.set_deco(Texttable.HEADER | Texttable.VLINES)
    table.header(('Volume Name', 'VM', 'Minor', 'Port', 'Role',
                  'Connection State', 'Disk State', 'Sync Status'))
    table.set_cols_width((30, 20, 5, 5, 20, 20, 20, 13))
    table.set_cols_align(('l', 'l', 'c', 'c', 'l', 'c', 'l', 'c'))

    # One row per Drbd hard-drive object; role and disk state are reported
    # for both the local and the remote side.
    for drbd in self.get_all_drbd_hard_drive_object(True):
        role_cell = 'Local: %s, Remote: %s' % (drbd._drbdGetRole()[0].name,
                                               drbd._drbdGetRole()[1].name)
        disk_cell = 'Local: %s, Remote: %s' % (drbd._drbdGetDiskState()[0].name,
                                               drbd._drbdGetDiskState()[1].name)
        sync_cell = 'In Sync' if drbd._isInSync() else 'Out of Sync'
        table.add_row((drbd.resource_name,
                       drbd.vm_object.get_name(),
                       drbd.drbd_minor,
                       drbd.drbd_port,
                       role_cell,
                       drbd._drbdGetConnectionState().name,
                       disk_cell,
                       sync_cell))
    return table.draw()
def scrape_score(self):
    """Scrape the configured live-score page and return the fixtures that
    have not started yet as a monospace-formatted table string."""
    page = requests.get(self.url)
    parsed_markup = BeautifulSoup(page.text, "html.parser")

    # final version of the table to send to the user
    scores = Texttable()
    scores.set_cols_width([10, 1, 10])
    scores.set_cols_align(['l', 'c', 'r'])  # l/c/r horizontal alignment
    scores.set_cols_valign(['m', 'm', 'm'])  # m - middle (vertical)
    scores.set_chars(['—', '|', '+', '='])  # em dash instead of plain dash
    scores.header(["Home Team", "", "Away Team"])

    # A '?' home score marks a fixture that has not started yet.
    for element in parsed_markup.find_all("div", "row-gray"):
        if element.find(attrs={"class": "scorelink"}) is None:
            continue
        if element.find("div", "sco").get_text().split("-")[0].strip() != "?":
            continue
        home_team = shorten_name(' '.join(
            element.find("div", "tright").get_text().strip().split(" ")))
        away_team = shorten_name(' '.join(
            element.find(attrs={"class": "ply name"}).get_text().strip().split(" ")))
        scores.add_row([home_team, "-", away_team])

    return '`' + scores.draw() + '`'
def images_to_ascii_table(images):
    """Just a method that formats the images to ascii table.
    Expects dictionary {host: [images]} and returns the concatenated
    tables as a single string.
    """
    with closing(StringIO()) as out:
        # iteritems(): Python 2 only.
        for host, values in images.iteritems():
            out.write(str(host) + "\n")
            t = TextTable()
            t.set_deco(TextTable.HEADER)
            t.set_cols_dtype(['t'] * 5)
            t.set_cols_align(["l"] * 5)
            rows = []
            rows.append(['Repository', 'Tag', 'Id', 'Created', 'Size'])
            for image in values:
                rows.append([
                    image.repository or '<none>',
                    image.tag or '<none>',
                    image.id[:12],      # short id, docker-CLI style
                    time_ago(image.created),
                    human_size(image.size)
                ])
            t.add_rows(rows)
            out.write(t.draw() + "\n\n")
        return out.getvalue()
def list():
    """Fetch all of the user's Gists and print them as a numbered table,
    followed by a count of total and private Gists."""
    api.getCredentials()
    log.debug("Command: List.")

    gists = api.get("/gists")

    table = Texttable(max_width=defaults.max_width)
    table.set_deco(Texttable.HEADER | Texttable.HLINES)
    table.set_cols_align(["l", "l", "l", "l", "l"])
    table.set_cols_width([4, 30, 6, 20, 30])
    table.header(["", "Files", "Public", "Gist ID", "Description"])

    public_count = 0
    private_count = 0
    for index, gist in enumerate(gists):
        # Space-separated, quoted list of the Gist's file names.
        file_list = ''
        for filename in gist['files']:
            file_list += "'" + filename + "' "
        if gist['public']:
            public_count += 1
        else:
            private_count += 1
        table.add_row([index + 1, file_list, str(gist['public']),
                       gist['id'], gist['description']])

    print(table.draw())
    print('')
    print("You have %i Gists. (%i Private)" % (len(gists), private_count))
def render_schemas_as_table(schemas, display_heading=True):
    """
    Returns ASCII table view of schemas.

    :param schemas: The schemas to be rendered.
    :type schemas: :class:`mytardisclient.models.resultset.ResultSet`
    :param display_heading: When True, prefix the table with a heading that
        summarizes the query's meta information (useful to spot results
        truncated by pagination).
    """
    if display_heading:
        heading = (
            "\n"
            "Model: Schema\n"
            "Query: %s\n"
            "Total Count: %s\n"
            "Limit: %s\n"
            "Offset: %s\n\n"
            % (schemas.url, schemas.total_count, schemas.limit,
               schemas.offset))
    else:
        heading = ""

    table = Texttable(max_width=0)
    table.set_cols_align(["r", 'l', 'l', 'l', 'l', 'l', 'l'])
    table.set_cols_valign(['m'] * 7)
    table.header(["ID", "Name", "Namespace", "Type", "Subtype",
                  "Immutable", "Hidden"])
    for schema in schemas:
        table.add_row([schema.id, schema.name, schema.namespace,
                       schema.type, schema.subtype or '',
                       str(bool(schema.immutable)), str(bool(schema.hidden))])
    return heading + table.draw() + "\n"
def format_jobs(result: dict, offset: int, limit: int):
    """Return a text table of jobs plus a pagination footer.

    (The previous docstring said "users"; this function formats jobs.)

    Args:
        result: API response dict with 'data' (job records) and
            'meta'['total_count'].
        offset: Zero-based index of the first record in this page.
        limit: Page size.

    Returns:
        str: Rendered table followed by an 'a - b of N' pagination line.
    """
    data = result['data']
    headers = [
        'id', 'name', 'owner', 'member_count', 'duration', 'status',
        'created_at'
    ]
    idx_duration = headers.index('duration')
    rows = [headers]
    for record in data:
        row = [record[col] for col in headers]
        # Duration appears to arrive in seconds; displayed in minutes
        # (header is relabelled '(min)' below).
        if row[idx_duration] is not None:
            row[idx_duration] /= 60.0
        rows.append(row)
    headers[idx_duration] = headers[idx_duration] + ' (min)'

    table = Texttable(max_width=250)
    table.set_deco(Texttable.HEADER)
    table.set_cols_align(['r', 'l', 'l', 'r', 'r', 'c', 'r'])
    table.add_rows(rows)
    table_text = table.draw()

    total_count = result['meta']['total_count']
    page = '{} - {} of {}'.format(min(offset, total_count),
                                  min(offset + limit, total_count),
                                  total_count)
    return table_text + '\n' + page
def render_datasets_as_table(datasets, display_heading=True):
    """
    Returns ASCII table view of datasets.

    :param datasets: The datasets to be rendered.
    :type datasets: :class:`mytardisclient.models.resultset.ResultSet`
    :param display_heading: When True, prefix the table with a heading that
        summarizes the query's meta information (useful to spot results
        truncated by pagination).
    """
    if display_heading:
        heading = (
            "\n"
            "Model: Dataset\n"
            "Query: %s\n"
            "Total Count: %s\n"
            "Limit: %s\n"
            "Offset: %s\n\n"
            % (datasets.url, datasets.total_count, datasets.limit,
               datasets.offset))
    else:
        heading = ""

    table = Texttable(max_width=0)
    table.set_cols_align(["r", 'l', 'l', 'l'])
    table.set_cols_valign(['m'] * 4)
    table.header(["Dataset ID", "Experiment(s)", "Description", "Instrument"])
    for dataset in datasets:
        table.add_row([dataset.id, "\n".join(dataset.experiments),
                       dataset.description, dataset.instrument])
    return heading + table.draw() + "\n"
def display_stats(stats):
    """Print per-algorithm averages: moves per win and defaults per win."""
    summary = [
        [algo, data['moves'] / data['wins'], data['default'] / data['wins']]
        for algo, data in stats.items()
    ]
    table = Texttable()
    table.set_cols_align(["l", "r", "r"])
    table.set_deco(Texttable.BORDER | Texttable.HEADER | Texttable.VLINES)
    table.add_rows([["Algo", "Moves", "Defaults"]] + summary)
    print(table.draw())
def scan_table(scanInstances, scan=None):
    """Build a Texttable of scanned instances and their scans.

    Args:
        scanInstances: Iterable of scanned-instance objects to list.
        scan: Optional single scan; when given, only that scan is tabulated.

    Returns:
        Texttable: the populated (but not yet drawn) table.
    """
    table = Texttable(800)
    table.set_cols_dtype(["t"] * 5)
    table.set_cols_align(["c", "l", "c", "c", "c"])
    table.header(["Id", "Name", "Status", "Distribution", "With overlay"])

    # Single-scan mode: one indented row, no instance rows.
    if scan:
        table.add_row([scan.dbId, "\t" + scan.name, scan_status(scan), "", ""])
        return table

    for instance in scanInstances:
        distro = instance.distribution
        overlay_flag = 'X' if instance.overlayIncluded else ''
        table.add_row([
            instance.dbId,
            instance.name,
            "",
            distro.name + " " + distro.version + " " + distro.arch,
            overlay_flag,
        ])
        # Child scans are listed under their instance, indented by a tab.
        ordered = generics_utils.order_list_object_by(instance.scans.scan, "name")
        for child in ordered:
            table.add_row([child.dbId, "\t" + child.name, scan_status(child), "", ""])
    return table
def _make_text_table(shap_values, normalized_values, pipeline_features, top_k,
                     include_shap_values=False):
    """Make a table displaying the SHAP values for a prediction.

    Arguments:
        shap_values (dict): Dictionary mapping the feature names to their
            SHAP values. In a multiclass setting, this dictionary
            corresponds to the SHAP values for a single class.
        normalized_values (dict): Normalized SHAP values. Same structure as
            shap_values parameter.
        top_k (int): How many of the highest/lowest features to include in
            the table.
        include_shap_values (bool): Whether to include the SHAP values in
            their own column.

    Returns:
        str
    """
    # One extra column when raw SHAP values are requested.
    n_cols = 4 if include_shap_values else 3

    table = Texttable()
    table.set_deco(Texttable.HEADER)
    table.set_cols_dtype(["t"] * n_cols)
    table.set_cols_align(["c"] * n_cols)

    header = ["Feature Name", "Feature Value", "Contribution to Prediction"]
    if include_shap_values:
        header.append("SHAP Value")

    rows = [header]
    rows.extend(_make_rows(shap_values, normalized_values, pipeline_features,
                           top_k, include_shap_values))
    table.add_rows(rows)
    return table.draw()
def sub(db, command, *filters): counts = collections.defaultdict(int) user_filter = ' '.join(itertools.chain([command], filters)) total = 0 query = ''' SELECT user_string FROM commands WHERE timestamp > ? AND command = ? ''' for row in db.execute(query, (TIMESTAMP, command)): command = normalize_user_string(row[0]) if command.startswith(user_filter): counts[command] += 1 total += 1 percentage = 100 / float(total) table = Texttable() table.set_deco(Texttable.HEADER) table.set_cols_align(('r', 'r', 'l')) table.set_cols_width((5, 6, 75)) table.header(('count', '%', 'command')) for key, value in sorted(counts.iteritems(), key=lambda (k, v): (v, k), reverse=True)[:20]: table.add_row((value, value * percentage, key)) print table.draw()
def render_storage_boxes_as_table(storage_boxes, display_heading=True):
    """
    Returns an ASCII table view of storage_boxes.

    :param storage_boxes: The storage boxes to be rendered
        (:class:`mtclient.models.resultset.ResultSet`).
    :param display_heading: When True, the meta information returned by
        the query is summarized in a 'heading' before the table; this can
        reveal whether the results were truncated by pagination.
    """
    if display_heading:
        heading = (
            "\n"
            "Model: StorageBox\n"
            "Query: %s\n"
            "Total Count: %s\n"
            "Limit: %s\n"
            "Offset: %s\n\n"
            % (storage_boxes.url, storage_boxes.total_count,
               storage_boxes.limit, storage_boxes.offset))
    else:
        heading = ""

    table = Texttable(max_width=0)
    table.set_cols_align(["r", 'l', 'l'])
    table.set_cols_valign(['m', 'm', 'm'])
    table.header(["ID", "Name", "Description"])
    for box in storage_boxes:
        table.add_row([box.id, box.name, box.description])
    return heading + table.draw() + "\n"
def do_disable(self, args): try: doParser = self.arg_disable() doArgs = doParser.parse_args(shlex.split(args)) printer.out("Disabling user [" + doArgs.account + "] ...") user = self.api.Users(doArgs.account).Get() if user is None: printer.out("user " + doArgs.account + "does not exist", printer.ERROR) else: if user.active == False: printer.out("User [" + doArgs.account + "] is already disabled", printer.ERROR) else: user.active = False self.api.Users(doArgs.account).Update(body=user) printer.out("User [" + doArgs.account + "] is now disabled", printer.OK) if user.active == True: actived = "X" else: actived = "" printer.out("Informations about [" + doArgs.account + "] :") table = Texttable(200) table.set_cols_align(["c", "l", "c", "c", "c", "c", "c", "c"]) table.header( ["Login", "Email", "Lastname", "Firstname", "Created", "Active", "Promo Code", "Creation Code"]) table.add_row([user.loginName, user.email, user.surname, user.firstName, user.created.strftime("%Y-%m-%d %H:%M:%S"), actived, user.promoCode, user.creationCode]) print table.draw() + "\n" return 0 except ArgumentParserError as e: printer.out("In Arguments: " + str(e), printer.ERROR) self.help_disable() except Exception as e: return marketplace_utils.handle_uforge_exception(e)
def print_empty_docs(dataset: StructuredDataset):
    """
    Prints the empty documents in the given dataset.
    """
    # Create table for better printing.
    table = Texttable()
    table.set_cols_width([30, 15, 10, 10, 10])
    table.set_cols_align(['c', 'c', 'c', 'c', 'l'])
    table.set_header_align(['c', 'c', 'c', 'c', 'c'])
    table.header([
        'Category name',
        'Doc index inside category list of docs',
        'Num words',
        'Document name',
        'Content preview'
    ])

    num_empty_docs = 0
    for category_name, category_docs in dataset.files_dict.items():
        # enumerate() replaces the manual per-category index counter.
        for doc_index_in_category, doc in enumerate(category_docs):
            doc_words = doc.content.split()
            if not doc_words:
                num_empty_docs += 1
                # NOTE(review): row added only for empty docs, matching the
                # function's purpose -- confirm against original layout.
                table.add_row([
                    category_name, doc_index_in_category, len(doc_words),
                    doc.name, doc.content
                ])
    print(table.draw())
    print(" Num empty docs:", num_empty_docs)
def build_table(matches, result_map, headers):
    """Render match results as a backtick-wrapped Texttable string.

    Args:
        matches: list of matched keys into result_map.
        result_map: maps each match to its sequence of row values.
        headers: column headers; headers[0] labels the match column.

    Returns:
        str: the drawn table wrapped in ``` (multi-match, one row per
        match) or ` (single match, attribute/value layout).
    """
    table = Texttable()
    if len(matches) > 1:
        # Wide layout: one row per match.
        table.set_deco(Texttable.HEADER | Texttable.HLINES | Texttable.VLINES)
        table.set_cols_align(['c'] * len(headers))
        table.set_cols_valign(['c'] * len(headers))
        table.header(headers)
        for match in matches:
            # FIX: leftover debug print() calls removed; redundant [0:]
            # slice dropped.
            data = [match]
            data.extend(result_map[match])
            table.add_row(data)
        output = '```' + table.draw() + '```'
    else:
        # Narrow layout: header/value pairs for the single match.
        table.set_cols_align(["l", "r"])
        table.set_cols_valign(["m", "m"])
        table.set_cols_width([10, 20])
        table.header([headers[0], matches[0]])
        data = list(zip(headers[1:], result_map[matches[0]]))
        table.add_rows(data, header=False)
        output = '`' + table.draw() + '`'
    return output
def main():
    """Simulate ThermalNet access-control readings in a fixed loop.

    Each cycle generates a random requester/location/temperature/PPE
    combination, prints it as a table, forwards it via now(), then waits
    cycle_time seconds (rendered as a one-tick-per-second progress bar).
    """
    # Locations and PPE states come from the config file.
    locations = ast.literal_eval(config['now_location']['locations'])
    status = ast.literal_eval(config['now_states']['status'])

    # Originally driven by int(sys.argv[1]); fixed at 10 cycles for now.
    for _cycle in range(10):
        requester = names.get_full_name()
        location = random.choice(locations)
        temp = temp_check()
        ppe = random.choice(status)
        access = access_check(temp[1], ppe)

        print(Fore.YELLOW + '\nThermalNet Simulator Data' + Style.RESET_ALL)
        t = Texttable()
        t.set_cols_width([20, 10, 8, 8, 8, 8])
        t.set_cols_align(['c'] * 6)
        t.add_rows(
            [['requester', 'location', 'temp', 'temp ok', 'ppe ok', 'access'],
             [requester, location, temp[0], temp[1], ppe, access[0]]])
        print(t.draw())

        now(requester, location, temp[0], temp[1], ppe, access[0])
        print('\n')
        # Visual cycle time: one progress-bar tick per second.
        for _tick in tqdm(range(cycle_time)):
            time.sleep(1)
def summary(
        self,
        deco: int = Texttable.BORDER,
        cols_align: List[str] = None,
        cols_valign: List[str] = None) -> TypeVar("DataloaderCollator"):
    r"""Get summary of trainer.

    Args:
        deco (int): Border style of the text table.
        cols_align (List[str], optional): Columns' horizontal alignment;
            defaults to ["l", "l", "l"].
        cols_valign (List[str], optional): Columns' vertical alignment;
            defaults to ["t", "t", "t"].

    Returns:
        torecsys.data.dataloader.DataloaderCollator: self
    """
    # FIX: avoid mutable default arguments -- resolve defaults per call.
    if cols_align is None:
        cols_align = ["l", "l", "l"]
    if cols_valign is None:
        cols_valign = ["t", "t", "t"]

    # Create and configure the text table.
    t = Texttable()
    t.set_deco(deco)
    t.set_cols_align(cols_align)
    t.set_cols_valign(cols_valign)

    # One row per schema field: name, type and its collate kwargs.
    rows = [["Field Name: ", "Field Type: ", "Arguments: "]]
    rows += [[k, v, ", ".join(self.kwargs.get(k, {}).keys())]
             for k, v in self.schema.items()]
    t.add_rows(rows)

    # Print summary with text table.
    print(t.draw())
    return self
def do_list(self, args): try: doParser = self.arg_list() doArgs = doParser.parse_args(shlex.split(args)) printer.out("Getting roles and their entitlements for user [" + doArgs.account + "]:\n") roles = self.api.Users(doArgs.account).Roles.Getall() table = Texttable(200) table.set_cols_align(["l", "l"]) table.header(["Name", "Description"]) table.set_cols_width([30,60]) for role in roles.roles.role: table.add_row([role.name.upper(), role.description]) for entitlement in role.entitlements.entitlement: table.add_row(["===> " + entitlement.name, entitlement.description]) printer.out("Role entitlements are represented with \"===>\".", printer.INFO) print table.draw() + "\n" return 0 except ArgumentParserError as e: printer.out("In Arguments: "+str(e), printer.ERROR) self.help_list() except Exception as e: return handle_uforge_exception(e)
def do_list(self, args): try: doParser = self.arg_list() doArgs = doParser.parse_args(shlex.split(args)) printer.out("Getting roles and their entitlements for user [" + doArgs.account + "]:\n") roles = self.api.Users(doArgs.account).Roles.Getall() table = Texttable(200) table.set_cols_align(["l", "l"]) table.header(["Name", "Description"]) table.set_cols_width([30, 60]) for role in roles.roles.role: table.add_row([role.name.upper(), role.description]) for entitlement in role.entitlements.entitlement: table.add_row( ["===> " + entitlement.name, entitlement.description]) printer.out("Role entitlements are represented with \"===>\".", printer.INFO) print table.draw() + "\n" return 0 except ArgumentParserError as e: printer.out("In Arguments: " + str(e), printer.ERROR) self.help_list() except Exception as e: return handle_uforge_exception(e)
def showTable(self):
    """Print the recommendation list as a table.

    For each recommended movie, shows its id, name, release date, and the
    neighbour user ids that contributed the recommendation.
    """
    neighbors_id = [i[1] for i in self.neighbors]
    table = Texttable()
    table.set_deco(Texttable.HEADER)
    table.set_cols_dtype(["t", "t", "t", "t"])
    table.set_cols_align(["l", "l", "l", "l"])
    rows = [[u"movie ID", u"Name", u"release", u"from userID"]]
    for item in self.recommandList:
        # Find the movie record for this recommendation.
        movie = None
        for candidate in self.movies:
            if candidate[0] == item[1]:
                movie = candidate
                break
        if movie is None:
            # BUG FIX: previously a missing movie silently reused the
            # previous iteration's record (or raised NameError on the
            # first iteration).
            continue
        # Neighbours who rated this movie.
        fromID = [uid for uid in self.ItemUser[item[1]]
                  if uid in neighbors_id]
        # BUG FIX: build a copy instead of appending to the shared
        # self.movies entry, which grew the dataset on every call.
        rows.append(list(movie) + [fromID])
    table.add_rows(rows)
    print(table.draw())
def print_matrix(m: np.ndarray, head: np.ndarray = None, title: str = "", c_type: str = 'a') -> None:
    """Pretty-print a 2-D matrix with an optional header row and title.

    :param m: matrix to display (2-D array).
    :param head: optional header row; blanks are used when omitted.
    :param title: optional banner printed above the table.
    :param c_type: Texttable column dtype code ('a' = automatic).
    :return: None
    """
    rows_m, cols_m = m.shape[0], m.shape[1]
    # First column left-aligned, remaining columns right-aligned.
    cols_align = ['l' if j == 0 else 'r' for j in range(cols_m)]

    if head is None:
        head = [' '] * cols_m
    content = [head]
    content.extend(m[i] for i in range(rows_m))

    table = Texttable()
    table.set_deco(Texttable.HEADER)
    table.set_header_align(cols_align)
    table.set_cols_dtype([c_type] * cols_m)  # automatic by default
    table.set_cols_align(cols_align)
    table.add_rows(content)

    if title != "":
        print("********************** " + title + " **********************")
    print(table.draw())
def to_string(self):
    """Render the per-target value at each pipeline step as a text table.

    Rows are targets, columns are the steps after the first; entries
    missing for a step render as "n/a".
    """
    from texttable import Texttable

    steps = self._steps[1:]
    header = [""] + list(steps)
    target_names = list(self._targets.keys())
    # One row per target: its name followed by its value at each step.
    body = {
        name: [name] + [str(self._targets[name].get(step, "n/a"))
                        for step in steps]
        for name in target_names
    }

    table = Texttable()
    table.header(header)
    table.set_cols_dtype(["t"] * len(header))
    table.set_cols_align(["l"] * len(header))
    table.set_deco(Texttable.HEADER)
    for name in target_names:
        table.add_row(body[name])

    msg = table.draw()
    return self._add_caption(msg, "-", "Table: Data layout transformation.")
def __init__(self):
    """Show a table of Student's t-distribution total probabilities.

    Numerically integrates the t-distribution probability density
    function with Simpson's rule; the total probability p is the area
    (the integral) from -t to t. Input values are read from
    'history.txt' via Calculate.
    """
    calculate = Calculate('history.txt')
    values = calculate.get_total_probability_p()

    table = Texttable()
    table.set_cols_align(["l", "c", "c"])
    table.set_cols_valign(["m", "m", "m"])
    table.set_cols_dtype(['t', 'i', 'f'])
    table.set_precision(5)

    label = [color(bcolors.GREEN, "Test"),
             '',
             color(bcolors.GREEN, "Expected Values")]
    head = [color(bcolors.GREEN, "t"),
            color(bcolors.GREEN, "dof"),
            color(bcolors.GREEN, "p")]

    # Label row becomes the table header; then the column names, then data.
    rows = [label, head]
    rows.extend(values)
    table.add_rows(rows)
    print(table.draw() + "\n")
def process(health, armor, dodge, defense):
    """Print stat-weight ratio tables for the given character stats.

    First echoes the input stats, then prints a matrix where cell
    (row, col) is weights[row] / weights[col], with 'inf' on division
    by zero.
    """
    weights = {
        'hitpoint': 1.,
        'armor': one_armor(health, armor, dodge, defense),
        'stamina': one_stamina(),
        'agility': one_agility(health, armor, dodge, defense),
        'dodge': one_dodge(health, armor, dodge, defense),
        'defense': one_defense(health, armor, dodge, defense)
    }

    # Echo the input stats.
    status = Texttable()
    status.header(['health', 'armor', 'dodge', 'defense'])
    status.set_cols_align(['r'] * 4)
    status.add_row(['%d' % health, '%d' % armor,
                    '%.2f%%' % (dodge * 100.), '%d' % defense])
    print(status.draw())

    # Ratio matrix: every attribute against each displayed dimension.
    dims = ['armor', 'stamina', 'agility', 'dodge', 'defense']
    table = Texttable()
    table.header([''] + dims)
    table.set_cols_dtype(['t'] * (len(dims) + 1))
    table.set_cols_align(['r'] * (len(dims) + 1))
    for att in ['hitpoint'] + dims:
        ratio_row = [att]
        for col in dims:
            try:
                ratio_row.append('%.6f' % (weights[att] / weights[col]))
            except ZeroDivisionError:
                ratio_row.append('inf')
        table.add_row(ratio_row)
    print(table.draw())
def make_new():
    # Build a fresh two-column table seeded with the guild's localized
    # "ws_list_row" header text.
    table = Texttable()
    table.set_deco(Texttable.HEADER)
    table.set_cols_dtype(["t", "t"])
    table.set_cols_align(["l", "l"])
    # First row comes from the per-guild localization lookup.
    # NOTE(review): relies on module-level `ctx` being set -- confirm caller.
    table.add_row(get_txt(ctx.guild.id, "ws_list_row"))
    return table
def list(self):
    """List the Drbd volumes and statuses.

    Returns:
        str: drawn table with one row per Drbd hard-drive object.
    """
    # Build the table: headers, then widths and alignment per column.
    table = Texttable()
    table.set_deco(Texttable.HEADER | Texttable.VLINES)
    table.header(('Volume Name', 'VM', 'Minor', 'Port', 'Role',
                  'Connection State', 'Disk State', 'Sync Status'))
    table.set_cols_width((30, 20, 5, 5, 20, 20, 20, 13))
    table.set_cols_align(('l', 'l', 'c', 'c', 'l', 'c', 'l', 'c'))

    # One row per Drbd object; role and disk state show both ends.
    for drbd in self.get_all_drbd_hard_drive_object(True):
        role_cell = 'Local: %s, Remote: %s' % (drbd._drbdGetRole()[0].name,
                                               drbd._drbdGetRole()[1].name)
        disk_cell = 'Local: %s, Remote: %s' % (drbd._drbdGetDiskState()[0].name,
                                               drbd._drbdGetDiskState()[1].name)
        sync_cell = 'In Sync' if drbd._isInSync() else 'Out of Sync'
        table.add_row((drbd.resource_name,
                       drbd.vm_object.get_name(),
                       drbd.drbd_minor,
                       drbd.drbd_port,
                       role_cell,
                       drbd._drbdGetConnectionState().name,
                       disk_cell,
                       sync_cell))
    return table.draw()
def dashboard_format(self, records):
    """Format modeled records in dashboard format.

    Args:
        records: Modeled records to format.

    Returns:
        list: title line, drawn table, and a trailing empty string.
    """
    title = util.hline(self.title_fmt % {'model_name': records[0].name.capitalize(),
                                         'storage_path': records[0].storage},
                       'cyan')
    # Header row first, then one row per record.
    rows = [[col['header'] for col in self.dashboard_columns]]
    for record in records:
        populated = record.populate()
        row = []
        for col in self.dashboard_columns:
            # Each column definition supplies exactly one cell strategy.
            if 'value' in col:
                cell = populated.get(col['value'], 'N/A')
            elif 'yesno' in col:
                cell = 'Yes' if populated.get(col['yesno'], False) else 'No'
            elif 'function' in col:
                cell = col['function'](populated)
            else:
                raise InternalError("Invalid column definition: %s" % col)
            row.append(cell)
        rows.append(row)
    table = Texttable(logger.LINE_WIDTH)
    table.set_cols_align([col.get('align', 'c')
                          for col in self.dashboard_columns])
    table.add_rows(rows)
    return [title, table.draw(), '']
def generate_table(region):
    """Print a table of EC2 instances in a region with their volumes.

    Args:
        region (str): AWS region name to query.
    """
    client = boto3.client(service_name='ec2', region_name=region)
    response = client.describe_instances()

    table = Texttable()
    table.set_cols_align(["c", "c", "c", "c", "c"])
    table.set_cols_valign(["m", "m", "m", "m", "m"])

    if not response['Reservations']:
        print(' No instances found in this region\n')
        return

    table.header(["Instance ID", "Name", "State", "VolumeId", "DeviceName"])
    # Hoisted out of the loop; the original rebuilt the resource (and
    # shadowed the client variable) for every instance.
    resource = boto3.resource('ec2', region)
    for reservation in response['Reservations']:
        for inst in reservation['Instances']:
            instance_id = inst['InstanceId']
            instance = resource.Instance(instance_id)
            # BUG FIX: reset per instance. Previously an instance with
            # tags but no 'Name' tag inherited the previous instance's
            # name (or hit an unbound variable on the first one).
            name = " "
            if instance.tags:
                for tag in instance.tags:
                    if tag['Key'] == 'Name':
                        name = tag['Value']
            state = inst['State']['Name']
            volume_ids = ''
            device_names = ''
            for dev in inst['BlockDeviceMappings']:
                volume_ids += dev['Ebs']['VolumeId'] + '\n'
                device_names += dev['DeviceName'] + '\n'
            table.add_row([instance_id, name, state, volume_ids, device_names])
    print(table.draw() + "\n")
def test_colored():
    # Verify that ANSI color escape codes inside cells do not disturb
    # Texttable's column-width computation: colored cells must align
    # exactly like plain-text cells.
    table = Texttable()
    table.set_cols_align(["l", "r", "c"])
    table.set_cols_valign(["t", "m", "b"])
    table.add_rows([
        [get_color_string(bcolors.GREEN, "Name Of Person"), "Age", "Nickname"],
        ["Mr\nXavier\nHuon", 32, "Xav'"],
        [get_color_string(bcolors.BLUE,"Mr\nBaptiste\nClement"), 1, get_color_string(bcolors.RED,"Baby")]
    ])
    # NOTE(review): this literal embeds raw ANSI escape sequences (the
    # "[92m"/"[0m" fragments are ESC-prefixed in the real file) and exact
    # padding spaces produced by table.draw() -- it must stay byte-identical
    # to the rendered output; confirm against the original file, since the
    # collapsed view here may have lost the ESC bytes and alignment spaces.
    expected_output = dedent("""
        +----------------+-----+----------+
        | [92mName Of Person[0m | Age | Nickname |
        +================+=====+==========+
        | Mr | | |
        | Xavier | 32 | |
        | Huon | | Xav' |
        +----------------+-----+----------+
        | [94mMr[0m | | |
        | [94mBaptiste[0m | 1 | |
        | [94mClement[0m | | [91mBaby[0m |
        +----------------+-----+----------+
    """).strip('\n')
    assert table.draw() == expected_output
def do_promote(self, args): try: doParser = self.arg_promote() doArgs = doParser.parse_args(shlex.split(args)) orgSpecified = org_utils.org_get(api=self.api, name=doArgs.org) adminUser = self.api.Users(doArgs.account).Get() if adminUser == None: printer.out("User [" + doArgs.account + "] doesn't exist.", printer.ERROR) else: self.api.Orgs(orgSpecified.dbId).Members(adminUser.loginName).Change(Admin=True, body=adminUser) printer.out("User [" + doArgs.account + "] has been promoted in [" + orgSpecified.name + "] :", printer.OK) if adminUser.active == True: active = "X" else: active = "" printer.out("Informations about [" + adminUser.loginName + "] :") table = Texttable(200) table.set_cols_align(["c", "l", "c", "c", "c", "c", "c", "c"]) table.header( ["Login", "Email", "Lastname", "Firstname", "Created", "Active", "Promo Code", "Creation Code"]) table.add_row([adminUser.loginName, adminUser.email, adminUser.surname, adminUser.firstName, adminUser.created.strftime("%Y-%m-%d %H:%M:%S"), active, adminUser.promoCode, adminUser.creationCode]) print table.draw() + "\n" return 0 except ArgumentParserError as e: printer.out("In Arguments: " + str(e), printer.ERROR) self.help_promote() except Exception as e: return marketplace_utils.handle_uforge_exception(e)
def launch(self): while self.playing: clear() print ( "=============================================\n Welcome in " + self.name + "\n=============================================\n" ) t = Texttable() t.set_cols_align(["c", "c"]) title = ["Choice", "Select what to do"] t.add_rows([title, [" 1", "Start a new game"]]) t.add_rows([title, [" 2", "Artificial intellignece settings"]]) t.add_rows([title, [" 0", "Exit"]]) print t.draw() while 1: try: a = input("Choice : ") break except: print "Please type 1, 2 or 0" if a == 2: self.displaySettings() elif a: self.startGame() else: self.playing = 0