def get_gists(user):
    # Fetch all public gists for `user` from the GitHub API and print one
    # summary row per gist (plus one row per file for multi-file gists).
    github_gists_url = 'https://api.github.com/users/%s/gists' % (user)
    gists = requests.get(github_gists_url)
    try:
        data = json.loads(gists.content)
    except ValueError:
        # Surface the raw non-JSON response body (e.g. an HTML error page)
        # so the caller can see what the API actually returned.
        raise ValueError(gists.content)
    for gist in data:
        # Row starts with the gist id (yellow) and its description.
        width = [[colored.yellow('#' + str(gist['id'])), 2], ]
        width.append(['{0}'.format(gist['description']), 70])
        if len(gist['files']) == 1:
            # Single-file gist: append size/language to the same row.
            for file in gist['files']:
                width.append([colored.blue("size:"), 6])
                width.append(["{0}".format(gist['files'][file]['size']), 8])
                width.append([colored.blue("language:"), 10])
                width.append(["{0}".format(gist['files'][file]['language']), 15])
            print columns(*width)
        if (len(gist['files']) != 1):
            # Multi-file gist: emit one indented row per file with its
            # name, size and language.
            for file in gist['files']:
                width = [["", 2]]
                width.append([colored.blue("file:"), 6])
                width.append([file, 25])
                width.append([colored.blue("size:"), 6])
                width.append(["{0}".format(gist['files'][file]['size']), 6])
                width.append([colored.blue("language:"), 10])
                width.append(["{0}".format(gist['files'][file]['language']), 10])
                print columns(*width)
def print_lines(self):
    """Print a header row, then one row per spot sorted by frequency."""
    from clint.textui import columns, puts, colored
    widths = [12, 9, 32, 32]
    headers = ['Freq', 'Call', 'Time', 'Source']
    # Header row rendered in red.
    puts(columns(*[[colored.red(h), w] for h, w in zip(headers, widths)]))
    for spot in sorted(self.spots, key=lambda s: s.freq):
        cells = [str(spot.freq),
                 spot.call,
                 str(datetime.now() - spot.time + timedelta(hours=1)),
                 str(spot.source)]
        puts(columns(*[[cell, w] for cell, w in zip(cells, widths)]))
def print_results(item):
    """Print a two-column row for *item* (a result dict or an (id, status) pair)."""
    if isinstance(item, dict):
        left = item['id'].split('.')[-1]
        right = statused[item['status']]
    else:
        left = item[0]
        right = statused.get(item[1], item[1])
    puts(columns([left, col], [right, col]))
def print_containers(containers, to_json=False):
    """Print *containers* sorted by name, as JSON or as an aligned table."""
    ordered = sorted(containers, key=lambda c: c.name)
    if to_json:
        puts(json.dumps([c.to_dict() for c in ordered],
                        indent=2, sort_keys=True, separators=(',', ': ')))
        return

    puts(colored.blue(columns(
        ["NODE", 15], ["CONTAINER ID", 15], ["STATUS", 7],
        ["IP", 15], ["NETWORK", 10], ["PARTITION", 10])))

    def partition_label(c):
        # "H" = holy; partition id, "[N]"-tagged when neutral; "N" = neutral only.
        if c.holy:
            return "H"
        if c.partition:
            return str(c.partition) + (" [N]" if c.neutral else "")
        return "N" if c.neutral else ""

    for c in ordered:
        puts(columns(
            [c.name, 15], [c.container_id[:12], 15], [c.status, 7],
            [c.ip_address or "", 15], [c.network_state, 10],
            [partition_label(c), 10]))
def cmd_install(args):
    """Installs legit git aliases."""
    aliases = ['branches', 'graft', 'harvest', 'publish', 'unpublish',
               'sprout', 'sync', 'switch', 'resync']
    print('The following git aliases have been installed:\n')
    for alias in aliases:
        cmd = '!legit ' + alias
        # Register (or replace) the alias in the global git config.
        os.system('git config --global --replace-all alias.{0} "{1}"'.format(
            alias, cmd))
        print(columns(['', 1], [colored.yellow('git ' + alias), 20], [cmd, None]))
    sys.exit()
def print_containers(containers, to_json=False):
    """Print a table (or JSON dump) of *containers*, ordered by name."""
    containers = sorted(containers, key=lambda c: c.name)
    if to_json:
        puts(json.dumps([c.to_dict() for c in containers],
                        indent=2, sort_keys=True, separators=(",", ": ")))
    else:
        puts(colored.blue(columns(
            ["NODE", 15], ["CONTAINER ID", 15], ["STATUS", 7],
            ["IP", 15], ["NETWORK", 10], ["PARTITION", 10])))
        for c in containers:
            # An unset partition renders as an empty cell.
            label = "" if c.partition is None else str(c.partition)
            puts(columns(
                [c.name, 15], [c.container_id[:12], 15], [c.state, 7],
                [c.ip_address or "", 15], [c.network_state, 10], [label, 10]))
def listall(owner=None, repo=None):
    """List all pull requests for owner/repo via the Bitbucket v2 API."""
    url = "{0}repositories/{1}/{2}/pullrequests/".format(BASE_URL_V2, owner, repo)
    res = requests.get(url, auth=(USERNAME, PASSWORD))
    if not res.content:
        puts(colored.red('Sorry, there is no pull request for this repository.'))
        return
    pulls = json.loads(res.content).get('values')
    puts(colored.magenta(columns(
        ['Id', 5], ['Status', 8], [str('Repository'), 12],
        ['Source branch', 45], ['Destination branch', 35], ['Created on', 35])))
    for pull in pulls:
        puts(colored.green(columns(
            [str(pull['id']), 5],
            [pull['state'], 8],
            [str(pull['source']['repository']['name']), 12],
            [pull['source']['branch']['name'], 45],
            [pull['destination']['branch']['name'], 35],
            [pull['created_on'], 35])))
def list_existing_accounts():
    """Show all linked accounts in a table and offer interactive deletion."""
    col = 20
    all_accounts = account_store.get_all_accounts()
    if not all_accounts:
        puts(colored.red("There is no linked account yet."))
        return
    puts(colored.green("You have linked the following account(s) to onedrive-d:\n"))
    puts(columns(
        [(colored.red("Index")), 10],
        [(colored.magenta("Account ID")), col],
        [(colored.cyan("Account Type")), col],
        [(colored.green("Name")), None]))
    accounts = []
    for index, account in enumerate(all_accounts.values()):
        puts(columns(
            [str(index), 10],
            [account.profile.user_id, col],
            [account.TYPE, col],
            [account.profile.name, None]))
        accounts.append(account)
    puts(colored.yellow("\nTo delete an account, type the index and hit [Enter]. Otherwise hit [Ctrl+C] to break."))
    puts(colored.yellow("Note: all the Drives belonging to the account will also be deleted."))
    puts()
    try:
        prompt_delete_account(accounts)
    except KeyboardInterrupt:
        puts(colored.green("Aborted."))
def display_available_branches():
    """Displays available branches."""
    branches = get_branches()
    if not branches:
        print(colored.red('No branches available'))
        return
    # Column wide enough for the longest branch name plus one space.
    name_width = max(len(b.name) for b in branches) + 1
    for branch in branches:
        try:
            selected = branch.name == repo.head.ref.name
        except TypeError:
            # Detached HEAD: repo.head.ref raises TypeError.
            selected = False
        marker = '*' if selected else ' '
        color = colored.green if selected else colored.yellow
        pub = '(published)' if branch.is_published else '(unpublished)'
        print(columns([colored.red(marker), 2],
                      [color(branch.name), name_width],
                      [black(pub), 14]))
def list_existing_accounts():
    """List linked accounts, then prompt for interactive deletion."""
    col = 20
    all_accounts = account_store.get_all_accounts()
    if not all_accounts:
        puts(colored.red('There is no linked account yet.'))
        return
    puts(colored.green('You have linked the following account(s) to onedrive-d:\n'))
    header = columns([(colored.red('Index')), 10],
                     [(colored.magenta('Account ID')), col],
                     [(colored.cyan('Account Type')), col],
                     [(colored.green('Name')), None])
    puts(header)
    account_list = []
    for account in all_accounts.values():
        row = columns([str(len(account_list)), 10],
                      [account.profile.user_id, col],
                      [account.TYPE, col],
                      [account.profile.name, None])
        puts(row)
        account_list.append(account)
    puts(colored.yellow('\nTo delete an account, type the index and hit [Enter]. Otherwise hit [Ctrl+C] to break.'))
    puts(colored.yellow('Note: all the Drives belonging to the account will also be deleted.'))
    puts()
    try:
        prompt_delete_account(account_list)
    except KeyboardInterrupt:
        puts(colored.green('Aborted.'))
def status(arguments):
    """Report server status for each requested environment."""
    output = StringIO()
    verbose = initialise(arguments)
    environments = arguments['<environment>']
    if environments:
        puts(columns(
            [(colored.green('Nickname')), 15],
            [(colored.green('Instance Type')), 10],
            [(colored.green('Status')), 20],
            [(colored.green('Instance href')), 60],
        ), stream=output.write)
    for environment in environments:
        server = righteous.find_server(environment)
        if not server:
            puts(colored.red('%s: Not Found' % environment),
                 stream=output.write)
            continue
        settings = righteous.server_settings(server['href'])
        if verbose:
            server_info = righteous.server_info(server['href'])
            puts('Server Info:\n' + colored.cyan(pformat(server_info)))
            puts('Server Settings:\n' + colored.cyan(pformat(settings)))
        puts(columns(
            [environment, 15],
            [settings['ec2-instance-type'], 10],
            [server['state'] if server else 'Found', 20],
            [server['href'] if server else 'Not', 60],
        ), stream=output.write)
    print(output.getvalue())
def status(self):
    """Print a table of running VMs with their network details."""
    self.check_vms_are_running()
    dns_started = docker.container_running('docker_dns')
    widths = [16, 38, 25, 25, 15, 25]
    titles = ['VM', 'HostName' if dns_started else 'IP',
              'Ports', 'Image', 'Docker ID', 'Docker Name']
    puts(columns(*[[colored.green(t), w] for t, w in zip(titles, widths)]))
    puts(columns(*[['-' * w, w] for w in widths]))
    for vm_id, vm_data in self.vms.items():
        # VMs without an IP are not running; skip them.
        if vm_data['ip'] == '':
            continue
        host = ('{}.docker'.format(vm_data['name'])
                if dns_started else vm_data['ip'])
        puts(columns(
            [vm_data['compose_name'], 16],
            [host, 38],
            [', '.join(vm_data['ports']), 25],
            [vm_data['image'], 25],
            [vm_id[:12], 15],
            [vm_data['name'], 25]))
def display(self, entries):
    # Pretty-print inventory entries: ID, object name(s), then every
    # attribute, using a shared label column.
    objectnamePrompt, objectnamePromptHR, objectsubnamePrompt, objectsubnamePromptHR = self._getObjectNames()
    # Label column width = longest attribute name (+1), capped at 40.
    width = 0
    attrs = self._inventory.getAttributes()
    for k, v in attrs.iteritems():
        width = max(width, len(v["attr_name"]))
    width = width + 1
    if width > 40:
        width = 40
    c = 0  # count of entries displayed, reported at the end
    for entry in self._inventory.lookupAttributes(entries):
        c = c + 1
        puts(columns(["ID:", width],
                     [colored.green(str(entry["object_id"])), None]))
        puts(columns([objectnamePromptHR, width],
                     [colored.green(str(entry[objectnamePrompt])), None]))
        if objectsubnamePrompt:
            puts(columns([objectsubnamePromptHR, width],
                         [colored.green(str(entry[objectsubnamePrompt])), None]))
        for k, v in entry["attributes"].iteritems():
            puts(columns([attrs[k]["attr_name"] + ":", width],
                         [str(v), None]))
        puts()  # blank line between entries
    puts(colored.yellow(str(c) + " entries found."))
def list_existing_drives():
    """List registered Drives and prompt the user to edit or delete one.

    Prints usage hints, then a table of all saved Drives, then hands the
    collected list to prompt_edit_drive() for interactive editing.
    """
    puts(colored.green('List registered Drives for editing / deleting...\n'))
    with indent(4, quote=' >'):
        puts('To edit a Drive, type the index of the Drive in the table.')
        puts('To delete a Drive, type a minus sign followed by the index of the Drive.')
        puts('To abort and return to main menu, hit [Ctrl+C].')
        puts('For example, type "1" to edit the Drive indexed 1, and type "-1" to delete it.')
        puts()
    account_store.get_all_accounts()
    drive_list = []
    with indent(4):
        # Fix: print the header row once, not once per drive.
        puts(columns(
            [(colored.green('Index')), 8],
            [(colored.magenta('Drive ID')), 17],
            [(colored.magenta('Drive Type')), 12],
            [(colored.cyan('Account')), 20],
            [(colored.yellow('Local Root')), None]))
        for key, drive in drive_store.get_all_drives().items():
            drive_id, account_id, account_type = key
            profile = drive.root.account.profile
            puts(columns(
                [str(len(drive_list)), 8],
                [drive_id, 17],
                [drive.type, 12],
                ["{} ({})".format(account_id, profile.name), 20],
                [drive.config.local_root, None]))
            drive_list.append(drive)
    prompt_edit_drive(drive_list)
def cmd_settings(args):
    """Opens legit settings in editor."""
    path = clint.resources.user.open('config.ini').name
    print('Legit Settings:\n')
    for option, _, description in settings.config_defaults:
        print(columns([colored.yellow(option), 25], [description, None]))
    print('\nSee {0} for more details.'.format(settings.config_url))
    sleep(0.35)
    # Pick the platform fallback editor; EDITOR/VISUAL take precedence.
    if is_osx:
        fallback = 'open'
    elif is_lin:
        fallback = 'pico'
    else:
        fallback = None
    if fallback is not None:
        editor = os.environ.get('EDITOR') or os.environ.get('VISUAL') or fallback
        os.system("{0} '{1}'".format(editor, path))
    elif is_win:
        os.system("'{0}'".format(path))
    else:
        print("Edit '{0}' to manage Legit settings.\n".format(path))
    sys.exit()
def print_options():
    ''' Print financial data options '''
    option_w = 6
    func_w = 30
    puts(columns(['Option', option_w], ['Function', func_w]))
    for name, func in options_list.items():
        puts(columns([name, option_w], [func, func_w]))
def list_existing_drives():
    # Print usage hints, list every saved Drive in a table, then enter the
    # interactive edit/delete prompt.
    puts(colored.green('List registered Drives for editing / deleting...\n'))
    with indent(4, quote=' >'):
        puts('To edit a Drive, type the index of the Drive in the table.')
        puts('To delete a Drive, type a minus sign followed by the index of the Drive.')
        puts('To abort and return to main menu, hit [Ctrl+C].')
        puts('For example, type "1" to edit the Drive indexed 1, and type "-1" to delete it.')
        puts()
    account_store.get_all_accounts()  # presumably refreshes the account cache -- TODO confirm
    drive_list = []
    for key, drive in drive_store.get_all_drives().items():
        drive_id, account_id, account_type = key
        with indent(4):
            # NOTE(review): this header row is re-printed for every drive;
            # it probably belongs outside the loop.
            puts(columns([(colored.green('Index')), 8],
                         [(colored.magenta('Drive ID')), 17],
                         [(colored.magenta('Drive Type')), 12],
                         [(colored.cyan('Account')), 20],
                         [(colored.yellow('Local Root')), None]))
            profile = drive.root.account.profile
            puts(columns([str(len(drive_list)), 8],
                         [drive_id, 17],
                         [drive.type, 12],
                         ["{} ({})".format(account_id, profile.name), 20],
                         [drive.config.local_root, None]))
            drive_list.append(drive)
    prompt_edit_drive(drive_list)
def process_shapiro(results):
    """Print Shapiro-Wilk test results as an aligned table."""
    width = COLUMN_WIDTH + 2
    titles = ['Length', 'N', 'Statistic W', 'p-value']
    puts(columns(*[[t, width] for t in titles]))
    for res in sorted(results):
        cells = [res[0][1], res[0][0], res[1][0], res[1][1]]
        puts(columns(*[[str(c), width] for c in cells]))
def process_correlations(results):
    """Print Spearman/Pearson correlation results as an aligned table."""
    width = COLUMN_WIDTH + 2
    titles = ['Length', 'N', 'Spearman', 'Pearson']
    puts(columns(*[[t, width] for t in titles]))
    for res in sorted(results):
        cells = [res[0][1], res[0][0], res[1], res[2]]
        puts(columns(*[[str(c), width] for c in cells]))
def print_containers(containers, to_json=False):
    """Render *containers* (sorted by name) as JSON or as a text table."""
    def partition_label(c):
        # Holy nodes print "H"; partitioned nodes print their partition id,
        # "[N]"-tagged if also neutral; neutral-only nodes print "N".
        if c.holy:
            return "H"
        if c.partition:
            return str(c.partition) + (" [N]" if c.neutral else "")
        return "N" if c.neutral else ""

    containers = sorted(containers, key=lambda c: c.name)
    if to_json:
        payload = [c.to_dict() for c in containers]
        puts(json.dumps(payload, indent=2, sort_keys=True,
                        separators=(',', ': ')))
        return
    puts(colored.blue(columns(
        ["NODE", 15], ["CONTAINER ID", 15], ["STATUS", 7],
        ["IP", 15], ["NETWORK", 10], ["PARTITION", 10])))
    for container in containers:
        puts(columns(
            [container.name, 15], [container.container_id[:12], 15],
            [container.state, 7], [container.ip_address or "", 15],
            [container.network_state, 10],
            [partition_label(container), 10]))
def status(self):
    """Display a table of the running VMs."""
    self.check_vms_are_running()
    widths = [20, 15, 30, 30, 15, 25]
    headers = ['VM', 'IP', 'Ports', 'Image', 'Docker ID', 'Docker Name']
    puts(columns(*[[colored.green(h), w] for h, w in zip(headers, widths)]))
    puts(columns(*[['-' * w, w] for w in widths]))
    for vm_id, vm_data in self.vms.items():
        # A VM with no IP is not up; omit it from the table.
        if vm_data['ip'] == '':
            continue
        puts(columns(
            [vm_data['compose_name'], 20],
            [vm_data['ip'], 15],
            [', '.join(vm_data['ports']), 30],
            [vm_data['image'], 30],
            [vm_id[:12], 15],
            [vm_data['name'], 25]))
def _print_pretty(self, filename):
    """Print a banner plus a two-column word/occurrence-count table.

    Args:
        filename: name of the document the keyword counts were taken from.
    """
    print(colored.magenta(Figlet(font='mini').renderText('ATS_Hacker')))
    print(f"Document: {colored.magenta(filename)}\n")
    col = 20
    # Fix: header previously misspelled "Occurances".
    print(columns([(colored.red('Word')), col],
                  [(colored.blue('Occurrences')), col]))
    for word, count in self.keyword_counts.items():
        print(columns([word, col], [str(count), col]))
    print()
def _handle_correlations(manager, var_x, var_y, names):
    """Print a correlations table for var_x vs var_y, one row per result."""
    col_width = COLUMN_WIDTH + 2
    results = manager.get_correlations(var_x, var_y)
    puts(columns(*[[name, col_width] for name in names]))
    for r in results:
        values = list(r[0])
        values.extend([r[1][0], r[1][1], r[2][0], r[2][1]])
        puts(columns(*[[str(v), col_width] for v in values]))
def process_experiments_results(results):
    """Print experiment result rows as an aligned table."""
    exclude = []
    # Header from the keys of the first data row (minus excluded keys).
    keys = results['data'][0].keys()
    puts(columns(*[[key, COLUMN_WIDTH] for key in keys if key not in exclude]))
    for res in results['data']:
        row = [[_format_value(v), COLUMN_WIDTH]
               for k, v in res.iteritems() if k not in exclude]
        puts(columns(*row))
def list_test_sets():
    """Print id/name for every available test set; returns 0."""
    result = client.testsets().json()
    col = 60
    puts(columns([(colored.red("ID")), col], [(colored.red("NAME")), None]))
    for entry in result:
        puts(columns([entry['id'], col], [entry['name'], None]))
    return 0
def _print_status_headers():
    """Display messages for stakkr status (header)"""
    widths = [16, 15, 32, 32, 15, 25]
    titles = ['Container', 'IP', 'Url', 'Image', 'Docker ID', 'Docker Name']
    puts(columns(*[[colored.green(t), w] for t, w in zip(titles, widths)]))
    puts(columns(*[['-' * w, w] for w in widths]))
def _print_status_headers(self):
    """Print the status table header row and its dashed underline."""
    header_cells = []
    rule_cells = []
    for title, width in (('Container', 16), ('IP', 15), ('Url', 32),
                         ('Image', 32), ('Docker ID', 15), ('Docker Name', 25)):
        header_cells.append([colored.green(title), width])
        rule_cells.append(['-' * width, width])
    puts(columns(*header_cells))
    puts(columns(*rule_cells))
def get_issues(user, repo, assigned=None):
    # List open issues for user/repo, paging through the GitHub API and
    # printing one row per issue (pull requests are skipped).
    github_issues_url = 'https://api.github.com/repos/%s/%s/issues' % (user, repo)
    params = None
    if assigned:
        # NOTE(review): filters by assignee=user (the repo owner), not by a
        # separate assignee value -- confirm that is intended.
        params = {'assignee': user}
    # A HEAD request exposes the pagination "Link" header; the fallback
    # string makes the regexp below yield a last page of 1.
    link = requests.head(github_issues_url).headers.get(
        'Link', '=1>; rel="last"')
    last = lambda url: int(
        re.compile('=(\d+)>; rel="last"$').search(url).group(1)) + 1
    for pagenum in xrange(1, last(link)):
        connect = requests.get(github_issues_url + '?page=%s' % pagenum,
                               params=params)
        try:
            data = json.loads(connect.content)
        except ValueError:
            # Non-JSON body: surface it so the caller sees the raw response.
            raise ValueError(connect.content)
        if not data:
            # Empty page: no (more) issues.
            puts('{0}. {1}'.format(
                colored.blue('octogit'),
                colored.cyan(
                    'Looks like you are perfect welcome to the club.')))
            break
        elif 'message' in data:
            # The API returned an error object instead of an issue list.
            puts('{0}. {1}'.format(colored.blue('octogit'),
                                   colored.red(data['message'])))
            sys.exit(1)
        for issue in data:
            #skip pull requests
            try:
                if issue['pull_request']['html_url']:
                    continue
                width = [
                    [colored.yellow('#' + str(issue['number'])), 4],
                ]
                if isinstance(issue['title'], unicode):
                    issue['title'] = issue['title'].encode('utf-8')
                width.append([issue['title'], 80])
                width.append(
                    [colored.red('(' + issue['user']['login'] + ')'), None])
                print columns(*width)
            except IndexError as err:
                # NOTE(review): a missing 'pull_request' key raises KeyError,
                # which this IndexError handler does not catch -- confirm.
                puts('{0}.Error: {1} triggered -- {2}'.format(
                    colored.blue('octogit'), colored.red('Keyerror'),
                    colored.red(err)))
def _print_status_headers(self, dns_started: bool):
    """Print the status header; the host column label depends on DNS state."""
    host_label = 'HostName' if dns_started else 'IP'
    spec = [('Container', 16), (host_label, 25), ('Ports', 25),
            ('Image', 32), ('Docker ID', 15), ('Docker Name', 25)]
    puts(columns(*[[colored.green(title), w] for title, w in spec]))
    puts(columns(*[['-' * w, w] for _, w in spec]))
def _print_status_headers():
    """Display messages for stakkr status (header)"""
    spec = (('Container', 16), ('IP', 15), ('Url', 32),
            ('Image', 32), ('Docker ID', 15), ('Docker Name', 25))
    puts(columns(*[[colored.green(name), width] for name, width in spec]))
    puts(columns(*[['-' * width, width] for _, width in spec]))
def list_tests():
    """Print id/name of every test in the current test set; returns 0."""
    result = client.tests().json()
    col = 60
    puts(columns([(colored.red("ID")), col], [(colored.red("NAME")), None]))
    for test in result:
        # Only tests belonging to the selected test set are shown.
        if test['testset'] != test_set:
            continue
        short_id = test['id'].split('.')[-1]
        puts(columns([short_id, col], [test['name'], None]))
    return 0
def process_results_from_db(results):
    """Print each DB result set: a green summary line, then a numbered table."""
    for res in results:
        t = res.pop('timestamp')
        params = res.pop('params')
        stamp = datetime.datetime.fromtimestamp(t)
        puts(colored.green('\n==%s (%s)' % (_str_dict(params), stamp)))
        fields = sorted(res.keys())
        header = [['No', COLUMN_WIDTH]] + [[k, COLUMN_WIDTH] for k in fields]
        puts(columns(*header))
        # All value lists share the same length; use the first as reference.
        for i in range(len(res[fields[0]])):
            row = [[str(i + 1), COLUMN_WIDTH]]
            row += [[_format_value(res[name][i]), COLUMN_WIDTH]
                    for name in fields]
            puts(columns(*row))
def table(mat, header=False, col_sep=3, indent_table=1, transpose=False):
    """Print fancy table.

    Arguments
    ---------
    mat -- list of sequences of strings
    header -- treat the first row as a header and underline it
    col_sep -- number of spaces separating columns
    indent_table -- left indent of the whole table
    transpose -- swap rows and columns before printing

    >>> mat = [['animal', 'couleur', 'age'],
    ...        ['chien', 'sable', '3'],
    ...        ['chat', 'noir', '7']]
    """
    mat_shape = len(mat), len(mat[0])
    if transpose:
        mat = [[mat[i][j] for i in range(mat_shape[0])]
               for j in range(mat_shape[1])]
    nbr_cols = len(mat[0])
    # every row must have the same number of columns
    for line in mat:
        if len(line) != nbr_cols:
            raise FancyTableError('Unadequate number of columns')
    # detect maximum size for each column
    size_cols = nbr_cols * [0]
    for line in mat:
        for index, item in enumerate(line):
            if len(item) > size_cols[index]:
                size_cols[index] = len(item)
    # right justify elements, line per line
    wmat = []
    for index, line in enumerate(mat):
        justified_line = []
        for j, item in enumerate(line):
            justified_line.append(item.rjust(size_cols[j]))
        wmat.append(justified_line)
    # reajust col size given col_sep
    size_cols = [sc + col_sep - 1 for sc in size_cols]
    # write table
    with indent(indent_table, quote=''):
        start = 0
        if header:
            # header row followed by a dashed underline
            elems = tuple(map(list, zip(wmat[0], size_cols)))
            puts(columns(*elems))
            start = 1
            underlines = tuple(
                map(list,
                    zip([len(item) * '-' for item in wmat[0]], size_cols)))
            puts(columns(*underlines))
        for line in wmat[start:]:
            elems = tuple(map(list, zip(line, size_cols)))
            puts(columns(*elems))
    puts()  # blank line at the end
    return
def _print_status_headers(self, dns_started: bool):
    """Print the header row and dashed underline for the status table."""
    first_col = colored.green('HostName' if dns_started else 'IP')
    puts(columns(
        [(colored.green('Container')), 16],
        [first_col, 25],
        [(colored.green('Ports')), 25],
        [(colored.green('Image')), 32],
        [(colored.green('Docker ID')), 15],
        [(colored.green('Docker Name')), 25]))
    underline = [[w * '-', w] for w in (16, 25, 25, 32, 15, 25)]
    puts(columns(*underline))
def help():
    """Print CLI usage and the list of interactive commands."""
    col1 = 30
    col2 = 40
    puts('\nusage: bootstrap-cli.py [file or directory] [--bootstrap]')
    puts('\nlist of commands:')
    commands = (
        (':exit', 'Exit Bottlenose'),
        (':load <file or directory>', 'Load a .bottle file or directory'),
        (':context', 'Switch between contexts'),
        (':help', 'Bring back this help info'),
    )
    with indent(2):
        for command, description in commands:
            puts(columns([command, col1], [description, col2]))
    puts('\nfor a guide to script syntax go to:')
    with indent(2):
        puts('https://github.com/BHX2/BottlenoseDB')
    puts()
def printHelp():
    """Print usage for the UHT DB management script and exit."""
    commands = (
        ("setupuser", "Prints the SQL needed to create the fueloptim user properly. Recommended usage: ./managedb.py setupuser | psql postgres"),
        ("create", "Creates the initial database from scratch."),
        ("drop", "Drops the database (without recreating it)"),
        ("reset", "Drops the existing database and recreates a new one from scratch"),
    )
    puts("UHT DB Management script")
    puts()
    puts("Available commands: ")
    puts()
    for name, description in commands:
        puts(columns([name, 20], [description, 60]))
    puts()
    puts("Available flags:")
    puts()
    puts(columns(["-f", 20], ["Force-closes all existing connections when used together with reset or drop", 60]))
    puts()
    sys.exit()
def match_entity(id):
    """ Tries to match a Perseus-Smith entity against a DBpedia entry. """
    logger.debug("%s"%id)
    test_url = format_perseus_uri(id)
    xml = get(test_url)
    temp = transform_tei(xml)
    names = set(parse_xml(temp)["names"])
    desc = parse_xml(temp)["desc"] # this is the Smith's entry
    for n in names:
        #for t in n.split():
        max_res = 10
        lookup_results = parse_lookup_reply(do_lookup(n,max_res))
        # If the first page came back full, retry once with a larger window;
        # if the larger page is also full, stop widening and use it as-is.
        while(len(lookup_results) == max_res):
            lookup_results = parse_lookup_reply(do_lookup(n,max_res*max_res))
            if(len(lookup_results)==max_res):
                break
        # Keep only candidates that carry a description (needed for TFIDF).
        documents = [(r["uri"],r["desc"]) for r in lookup_results if r["desc"] is not None]
        logger.debug(documents)
        if(len(documents)>1):
            """ there is > 1 result from dbpedia. will try to disambiguate using TFIDF model """
            results = suggest_matching(documents,query=desc)
            for n,r in enumerate(results):
                logger.debug("##%i## (%s) %s"%(n,r[1],r[0]))
            puts(columns([colored.green("[SMITH DICTIONARY ENTRY]\n%s\n"%desc.encode("utf-8")),60],
                         [colored.magenta("[DBPEDIA 1st CANDIDATE] (TFIDF score: %s)\n\"%s\"\n"%(results[0][1].encode("utf-8"),results[0][0][1])),None]))
            puts(colored.cyan("Is \"%s\" the sameAs \"%s\"?\n"%(test_url,results[0][0][0])))
            # NOTE(review): the answer is read but never inspected -- the
            # function returns True regardless of what the user types.
            answer = raw_input("[Yy/Nn]: ")
            return True
        elif(len(documents)==1):
            "" ""
            puts(columns([colored.green("[SMITH DICTIONARY ENTRY]\n%s\n"%desc.encode("utf-8")),60],
                         [colored.magenta("[DBPEDIA 1st CANDIDATE]\n%s\n"%documents[0][1].encode("utf-8")),None]))
            puts(colored.cyan("Is \"%s\" the sameAs \"%s\"?\n"%(test_url,documents[0][0])))
            # NOTE(review): as above, the reply is ignored -- confirm intent.
            answer = raw_input("[Yy/Nn]: ")
            return True
        else:
            print "No results from the DBpedia query"
            return False
    return
def handle(self, *args, **options):
    """List fixture_* keys in the bucket, oldest first."""
    bucket = self.get_bucket()
    fixtures = list(bucket.list('fixture_'))
    # Parse the string timestamp once per fixture.
    for fixture in fixtures:
        fixture.last_modified_dt = dateutil.parser.parse(fixture.last_modified)
    for fixture in sorted(fixtures, key=lambda f: f.last_modified_dt):
        print(columns(
            [fixture.name, 44],
            [humanize_filesize(fixture.size), 9],
            [fixture.last_modified_dt.strftime('%d %b %G'), 12],
            [naturaltime(fixture.last_modified_dt), 25],
        ))
def display_available_branches():
    """Displays available branches."""
    branches = get_branches()
    if not branches:
        # Fix: max() below raises ValueError on an empty branch list.
        print(colored.red('No branches available'))
        return
    branch_col = len(max([b.name for b in branches], key=len)) + 1
    for branch in branches:
        try:
            branch_is_selected = (branch.name == repo.head.ref.name)
        except TypeError:
            # Fix: a detached HEAD has no ref; treat nothing as selected
            # (matches the other display_available_branches variants).
            branch_is_selected = False
        marker = '*' if branch_is_selected else ' '
        color = colored.green if branch_is_selected else colored.yellow
        pub = '(published)' if branch.is_published else '(unpublished)'
        print(columns([colored.red(marker), 2],
                      [color(branch.name), branch_col],
                      [black(pub), 14]))
def find_request_id(profile, bucket, request_id):
    # Scan gzipped CloudFront log archives in *bucket* (newest key first)
    # for the response whose request id matches, print it, and exit.
    # pick a file to inspect
    session = boto3.Session(profile_name=profile)
    s3_client = session.client('s3')
    files = s3_client.list_objects_v2(Bucket=bucket, Prefix='cloudfront-log')
    args = arguments.Args()  # NOTE(review): unused -- confirm before removing
    logs = [k['Key'] for k in files['Contents']]
    logs.reverse()  # newest keys first
    for log in logs:
        s3obj = s3_client.get_object(Bucket=bucket, Key=log)
        # The S3 Body is a file-like stream; gzip.open decompresses it.
        with gzip.open(s3obj['Body'], 'rt') as f:
            responses = parse(f.readlines())
        for r in responses:
            if r.request_id == request_id:
                puts("Found!")
                puts(columns(
                    [colored.green(r.http_method), 6],
                    [colored.red(r.status_code), 6],
                    [colored.red(r.edge_result_type), 10],
                    [colored.yellow(r.request_id), 56],
                    [colored.magenta(str(r.timestamp)), 20],
                    [colored.blue(r.path), None],
                ))
                exit(0)  # stop the whole process after the first match
def get_issues(user, repo):
    # List all open issues for user/repo via the GitHub API, one row each.
    url = ISSUES_ENDPOINT % (user, repo)
    github_issues_url = ISSUES_PAGE % (user, repo)
    if valid_credentials():
        connect = requests.get(url, auth=(get_username(), get_password()))
    else:
        connect = requests.get(url)
    json_data = simplejson.loads(connect.content)
    try:
        # An error payload is a dict carrying 'message'; a success payload
        # is a list, so this subscript raises and falls through to pass.
        json_data['message']
        puts('{0}. {1}'.format(colored.blue('octogit'),
            colored.red('Do you even have a Github account? Bad Credentials')))
        return
    except:
        # NOTE(review): bare except also swallows unrelated errors (e.g.
        # from puts); narrowing to (TypeError, KeyError) would be safer.
        pass
    if len(json_data) == 0:
        puts('{0}. {1}'.format(colored.blue('octogit'),
            colored.cyan('Looks like you are perfect welcome to the club.')))
        return
    # NOTE(review): return value discarded here -- possibly a leftover call.
    get_number_issues(json_data)
    puts('link. {0} \n'.format(colored.green(github_issues_url)))
    puts('listing all {0} issues.'.format(
        colored.red(get_number_issues(json_data))))
    for issue in json_data:
        #skip pull requests
        if issue['pull_request']['html_url'] != None:
            continue
        width = [[colored.yellow('#' + str(issue['number'])), 5], ]
        width.append(['{0}'.format(issue['title']), 70])
        width.append([colored.red('(' + issue['user']['login'] + ')'), None])
        puts(columns(*width))
def display_available_branches(self):
    """Displays available branches."""
    remote_branches = bool(self.repo.remotes)
    branches = self.get_branches(local=True,
                                 remote_branches=remote_branches)
    if not branches:
        click.echo(crayons.red('No branches available'))
        return
    # Width of the longest branch name plus one space of padding.
    branch_col = max(len(b.name) for b in branches) + 1
    for branch in branches:
        try:
            selected = branch.name == self.get_current_branch_name()
        except TypeError:
            # Detached HEAD: no branch is current.
            selected = False
        marker = '*' if selected else ' '
        paint = colored.green if selected else colored.yellow
        state = '(published)' if branch.is_published else '(unpublished)'
        click.echo(columns(
            [colored.red(marker), 2],
            [paint(branch.name, bold=True), branch_col],
            [black(state), 14]))
def display_available_branches():
    """Displays available branches."""
    branches = get_branches()
    if not branches:
        print(colored.red('No branches available'))
        return
    branch_col = max(len(b.name) for b in branches) + 1
    for branch in branches:
        try:
            is_current = branch.name == repo.head.ref.name
        except TypeError:
            # Detached HEAD -- no branch is current.
            is_current = False
        highlight = colored.green if is_current else colored.yellow
        status = '(published)' if branch.is_published else '(unpublished)'
        print(columns(
            [colored.red('*' if is_current else ' '), 2],
            [highlight(branch.name), branch_col],
            [black(status), 14]))
def listall(owner=None, repo=None):
    """Print every pull request for owner/repo from the Bitbucket v2 API."""
    path = "{0}repositories/{1}/{2}/pullrequests/".format(BASE_URL_V2,
                                                          owner, repo)
    res = requests.get(path, auth=(USERNAME, PASSWORD))
    if res.content:
        pulls = json.loads(res.content).get('values')
        header = columns(['Id', 5], ['Status', 8], [str('Repository'), 12],
                         ['Source branch', 45], ['Destination branch', 35],
                         ['Created on', 35])
        puts(colored.magenta(header))
        for pull in pulls:
            row = columns(
                [str(pull['id']), 5],
                [pull['state'], 8],
                [str(pull['source']['repository']['name']), 12],
                [pull['source']['branch']['name'], 45],
                [pull['destination']['branch']['name'], 35],
                [pull['created_on'], 35])
            puts(colored.green(row))
    else:
        puts(colored.red('Sorry, there is no pull request for this repository.'))
def add_new_drive():
    # Enumerate every Drive of every linked account that has not yet been
    # saved locally, show them in a per-account table, and prompt the user
    # to add one.
    puts(
        colored.green(
            'Here are all the Drives belong to the accounts you have linked and not yet added:\n'
        ))
    drive_list = []
    for key, account in account_store.get_all_accounts().items():
        account_id, account_type = key
        puts(
            colored.magenta('{} Account "{}" ({})'.format(
                account_type.upper(), account_id, account.profile.name)))
        drive_root = drive_store.get_drive_root(account_id, account_type)
        all_drives = drive_root.get_all_drives()
        saved_drives = drive_store.get_all_drives()
        with indent(4):
            puts(
                columns([(colored.green('Index')), 10],
                        [(colored.cyan('Drive ID')), 20],
                        [(colored.cyan('Type')), 10],
                        [(colored.cyan('Default?')), 10],
                        [(colored.cyan('State')), 10],
                        [(colored.yellow('Total')), 10],
                        [(colored.yellow('Used')), 10],
                        [(colored.yellow('Free')), 10]))
            for id, drive in all_drives.items():
                # Skip Drives that are already registered locally.
                if drive_store.get_key(
                        id, drive.root.account.profile.user_id,
                        drive.root.account.TYPE) in saved_drives:
                    continue
                quota = drive.quota
                puts(
                    columns([str(len(drive_list)), 10], [id, 20],
                            [drive.type, 10],
                            ['Yes' if drive.is_default else '', 10],
                            [quota.state, 10],
                            [pretty_print_bytes(quota.total), 10],
                            [pretty_print_bytes(quota.used), 10],
                            [pretty_print_bytes(quota.remaining), 10]))
                drive_list.append(drive)
    if len(drive_list) == 0:
        puts()
        puts(colored.red('It seems there is no more Drive to add.'))
    else:
        try:
            prompt_add_drive(drive_list)
        except KeyboardInterrupt:
            puts(colored.green('Aborted.'))
def add_new_drive():
    # List every not-yet-added Drive across all linked accounts and prompt
    # the user to pick one to add.
    puts(colored.green("Here are all the Drives belonging to the accounts you have linked and not yet added:\n"))
    drive_list = []
    for key, account in account_store.get_all_accounts().items():
        account_id, account_type = key
        puts(colored.magenta('{} Account "{}" ({})'.format(account_type.upper(), account_id, account.profile.name)))
        drive_root = drive_store.get_drive_root(account_id, account_type)
        all_drives = drive_root.get_all_drives()
        saved_drives = drive_store.get_all_drives()
        puts(
            columns(
                [(colored.green("Index")), 5],
                [(colored.cyan("Drive ID")), 18],
                [(colored.cyan("Type")), 8],
                [(colored.cyan("Default?")), 8],
                [(colored.cyan("State")), 7],
                [(colored.yellow("Total")), 10],
                [(colored.yellow("Used")), 8],
                [(colored.yellow("Free")), 10],
            )
        )
        for id, drive in all_drives.items():
            # Drives already registered locally are skipped.
            if drive_store.get_key(id, drive.root.account.profile.user_id, drive.root.account.TYPE) in saved_drives:
                continue
            quota = drive.quota
            puts(
                columns(
                    [str(len(drive_list)), 5],
                    [id, 18],
                    [drive.type, 8],
                    ["Yes" if drive.is_default else "", 8],
                    [quota.state, 7],
                    [pretty_print_bytes(quota.total), 10],
                    [pretty_print_bytes(quota.used), 8],
                    [pretty_print_bytes(quota.remaining), 10],
                )
            )
            drive_list.append(drive)
        puts()  # blank line between accounts
    if len(drive_list) == 0:
        puts()
        puts(colored.red("It seems there is no more Drive to add."))
    else:
        try:
            prompt_add_drive(drive_list)
        except KeyboardInterrupt:
            puts(colored.green("Aborted."))
def puts_package_list(paginator, current_page, highlighted_item):
    # Render one page of packages, framed by "Page X of Y" banners.
    packages = paginator.page(current_page)
    starting_index = paginator.pagination*(current_page-1)
    pagination_tpl = "Page %s of %s" %(current_page, paginator.num_pages)
    puts(colored.green('='*80), newline=False)
    puts(pagination_tpl)
    puts(colored.green('='*80))
    for index, package in enumerate(packages):
        #if package.check_installed:
        #    puts('* ' + colored.green(package.title) + ' [Installed] ' + colored.yellow(package.pypi_package_name) + ' ' + colored.yellow(package.repo_name))
        #else:
        #    puts('* ' + colored.green(package.title) + ' ' + colored.yellow(package.pypi_package_name) + ' ' + colored.yellow(package.repo_name))
        #STREAM.write(package.title+"\r\n")
        #STREAM.write("\r")
        with indent(indent=6, quote="%s)" %str(starting_index+index+1)):
            title = colored.green(package.title)
            # The highlighted row gets a leading " * " marker.
            if index+1 == highlighted_item:
                title = " * " + title
            if package.installed:
                if not package.installed_version:
                    # There is no package version! We can't deduce if a new version is really available.
                    title += colored.yellow(" [Installed] ")
                else:
                    # Package version is there. Everything normal and good!
                    title += colored.yellow(" [Installed %s] " %package.installed_version)
                    if versioner(package.installed_version) < versioner(package.pypi_version):
                        title += colored.red(" [New version %s] " %package.pypi_version)
            puts(title)
            info = {
                "using": package.usage_count,
                "PYPI dl": package.pypi_downloads,
                #"forks": package.repo_forks,
                "watching": package.repo_watchers,
            }
            cols = [["%s: %s" %(key, value), 20] for key,value in info.items()]
            with indent(indent=6):
                #puts()
                puts(columns(*cols))
            puts()
    puts(colored.green('='*80), newline=False)
    puts(pagination_tpl)
    puts(colored.green('='*80))
def make_text_menu(dmenu):
    """Render a dict-of-columns menu as aligned text columns via clint.

    ``dmenu`` maps column names to equal-length lists of cell strings; a
    truthy ``dmenu['order']`` entry fixes the column ordering. Columns are
    sized to their widest cell, then squeezed to fit a 76-char window.
    """
    COLW = 76
    COLH = 24  # kept for parity with the original; currently unused here
    assert isinstance(dmenu, dict)
    if dmenu['order']:
        colnames = dmenu['order']
    else:
        # list() so colnames[0] below works on Python 3 dict views too.
        colnames = list(dmenu.keys())
    # Each column is as wide as its longest cell (or its header), plus one.
    # BUG FIX: len(max(dmenu[key])) measured the lexicographically greatest
    # string, not the longest one; measure the lengths themselves instead.
    colwidths = [
        max(max(len(cell) for cell in dmenu[key]) + 1, len(key) + 1)
        for key in colnames
    ]
    # Evenly distribute space to all columns.
    mincolwidth = int(np.floor(COLW / len(colwidths)))
    # If the table overflows the window, zero out the over-wide columns as
    # placeholders, then split the leftover space evenly between them.
    if sum(colwidths) > COLW:
        colwidths = [
            colwidth if colwidth < mincolwidth else 0
            for colwidth in colwidths
        ]
        ind, = np.where(np.array(colwidths) == 0)
        if len(ind):
            maxwidth = int(np.floor((COLW - sum(colwidths)) / len(ind)))
            colwidths = [
                colwidth if colwidth else maxwidth
                for colwidth in colwidths
            ]
    # Header row.
    cols = [[(colored.red(colname)), colwidth]
            for colname, colwidth in zip(colnames, colwidths)]
    puts(columns(*cols))
    # Data rows — assumes every column list has the same length as the first.
    for i in range(len(dmenu[colnames[0]])):
        cols = [[dmenu[key][i], colwidth]
                for key, colwidth in zip(colnames, colwidths)]
        puts(columns(*cols))
def print_system_info():
    """Print a short two-column summary of user and configuration info."""
    puts(colored.green('\nSystem Information'))
    rows = (
        ('User name', OS_USER_NAME),
        ('Configuration path', CONFIG_DIR),
    )
    for label, value in rows:
        puts(columns([label, 30], [value, None]))
def handle(self, *args, **options): bucket = self.get_bucket() fixtures = list(bucket.list('fixture_')) for fixture in fixtures: fixture.last_modified_dt = dateutil.parser.parse( fixture.last_modified) fixtures.sort(key=lambda x: x.last_modified_dt) for fixture in fixtures: print columns( [fixture.name, 44], [humanize_filesize(fixture.size), 9], [fixture.last_modified_dt.strftime('%d %b %G'), 12], [naturaltime(fixture.last_modified_dt), 25], )
def print_tickets_list(tickets):
    """Print a colored ticket table: done tickets green, open tickets red."""
    teams = _db_api_get_authenticated("/teams/info")['teams']
    puts(colored.blue(columns(["ID#", 4], ["Team Name", 20], ["Subject", 24], ["Status", 7], bold=True)))
    for t in tickets:
        # FIX: the original duplicated the whole row expression in both
        # branches (with a redundant inner ternary on t['done']); build the
        # row once and vary only the color.
        done = t['done'] == 1
        color = colored.green if done else colored.red
        row = columns([str(t['id']), 4],
                      [teams[str(t['team_id'])]['name'], 20],
                      [t['subject'], 24],
                      ['DONE' if done else 'OPEN', 7])
        puts(color(row))
def word_description(words_and_descriptions):
    """Given a list of two-tuples, the first item being a word and the second
    being a description of that word, return a neatly-formatted string with
    the words and descriptions in columns (2-space gutter, 13-wide word
    column, 65-wide description column).
    """
    return "\n".join(
        columns([" ", 2], [word, 13], [description, 65])
        for word, description in words_and_descriptions
    )
def _print_status_body(cts: dict):
    """Display messages for stakkr status (body)."""
    for name in sorted(cts):
        info = cts[name]
        # Containers with an empty IP are skipped from the table.
        if info['ip'] == '':
            continue
        puts(columns(
            [info['compose_name'], 16],
            [info['ip'], 15],
            [info['traefik_host'], 32],
            [info['image'], 32],
            [info['id'][:12], 15],
            [info['name'], 25]))
def timeline(reverse):
    """Print the twtxt timeline, one colored row per tweet."""
    tweets = twtxt.timeline(reverse=reverse)
    for tweet in tweets:
        tweet.process_text()
        puts(columns(
            [colored.black(tweet.source.nick, bold=True), 10],
            [colored.magenta(humanize.naturaldate(tweet.timestamp)), 10],
            [tweet.text, 59]))
def view(nick, reverse):
    """Print a source's header line followed by each of its tweets."""
    source = twtxt.view(nick, reverse=reverse)
    header = "@{0} - {1}".format(colored.black(source.nick, bold=True), source.url)
    puts(header)
    for tweet in source.get_tweets():
        puts(columns(
            [colored.magenta(humanize.naturaldate(tweet.timestamp)), 10],
            [tweet.text, 69]))