def cves(config, team, artifact, tag, url, output):
    '''List all CVE's found by Clair service for a specific artifact tag'''
    set_pierone_url(config, url)
    rows = []
    token = get_token()
    for artifact_tag in get_tags(config.get('url'), team, artifact, token):
        if artifact_tag['name'] == tag:
            installed_software = get_clair_features(artifact_tag.get('clair_details'), token)
            for software_pkg in installed_software:
                for cve in software_pkg.get('Vulnerabilities', []):
                    rows.append({
                        'cve': cve['Name'],
                        'severity': cve['Severity'].upper(),
                        'affected_feature': '{}:{}'.format(software_pkg['Name'],
                                                           software_pkg['Version']),
                        # Only set when Clair reports a fixed version.
                        'fixing_feature': cve.get('FixedBy') and '{}:{}'.format(
                            software_pkg['Name'], cve['FixedBy']),
                        'link': cve['Link'],
                    })
    severity_rating = ['CRITICAL', 'HIGH', 'MEDIUM', 'LOW', 'NEGLIGIBLE', 'UNKNOWN', 'PENDING']
    # Sort most severe first; a severity string not in the list used to raise
    # ValueError from .index() -- unknown severities now simply sort last.
    rows.sort(key=lambda row: (severity_rating.index(row['severity'])
                               if row['severity'] in severity_rating
                               else len(severity_rating)))
    with OutputFormat(output):
        titles = {
            'cve': 'CVE',
            'severity': 'Severity',
            'affected_feature': 'Affected Feature',
            'fixing_feature': 'Fixing Feature',
            'link': 'Link'
        }
        print_table(['cve', 'severity', 'affected_feature', 'fixing_feature', 'link'],
                    rows, titles=titles, styles=CVE_STYLES)
def traffic(stack_name: str, stack_version: Optional[str], percentage: Optional[int],
            region: Optional[str], remote: Optional[str], output: Optional[str]):
    '''Manage stack traffic'''
    lizzy = setup_lizzy_client(remote)
    if percentage is None:
        # Read-only mode: show current traffic weights of completed stacks.
        stack_reference = [stack_name]
        with Action('Requesting traffic info..'):
            stack_weights = []
            for stack in lizzy.get_stacks(stack_reference, region=region):
                if stack['status'] not in ('CREATE_COMPLETE', 'UPDATE_COMPLETE'):
                    continue
                stack_id = '{stack_name}-{version}'.format_map(stack)
                traffic = lizzy.get_traffic(stack_id, region=region)
                stack_weights.append({'stack_name': stack_name,
                                      'version': stack['version'],
                                      'identifier': stack_id,
                                      'weight%': traffic['weight']})
        cols = ['stack_name', 'version', 'identifier', 'weight%']
        with OutputFormat(output):
            print_table(cols, sorted(stack_weights, key=lambda w: w['identifier']))
    else:
        # Change mode: route the given percentage of traffic to this version.
        with Action('Requesting traffic change..'):
            stack_id = '{stack_name}-{stack_version}'.format_map(locals())
            lizzy.traffic(stack_id, percentage, region=region)
def list_clusters(args):
    """Print a table of all 'ready' Kubernetes clusters from the Cluster Registry."""
    config = stups_cli.config.load_config(APP_NAME)
    cluster_registry = config.get('cluster_registry')
    if not cluster_registry:
        # Not configured yet: ask interactively.
        cluster_registry = fix_url(click.prompt('URL of Cluster Registry'))
    token = zign.api.get_token('kubectl', ['uid'])
    response = requests.get('{}/kubernetes-clusters'.format(cluster_registry),
                            params={'lifecycle_status': 'ready'},
                            headers={'Authorization': 'Bearer {}'.format(token)},
                            timeout=20)
    response.raise_for_status()
    rows = []
    for cluster in response.json()['items']:
        status = cluster.get('status', {})
        # Abbreviated version string; flag clusters that are mid-update.
        version = status.get('current_version', '')[:7]
        if status.get('next_version') and status.get('current_version') != status.get('next_version'):
            version += ' (updating)'
        cluster['version'] = version
        rows.append(cluster)
    rows.sort(key=lambda c: (c['alias'], c['id']))
    print_table('id alias environment channel version'.split(), rows)
def request_access_interactive():
    """Interactively pick a running EC2 instance and an access reason.

    Prompts for an AWS region, lists all running instances that carry
    Name/StackName/StackVersion tags, and lets the user choose one.

    Returns:
        (host, reason) tuple: the chosen instance's private IP and the
        free-text access reason.
    """
    region = click.prompt('AWS region', default=get_region())
    ec2 = boto3.resource('ec2', region_name=region)
    reservations = ec2.instances.filter(
        Filters=[{'Name': 'instance-state-name', 'Values': ['running']}])
    instance_list = []
    for r in reservations:
        tags = r.tags
        if not tags:
            continue
        # Reset per instance: previously these survived across iterations,
        # so an instance missing a tag silently inherited the previous
        # instance's value.
        name = stack_name = stack_version = None
        for d in tags:
            d_k, d_v = d['Key'], d['Value']
            if d_k == 'Name':
                name = d_v
            elif d_k == 'StackName':
                stack_name = d_v
            elif d_k == 'StackVersion':
                stack_version = d_v
        if name and stack_name and stack_version:
            instance_list.append({'name': name,
                                  'stack_name': stack_name,
                                  'stack_version': stack_version,
                                  'instance_id': r.instance_id,
                                  'private_ip': r.private_ip_address})
    instance_count = len(instance_list)
    if instance_count == 0:
        # Previously this fell through to a prompt with an empty choice range.
        raise click.ClickException('No running instances were found.')
    sorted_instance_list = sorted(instance_list,
                                  key=operator.itemgetter('stack_name', 'stack_version'))
    # Plain loop instead of a side-effecting set comprehension.
    for idx, entry in enumerate(sorted_instance_list, start=1):
        entry['index'] = idx
    print()
    print_table('index name stack_name stack_version private_ip instance_id'.split(),
                sorted_instance_list)
    print()
    allowed_choices = [str(n) for n in range(1, instance_count + 1)]
    instance_index = int(click.prompt('Choose an instance (1-{})'.format(instance_count),
                                      type=click.Choice(allowed_choices))) - 1
    host = sorted_instance_list[instance_index]['private_ip']
    reason = click.prompt('Reason', default='Troubleshooting')
    return (host, reason)
def list_stacks(stack_ref: List[str], all: bool, remote: str, region: str, watch: int, output: str):
    """List Lizzy stacks"""
    lizzy = setup_lizzy_client(remote)
    stack_references = parse_stack_refs(stack_ref)
    while True:
        rows = []
        for stack in lizzy.get_stacks(stack_references, region=region):
            created = dateutil.parser.parse(stack['creation_time'])
            rows.append({'stack_name': stack['stack_name'],
                         'version': stack['version'],
                         'status': stack['status'],
                         'creation_time': created.timestamp(),
                         'description': stack['description']})
        rows.sort(key=lambda entry: (entry['stack_name'], entry['version']))
        with OutputFormat(output):
            print_table('stack_name version status creation_time description'.split(),
                        rows, styles=STYLES, titles=TITLES)
        if watch:  # pragma: no cover
            # Refresh periodically until interrupted.
            time.sleep(watch)
            click.clear()
        else:
            break
def list_apps(config, output, since, team, **kwargs):
    '''List applications'''
    url = get_url(config)
    token = get_token()
    since_str = parse_since(since) if since else ''
    response = request(url, '/apps', token, params={})
    response.raise_for_status()
    rows = []
    for app in response.json():
        # Filters: inactive apps hidden unless --all; optional team and
        # modified-since filters.
        if not app['active'] and not kwargs['all']:
            continue
        if team and app['team_id'] != team:
            continue
        if app['last_modified'] < since_str:
            continue
        app['last_modified_time'] = parse_time(app['last_modified'])
        rows.append(app)
    # we get the newest violations first, but we want to print them in order
    rows.sort(key=lambda app: app['id'])
    with OutputFormat(output):
        print_table(['id', 'team_id', 'name', 'subtitle', 'last_modified_time'],
                    rows,
                    titles={'last_modified_time': 'Modified'},
                    max_column_widths={'name': 32, 'subtitle': 32})
def list_versions(config, application_id, output, since):
    '''List application versions'''
    url = get_url(config)
    token = get_token()
    since_str = parse_since(since)
    response = request(url, '/apps/{}/versions'.format(application_id), token, params={})
    response.raise_for_status()
    rows = []
    for version in response.json():
        if version['last_modified'] < since_str:
            continue
        # Fetch and flatten the approvals for this version into one string.
        approvals_resp = request(url, '/apps/{}/versions/{}/approvals'.format(
            application_id, version['id']), token)
        version['approvals'] = ', '.join('{}: {}'.format(a['approval_type'], a['user_id'])
                                         for a in approvals_resp.json())
        version['last_modified_time'] = parse_time(version['last_modified'])
        rows.append(version)
    # we get the newest violations first, but we want to print them in order
    rows.sort(key=lambda v: v['last_modified_time'])
    with OutputFormat(output):
        print_table(['application_id', 'id', 'artifact', 'approvals', 'last_modified_time'],
                    rows, titles={'last_modified_time': 'Modified'})
def types(config, output):
    '''List violation types'''
    url = config.get('url')
    if not url:
        raise click.ClickException(
            'Missing configuration URL. Please run "stups configure".')
    token = get_token()
    response = request(url, '/api/violation-types', token)
    response.raise_for_status()
    rows = []
    for violation_type in response.json():
        violation_type['created_time'] = parse_time(violation_type['created'])
        rows.append(violation_type)
    rows.sort(key=lambda t: t['id'])
    with OutputFormat(output):
        print_table(['id', 'violation_severity', 'created_time', 'help_text'],
                    rows,
                    titles={'created_time': 'Created', 'violation_severity': 'Sev.'})
def cli(url, suspicious):
    """List network connections per AWS account/region.

    With --suspicious, only connections whose destination port is not 443
    are listed.
    """
    token = zign.api.get_existing_token('test')
    if token is None:
        # get_existing_token returns None when the token is missing/expired;
        # fail cleanly instead of crashing on the subscript below
        # (consistent with the date-range variant of this command).
        print("Token 'test' expired")
        exit(1)
    access_token = token['access_token']
    headers = {'Authorization': 'Bearer {}'.format(access_token)}
    r = requests.get(url + '/accounts', headers=headers)
    # Previously missing: surface HTTP errors instead of parsing error bodies.
    r.raise_for_status()
    accounts = r.json()
    r = requests.get(url + '/connections', headers=headers)
    r.raise_for_status()
    data = r.json()
    rows = []
    for account, connections in sorted(data.items()):
        for conn in connections:
            account_id, region = account.split('/')
            conn['account_id'] = account_id
            conn['account_name'] = accounts.get(account_id, {}).get('name')
            conn['region'] = region
            if not suspicious or conn['dest_port'] != 443:
                rows.append(conn)
    print_table('account_id account_name region dest dest_port source score'.split(), rows)
def image(config, image, url, output):
    '''List tags that point to this image'''
    set_pierone_url(config, url)
    token = get_token()
    try:
        resp = request(config.get('url'), '/tags/{}'.format(image), token)
    except requests.HTTPError as error:
        # 404/412 are expected user errors; anything else propagates.
        status_code = error.response.status_code
        if status_code == 404:
            click.echo('Image {} not found'.format(image))
        elif status_code == 412:
            click.echo('Prefix {} matches more than one image.'.format(image))
        else:
            raise error
        return
    with OutputFormat(output):
        print_table(['team', 'artifact', 'name'],
                    resp.json(),
                    titles={'name': 'Tag', 'artifact': 'Artifact', 'team': 'Team'})
def test_tsv_out(capsys):
    # Rows with disjoint keys must render empty TSV cells for missing values.
    with OutputFormat('tsv'):
        warning('this is a warning')
        print_table('a b'.split(), [{'a': 1}, {'b': 2}])
    out, err = capsys.readouterr()
    assert out == 'a\tb\n1\t\n\t2\n'
    assert err == 'this is a warning\n'
def pull_requests(config, output):
    '''List pull requests'''
    token = config.get('github_access_token')
    repositories = get_repositories()
    rows = []
    for issue in get_my_issues(token):
        pull_request = issue.get('pull_request')
        if not pull_request:
            continue
        repo = repositories.get(issue['repository']['url'])
        if not repo:
            continue
        # Merge the full PR payload into the issue dict.
        response = request(session.get, pull_request['url'], token)
        issue.update(**response.json())
        issue['repository'] = repo['full_name']
        issue['created_time'] = parse_time(issue['created_at'])
        issue['created_by'] = issue['user']['login']
        issue['labels'] = ', '.join(label['name'] for label in issue['labels'])
        rows.append(issue)
    rows.sort(key=lambda issue: (issue['repository'], issue['number']))
    with OutputFormat(output):
        print_table(['repository', 'number', 'title', 'labels', 'mergeable',
                     'mergeable_state', 'created_time', 'created_by'], rows)
def test_text_out(capsys):
    # Empty rows render as blank-padded lines under the header.
    with OutputFormat('text'):
        warning('this is a warning')
        print_table('a b'.split(), [{}, {}])
    out, err = capsys.readouterr()
    assert out == u'A│B\n \n \n'
    assert err == 'this is a warning\n'
def scm_source(config, team, artifact, tag, url, output):
    '''Show SCM source information such as GIT revision'''
    set_pierone_url(config, url)
    token = get_token()
    tags = get_tags(config.get('url'), team, artifact, token)
    if not tag:
        # No tag given: show SCM info for every known tag of the artifact.
        tag = [t['name'] for t in tags]
    rows = []
    for tag_name in tag:
        path = '/teams/{}/artifacts/{}/tags/{}/scm-source'.format(team, artifact, tag_name)
        row = request(config.get('url'), path, token).json()
        if not row:
            row = {}
        row['tag'] = tag_name
        matching_tag = [d for d in tags if d['name'] == tag_name]
        row['created_by'] = ''.join(d['created_by'] for d in matching_tag)
        if matching_tag:
            row['created_time'] = parse_time(''.join(d['created'] for d in matching_tag))
        rows.append(row)
    rows.sort(key=lambda row: (row['tag'], row.get('created_time')))
    with OutputFormat(output):
        print_table(['tag', 'author', 'url', 'revision', 'status', 'created_time', 'created_by'],
                    rows,
                    titles={'tag': 'Tag', 'created_by': 'By', 'created_time': 'Created',
                            'url': 'URL', 'revision': 'Revision', 'status': 'Status'},
                    max_column_widths={'revision': 10})
def pull_requests(config, output):
    """List pull requests"""
    token = config.get("github_access_token")
    repositories = get_repositories()
    rows = []
    for issue in get_my_issues(token):
        pr = issue.get("pull_request")
        if not pr:
            continue
        repo = repositories.get(issue["repository"]["url"])
        if not repo:
            continue
        # Merge the full pull-request payload into the issue dict.
        response = request(session.get, pr["url"], token)
        issue.update(**response.json())
        issue["repository"] = repo["full_name"]
        issue["created_time"] = parse_time(issue["created_at"])
        issue["created_by"] = issue["user"]["login"]
        issue["labels"] = ", ".join(label["name"] for label in issue["labels"])
        rows.append(issue)
    rows.sort(key=lambda issue: (issue["repository"], issue["number"]))
    with OutputFormat(output):
        print_table(["repository", "number", "title", "labels", "mergeable",
                     "mergeable_state", "created_time", "created_by"],
                    rows)
def list_access_requests(obj, user, odd_host, status, limit, offset, output):
    '''List access requests filtered by user, host and status'''
    config = load_config(obj)
    # '*' means "no filter"; the MY-ODD-HOST sentinel resolves to the
    # configured odd host.
    if user == '*':
        user = None
    if odd_host == '*':
        odd_host = None
    elif odd_host == 'MY-ODD-HOST':
        odd_host = config.get('odd_host')
    token = zign.api.get_existing_token('piu')
    if not token:
        raise click.UsageError('No valid OAuth token named "piu" found.')
    access_token = token.get('access_token')
    params = {'username': user,
              'hostname': odd_host,
              'status': status,
              'limit': limit,
              'offset': offset}
    response = requests.get(config.get('even_url').rstrip('/') + '/access-requests',
                            params=params,
                            headers={'Authorization': 'Bearer {}'.format(access_token)})
    response.raise_for_status()
    rows = []
    for req in response.json():
        req['created_time'] = datetime.datetime.strptime(
            req['created'], '%Y-%m-%dT%H:%M:%S.%f%z').timestamp()
        rows.append(req)
    rows.sort(key=lambda req: req['created_time'])
    with OutputFormat(output):
        print_table('username hostname remote_host reason lifetime_minutes status status_reason created_time'.split(),
                    rows, styles=STYLES, titles=TITLES,
                    max_column_widths=MAX_COLUMN_WIDTHS)
def list(obj, output):
    '''List AWS profiles'''
    profile_list = get_profiles(obj['service_url'])
    default = obj['default'] if 'default' in obj else {}
    if 'aliases' in obj:
        alias_list = {(v['account_name'], v['role_name']): alias
                      for alias, v in obj['aliases'].items()}
    else:
        alias_list = {}
    # Key identifying the default profile, or None when no default is set.
    default_key = (default['account_name'], default['role_name']) if default else None
    for profile in profile_list:
        key = (profile['account_name'], profile['role_name'])
        profile['default'] = '✓' if key == default_key else ''
        profile['alias'] = alias_list.get(key, '')
    profile_list.sort(key=lambda p: p['account_name'])
    with OutputFormat(output):
        print_table(['account_id', 'account_name', 'role_name', 'alias', 'default'],
                    profile_list)
def scm_source(config, team, artifact, tag, url, output):
    '''Show SCM source information such as GIT revision'''
    url = set_pierone_url(config, url)
    api = PierOne(url)
    token = get_token()
    tags = get_tags(url, team, artifact, token)
    if not tags:
        raise click.UsageError('Artifact or Team does not exist! '
                               'Please double check for spelling mistakes.')
    if not tag:
        # No tag given: show SCM info for every known tag of the artifact.
        tag = [t['name'] for t in tags]
    rows = []
    for tag_name in tag:
        image = DockerImage(url, team, artifact, tag_name)
        try:
            row = api.get_scm_source(image)
        except ArtifactNotFound:
            row = {}
        row['tag'] = tag_name
        matching_tag = [d for d in tags if d['name'] == tag_name]
        row['created_by'] = ''.join(d['created_by'] for d in matching_tag)
        if matching_tag:
            row['created_time'] = parse_time(''.join(d['created'] for d in matching_tag))
        rows.append(row)
    rows.sort(key=lambda row: (row['tag'], row.get('created_time')))
    with OutputFormat(output):
        print_table(['tag', 'author', 'url', 'revision', 'status', 'created_time', 'created_by'],
                    rows,
                    titles={'tag': 'Tag', 'created_by': 'By', 'created_time': 'Created',
                            'url': 'URL', 'revision': 'Revision', 'status': 'Status'},
                    max_column_widths={'revision': 10})
def render_entities(entities, output):
    """Render entities as a table, folding extra keys into one 'data' column."""
    rows = []
    for entity in entities:
        row = entity  # mutate in place; 'entity' and 'row' are the same dict
        key_values = []
        for key in sorted(entity.keys()):
            if key in ('id', 'type'):
                continue
            if key == 'last_modified':
                # Replace the raw timestamp string with an epoch value.
                row['last_modified_time'] = calendar.timegm(
                    time.strptime(row.pop('last_modified'), LAST_MODIFIED_FMT))
            else:
                key_values.append('{}={}'.format(key, entity[key]))
        row['data'] = ' '.join(key_values)
        rows.append(row)
    rows.sort(key=lambda r: (r['last_modified_time'], r['id'], r['type']))
    with OutputFormat(output):
        print_table('id type last_modified_time data'.split(), rows,
                    titles={'last_modified_time': 'Modified'})
def image(config, image, output):
    '''List tags that point to this image'''
    token = get_token()
    resp = request(config.get('url'), '/tags/{}'.format(image), token['access_token'])
    # Known user-error responses get a friendly message instead of a traceback.
    error_messages = {404: 'Image {} not found',
                      412: 'Prefix {} matches more than one image.'}
    if resp.status_code in error_messages:
        click.echo(error_messages[resp.status_code].format(image))
        return
    with OutputFormat(output):
        print_table(['team', 'artifact', 'name'],
                    resp.json(),
                    titles={'name': 'Tag', 'artifact': 'Artifact', 'team': 'Team'})
def test_text_out(capsys):
    # Two empty rows: header plus two blank-padded lines on stdout,
    # warning on stderr.
    with OutputFormat('text'):
        warning('this is a warning')
        print_table('a b'.split(), [{}, {}])
    out, err = capsys.readouterr()
    assert out == 'A│B\n \n \n'
    assert err == 'this is a warning\n'
def tags(config, team, artifact, output):
    '''List all tags'''
    token = get_token()
    if not artifact:
        # No artifact given: list tags for all of the team's artifacts.
        artifact = get_artifacts(config.get('url'), team, token['access_token'])
    rows = []
    for art in artifact:
        for entry in get_tags(config.get('url'), team, art, token['access_token']):
            created_ts = datetime.datetime.strptime(
                entry['created'], '%Y-%m-%dT%H:%M:%S.%f%z').timestamp()
            rows.append({'team': team,
                         'artifact': art,
                         'tag': entry['name'],
                         'created_by': entry['created_by'],
                         'created_time': created_ts})
    rows.sort(key=lambda row: (row['team'], row['artifact'], row['tag']))
    with OutputFormat(output):
        print_table(['team', 'artifact', 'tag', 'created_time', 'created_by'],
                    rows, titles={'created_time': 'Created', 'created_by': 'By'})
def scm_source(config, team, artifact, tag, url, output):
    '''Show SCM source information such as GIT revision'''
    set_pierone_url(config, url)
    token = get_token()
    tags = get_tags(config.get('url'), team, artifact, token)
    if not tags:
        raise click.UsageError('Artifact or Team does not exist! '
                               'Please double check for spelling mistakes.')
    if not tag:
        # No tag given: show SCM info for every known tag of the artifact.
        tag = [t['name'] for t in tags]
    rows = []
    for tag_name in tag:
        path = '/teams/{}/artifacts/{}/tags/{}/scm-source'.format(team, artifact, tag_name)
        response = request(config.get('url'), path, token, True)
        row = {} if response is None else response.json()
        row['tag'] = tag_name
        matching_tag = [d for d in tags if d['name'] == tag_name]
        row['created_by'] = ''.join(d['created_by'] for d in matching_tag)
        if matching_tag:
            row['created_time'] = parse_time(''.join(d['created'] for d in matching_tag))
        rows.append(row)
    rows.sort(key=lambda row: (row['tag'], row.get('created_time')))
    with OutputFormat(output):
        print_table(['tag', 'author', 'url', 'revision', 'status', 'created_time', 'created_by'],
                    rows,
                    titles={'tag': 'Tag', 'created_by': 'By', 'created_time': 'Created',
                            'url': 'URL', 'revision': 'Revision', 'status': 'Status'},
                    max_column_widths={'revision': 10})
def test_json_out(capsys):
    # JSON output serialises missing cells as null.
    with OutputFormat('json'):
        warning('this is a warning')
        print_table('a b'.split(), [{}, {}])
    out, err = capsys.readouterr()
    assert out == '[{"a": null, "b": null}, {"a": null, "b": null}]\n'
    assert err == 'this is a warning\n'
def scm_source(config, team, artifact, tag, output):
    '''Show SCM source information such as GIT revision'''
    token = get_token()
    tags = get_tags(config.get('url'), team, artifact, token['access_token'])
    if not tag:
        # No tag given: show SCM info for every known tag of the artifact.
        tag = [t['name'] for t in tags]
    rows = []
    for tag_name in tag:
        path = '/teams/{}/artifacts/{}/tags/{}/scm-source'.format(team, artifact, tag_name)
        row = request(config.get('url'), path, token['access_token']).json()
        if not row:
            row = {}
        row['tag'] = tag_name
        matching_tag = [d for d in tags if d['name'] == tag_name]
        row['created_by'] = ''.join(d['created_by'] for d in matching_tag)
        if matching_tag:
            row['created_time'] = datetime.datetime.strptime(
                ''.join(d['created'] for d in matching_tag),
                '%Y-%m-%dT%H:%M:%S.%f%z').timestamp()
        rows.append(row)
    rows.sort(key=lambda row: (row['tag'], row.get('created_time')))
    with OutputFormat(output):
        print_table(['tag', 'author', 'url', 'revision', 'status', 'created_time', 'created_by'],
                    rows,
                    titles={'tag': 'Tag', 'created_by': 'By', 'created_time': 'Created',
                            'url': 'URL', 'revision': 'Revision', 'status': 'Status'})
def list_access_requests(config_file, user, odd_host, status, limit, offset, output, region):
    '''List access requests filtered by user, host and status'''
    config = load_config(config_file)
    # '*' means "no filter"; an omitted host falls back to auto-discovery
    # and then to the configured odd host.
    if user == '*':
        user = None
    if odd_host == '*':
        odd_host = None
    elif odd_host is None:
        odd_host = piu.utils.find_odd_host(region) or config.get('odd_host')
    access_token = zign.api.get_token('piu', ['piu'])
    params = {'username': user,
              'hostname': odd_host,
              'status': status,
              'limit': limit,
              'offset': offset}
    response = requests.get(config.get('even_url').rstrip('/') + '/access-requests',
                            params=params,
                            headers={'Authorization': 'Bearer {}'.format(access_token)})
    response.raise_for_status()
    rows = []
    for req in response.json():
        req['created_time'] = parse_time(req['created'])
        rows.append(req)
    rows.sort(key=lambda req: req['created_time'])
    with OutputFormat(output):
        print_table('username hostname remote_host reason lifetime_minutes status status_reason created_time'.split(),
                    rows, styles=STYLES, titles=TITLES,
                    max_column_widths=MAX_COLUMN_WIDTHS)
def request_access_interactive(region, odd_host):
    """Interactively choose a running stack instance to connect to.

    Returns a (host, odd_host, reason) tuple with the chosen instance's
    private IP, the bastion hostname and the access reason.
    """
    region = click.prompt('AWS region', default=region)
    odd_host = click.prompt('Odd SSH bastion hostname', default=odd_host)
    all_instances = piu.utils.list_running_instances(region, [])
    # Only instances that carry full stack metadata are offered.
    stack_instances = [inst for inst in all_instances
                       if inst.name and inst.stack_name and inst.stack_version]
    instance_count = len(stack_instances)
    if instance_count == 0:
        raise click.ClickException('No running instances were found.')
    stack_instances.sort(key=operator.attrgetter('stack_name', 'stack_version'))
    print()
    table_entries = [dict(index=idx, **instance._asdict())
                     for idx, instance in enumerate(stack_instances, start=1)]
    print_table('index name stack_name stack_version private_ip instance_id'.split(),
                table_entries)
    print()
    if instance_count == 1:
        # Single candidate: just confirm it.
        click.confirm('Connect to {}?'.format(stack_instances[0].name),
                      default=True, abort=True)
        instance_index = 0
    else:
        allowed_choices = ['{}'.format(n) for n in range(1, instance_count + 1)]
        instance_index = int(click.prompt('Choose an instance (1-{})'.format(instance_count),
                                          type=click.Choice(allowed_choices))) - 1
    host = stack_instances[instance_index].private_ip
    reason = click.prompt('Reason', default='Troubleshooting')
    return (host, odd_host, reason)
def test_yaml_out(capsys):
    # YAML output emits one document per row, separated by '---'.
    with OutputFormat('yaml'):
        warning('this is a warning')
        print_table('a b'.split(), [{}, {}])
    out, err = capsys.readouterr()
    assert out == 'a: null\nb: null\n---\na: null\nb: null\n\n'
    assert err == 'this is a warning\n'
def list_change_requests(config):
    '''List change requests'''
    response = request(config, requests.get, '/change-requests')
    # Copy the items into a fresh list for display.
    rows = list(response.json()['items'])
    print_table('id platform kind user executed'.split(), rows)
def artifacts(config, team, output):
    '''List all team artifacts'''
    token = get_token()
    artifact_names = get_artifacts(config.get('url'), team, token)
    rows = [{'team': team, 'artifact': artifact_name}
            for artifact_name in sorted(artifact_names)]
    with OutputFormat(output):
        print_table(['team', 'artifact'], rows)
def artifacts(config, team, url, output):
    """List all team artifacts"""
    url = set_pierone_url(config, url)
    api = PierOne(url)
    artifact_names = api.get_artifacts(team)
    rows = [{'team': team, 'artifact': artifact_name}
            for artifact_name in sorted(artifact_names)]
    with OutputFormat(output):
        print_table(['team', 'artifact'], rows)
def teams(config, output):
    '''List all teams having artifacts in Pier One'''
    token = get_token()
    response = request(config.get('url'), '/teams', token)
    rows = [{'name': team_name} for team_name in sorted(response.json())]
    with OutputFormat(output):
        print_table(['name'], rows)
def artifacts(config, team, output):
    '''List all team artifacts'''
    token = get_token()
    artifact_names = get_artifacts(config.get('url'), team, token['access_token'])
    rows = [{'team': team, 'artifact': artifact_name}
            for artifact_name in sorted(artifact_names)]
    with OutputFormat(output):
        print_table(['team', 'artifact'], rows)
def teams(config, output):
    '''List all teams having artifacts in Pier One'''
    token = get_token()
    response = request(config.get('url'), '/teams', token['access_token'])
    rows = [{'name': team_name} for team_name in sorted(response.json())]
    with OutputFormat(output):
        print_table(['name'], rows)
def list_violations(config, output, since, region, meta, remeta, limit, all, **kwargs):
    '''List violations'''
    url = config.get('url')
    if not url:
        raise click.ClickException(
            'Missing configuration URL. Please run "stups configure".')
    kwargs['accounts'] = kwargs.get('accounts') or config.get('accounts')
    token = get_token()
    params = {'size': limit,
              'sort': 'id,DESC',
              'from': parse_since(since),
              'application-ids': kwargs.get('applications'),
              'application-version-ids': kwargs.get('application_versions')}
    params.update(kwargs)
    response = request(url, '/api/violations', token, params=params)
    response.raise_for_status()
    data = response.json()['content']
    if all:
        # --all also includes already-checked violations.
        params['checked'] = 'true'
        response = request(url, '/api/violations', token, params=params)
        response.raise_for_status()
        data.extend(response.json()['content'])
    rows = []
    for row in data:
        if region and row['region'] != region:
            continue
        if meta and not meta_matches(row['meta_info'], meta):
            continue
        if remeta and not meta_matches_re(format_meta_info(row['meta_info']), remeta):
            continue
        row['violation_type'] = row['violation_type']['id']
        row['created_time'] = parse_time(row['created'])
        row['meta_info'] = format_meta_info(row['meta_info'])
        rows.append(row)
    # we get the newest violations first, but we want to print them in order
    rows.reverse()
    with OutputFormat(output):
        print_table(['account_id', 'region', 'id', 'violation_type', 'instance_id',
                     'application_id', 'application_version_id', 'meta_info',
                     'comment', 'created_time'],
                    rows,
                    titles={'created_time': 'Created',
                            'application_id': 'Application',
                            'application_version_id': 'Application Version'})
def list_approvals(config, change_request_id):
    '''Show approvals for given change request'''
    path = '/change-requests/{}/approvals'.format(change_request_id)
    response = request(config, requests.get, path)
    # Copy the items into a fresh list for display.
    rows = list(response.json()['items'])
    print_table('user created_at'.split(), rows)
def artifacts(config, team, url, output):
    '''List all team artifacts'''
    set_pierone_url(config, url)
    token = get_token()
    artifact_names = get_artifacts(config.get('url'), team, token)
    rows = [{'team': team, 'artifact': artifact_name}
            for artifact_name in sorted(artifact_names)]
    with OutputFormat(output):
        print_table(['team', 'artifact'], rows)
def cli(url, suspicious, date_from, date_to, output):
    """List network connections per AWS account over a date range."""
    token = zign.api.get_existing_token('test')
    if token is None:
        print("Token 'test' expired")
        exit(1)
    # Resolve the start date; a relative value like '-7d' means N days back.
    if date_from:
        if date_from.endswith('d'):
            days = int(date_from.lstrip('-').rstrip('d'))
            date_from = datetime.datetime.utcnow() - datetime.timedelta(days=days)
        else:
            date_from = datetime.datetime.strptime(date_from, '%Y-%m-%d')
    else:
        date_from = datetime.datetime.utcnow()
    if date_to:
        date_to = datetime.datetime.strptime(date_to, '%Y-%m-%d')
    else:
        date_to = datetime.datetime.utcnow()
    access_token = token['access_token']
    headers = {'Authorization': 'Bearer {}'.format(access_token)}
    r = requests.get(url + '/accounts', headers=headers)
    r.raise_for_status()
    accounts = r.json()
    rows = []
    day = date_from
    while day.date() <= date_to.date():
        # One request per day in the range.
        r = requests.get(url + '/connections', headers=headers,
                         params={'date': day.strftime('%Y-%m-%d')})
        r.raise_for_status()
        for account, connections in sorted(r.json().items()):
            for conn in connections:
                account_id, region = account.split('/')
                conn['account_id'] = account_id
                conn['account_name'] = accounts.get(account_id, {}).get('name')
                conn['region'] = region
                if not suspicious or conn['dest_port'] not in (0, 443):
                    rows.append(conn)
        day += datetime.timedelta(days=1)
    rows.sort(key=lambda conn: (conn['account_id'], conn['account_name'],
                                conn['region'], conn['dest'], conn['dest_port']))
    with OutputFormat(output):
        print_table('account_id account_name region dest dest_port source score'.split(),
                    rows)
def list_stacks(stack_ref: str, all: bool, watch: int, output: str):
    """List Lizzy stacks"""
    config = Configuration()
    access_token = fetch_token(config.token_url, config.scopes, config.credentials_dir)
    lizzy = Lizzy(config.lizzy_url, access_token)
    while True:
        try:
            all_stacks = lizzy.get_stacks()
        except requests.RequestException as e:
            fatal_error('Failed to get stacks: {}'.format(e))
        if all:
            stacks = all_stacks
        else:
            # Hide stacks Lizzy already removed unless --all was given.
            stacks = [s for s in all_stacks if s['status'] not in ['LIZZY:REMOVED']]
        if stack_ref:
            stacks = [s for s in stacks if s['stack_name'] in stack_ref]
        rows = []
        for stack in stacks:
            created = dateutil.parser.parse(stack['creation_time'])
            rows.append({'stack_name': stack['stack_name'],
                         'version': stack['stack_version'],
                         'image_version': stack['image_version'],
                         'status': stack['status'],
                         'creation_time': created.timestamp()})
        rows.sort(key=lambda row: (row['stack_name'], row['version']))
        with OutputFormat(output):
            print_table('stack_name version image_version status creation_time'.split(),
                        rows, styles=STYLES, titles=TITLES)
        if watch:  # pragma: no cover
            time.sleep(watch)
            click.clear()
        else:
            break
def list_violations(config, output, since, region, meta, remeta, limit, all, **kwargs):
    '''List violations'''
    url = config.get('url')
    if not url:
        raise click.ClickException('Missing configuration URL. Please run "stups configure".')
    kwargs['accounts'] = kwargs.get('accounts') or config.get('accounts')
    token = get_token()
    params = {'size': limit,
              'sort': 'id,DESC',
              'from': parse_since(since),
              'application-ids': kwargs.get('applications'),
              'application-version-ids': kwargs.get('application_versions')}
    params.update(kwargs)
    response = request(url, '/api/violations', token, params=params)
    response.raise_for_status()
    data = response.json()['content']
    if all:
        # --all also fetches already-checked violations.
        params['checked'] = 'true'
        response = request(url, '/api/violations', token, params=params)
        response.raise_for_status()
        data.extend(response.json()['content'])
    rows = []
    for row in data:
        if region and row['region'] != region:
            continue
        if meta and not meta_matches(row['meta_info'], meta):
            continue
        if remeta and not meta_matches_re(format_meta_info(row['meta_info']), remeta):
            continue
        row['violation_type'] = row['violation_type']['id']
        row['created_time'] = parse_time(row['created'])
        row['meta_info'] = format_meta_info(row['meta_info'])
        rows.append(row)
    # we get the newest violations first, but we want to print them in order
    rows.reverse()
    with OutputFormat(output):
        print_table(['account_id', 'region', 'id', 'violation_type', 'instance_id',
                     'application_id', 'application_version_id', 'meta_info',
                     'comment', 'created_time'],
                    rows,
                    titles={'created_time': 'Created',
                            'application_id': 'Application',
                            'application_version_id': 'Application Version'})
def tags(config, team: str, artifact, url, output, limit):
    '''List all tags for a given team'''
    set_pierone_url(config, url)
    token = get_token()
    if limit is None:
        # show 20 rows if artifact was given, else show only 3
        limit = 20 if artifact else 3
    if not artifact:
        artifact = get_artifacts(config.get('url'), team, token)
        if not artifact:
            raise click.UsageError('The Team you are looking for does not exist or '
                                   'we could not find any artifacts registered in Pierone! '
                                   'Please double check for spelling mistakes.')
    registry = config.get('url')
    if registry.startswith('https://'):
        registry = registry[8:]
    rows = []
    for art in artifact:
        image = DockerImage(registry=registry, team=team, artifact=art, tag=None)
        try:
            image_tags = get_image_tags(image, token)
        except Unauthorized as e:
            raise click.ClickException(str(e))
        if not image_tags:
            raise click.UsageError('Artifact or Team does not exist! '
                                   'Please double check for spelling mistakes.')
        # Only the newest `limit` entries per artifact.
        rows.extend(image_tags[-limit:])
    # sorts are guaranteed to be stable, i.e. tags will be sorted by time (as returned from REST service)
    rows.sort(key=lambda row: (row['team'], row['artifact']))
    with OutputFormat(output):
        titles = {'created_time': 'Created',
                  'created_by': 'By',
                  'severity_fix_available': 'Fixable CVE Severity',
                  'severity_no_fix_available': 'Unfixable CVE Severity'}
        print_table(['team', 'artifact', 'tag', 'created_time', 'created_by',
                     'severity_fix_available', 'severity_no_fix_available'],
                    rows, titles=titles, styles=CVE_STYLES)
def tags(config, team: str, artifact, url, output, limit):
    '''List all tags for a given team'''
    registry = set_pierone_url(config, url)
    api = PierOne(registry)
    if limit is None:
        # show 20 rows if artifact was given, else show only 3
        limit = 20 if artifact else 3
    if not artifact:
        artifact = api.get_artifacts(team)
        if not artifact:
            raise click.UsageError('The Team you are looking for does not exist or '
                                   'we could not find any artifacts registered in Pierone! '
                                   'Please double check for spelling mistakes.')
    rows = []
    for art in artifact:
        image = DockerImage(registry=registry, team=team, artifact=art, tag=None)
        try:
            image_tags = api.get_image_tags(image)
        except ArtifactNotFound:
            raise click.UsageError("Artifact or Team does not exist! "
                                   "Please double check for spelling mistakes.")
        # Only the newest `limit` entries per artifact.
        rows.extend(image_tags[-limit:])
    # sorts are guaranteed to be stable, i.e. tags will be sorted by time (as returned from REST service)
    rows.sort(key=lambda row: (row['team'], row['artifact']))
    with OutputFormat(output):
        titles = {"created_time": "Created", "created_by": "By"}
        print_table(["team", "artifact", "tag", "created_time", "created_by",
                     "status", "status_reason"],
                    rows, titles=titles)
def show_app(config, application_id, output):
    '''Show application'''
    url = get_url(config)
    token = get_token()
    response = request(url, '/apps/{}'.format(application_id), token)
    response.raise_for_status()
    # one key/value row per application attribute, alphabetically ordered
    rows = []
    for key, value in sorted(response.json().items()):
        rows.append({'key': key, 'value': value})
    with OutputFormat(output):
        print_table(['key', 'value'], rows)
def tags(config, team: str, artifact, url, output, limit):
    '''List all tags for a given team'''
    registry = set_pierone_url(config, url)
    api = PierOne(registry)

    if limit is None:
        # show 20 rows if artifact was given, else show only 3
        limit = 20 if artifact else 3

    if not artifact:
        artifact = api.get_artifacts(team)
    if not artifact:
        # either the given team has no artifacts or it does not exist at all
        raise click.UsageError('The Team you are looking for does not exist or '
                               'we could not find any artifacts registered in Pierone! '
                               'Please double check for spelling mistakes.')

    keep = -limit
    all_rows = []
    for name in artifact:
        docker_image = DockerImage(registry=registry, team=team,
                                   artifact=name, tag=None)
        try:
            image_tags = api.get_image_tags(docker_image)
        except ArtifactNotFound:
            raise click.UsageError("Artifact or Team does not exist! "
                                   "Please double check for spelling mistakes.")
        else:
            all_rows.extend(image_tags[keep:])

    # sorts are guaranteed to be stable, i.e. tags will be sorted by time (as returned from REST service)
    all_rows.sort(key=lambda entry: (entry['team'], entry['artifact']))

    with OutputFormat(output):
        print_table(
            ["team", "artifact", "tag", "created_time", "created_by",
             "status", "status_reason"],
            all_rows,
            titles={"created_time": "Created", "created_by": "By"}
        )
def print_traffic_changes(message: list):
    """Render the given traffic-change rows as a table."""
    columns = [
        "stack_name",
        "version",
        "identifier",
        "old_weight%",
        "delta",
        "compensation",
        "new_weight%",
        "current",
    ]
    print_table(columns, message)
def request_access_interactive():
    """Interactively pick a running EC2 instance to request access to.

    Prompts for an AWS region, lists all running instances that carry the
    Name/StackName/StackVersion tags, lets the user choose one by index and
    asks for an access reason.

    Returns:
        tuple: (host, reason) — the chosen instance's private IP address and
        the reason text entered by the user.
    """
    region = click.prompt('AWS region', default=get_region())
    ec2 = boto3.resource('ec2', region_name=region)
    reservations = ec2.instances.filter(Filters=[{
        'Name': 'instance-state-name',
        'Values': ['running']
    }])
    instance_list = []
    for instance in reservations:
        if not instance.tags:
            continue
        # Read the tags per instance via a dict lookup.  The previous loop
        # carried name/stack_name/stack_version across iterations, so an
        # instance missing one tag could be listed with a stale value leaked
        # from an earlier instance.
        tag_values = {t['Key']: t['Value'] for t in instance.tags}
        name = tag_values.get('Name')
        stack_name = tag_values.get('StackName')
        stack_version = tag_values.get('StackVersion')
        if name and stack_name and stack_version:
            instance_list.append({
                'name': name,
                'stack_name': stack_name,
                'stack_version': stack_version,
                'instance_id': instance.instance_id,
                'private_ip': instance.private_ip_address
            })
    instance_count = len(instance_list)
    sorted_instance_list = sorted(instance_list,
                                  key=operator.itemgetter('stack_name',
                                                          'stack_version'))
    # Number the rows 1..n for the selection prompt.  A plain loop replaces
    # the old set comprehension that was abused for its side effect only.
    for idx, entry in enumerate(sorted_instance_list, start=1):
        entry['index'] = idx
    print()
    print_table(
        'index name stack_name stack_version private_ip instance_id'.split(),
        sorted_instance_list)
    print()
    allowed_choices = [str(n) for n in range(1, instance_count + 1)]
    instance_index = int(
        click.prompt('Choose an instance (1-{})'.format(instance_count),
                     type=click.Choice(allowed_choices))) - 1
    host = sorted_instance_list[instance_index]['private_ip']
    reason = click.prompt('Reason', default='Troubleshooting')
    return (host, reason)
def inspect_contents(config, team, artifact, tag, url, output, limit):
    '''List image contents (files in tar layers)'''
    set_pierone_url(config, url)
    token = get_token()
    tags = get_tags(config.get('url'), team, artifact, token)
    # No explicit tag(s) given: inspect every tag of the artifact.
    if not tag:
        tag = [t['name'] for t in tags]
    # Download buffer size for layer blobs (bytes).
    CHUNK_SIZE = 8192
    # Tar member type flag -> display marker (b'5' = directory, b'0' = regular file).
    TYPES = {b'5': 'D', b'0': ' '}
    rows = []
    for t in tag:
        # Fetch the manifest for this tag to discover its layer digests.
        row = request(config.get('url'), '/v2/{}/{}/manifests/{}'.format(team, artifact, t), token).json()
        if row.get('layers'):
            # Manifest with a 'layers' list ('digest' entries); reversed so the
            # topmost layer is inspected first.
            layers = reversed([lay.get('digest') for lay in row.get('layers')])
        else:
            # Fallback manifest shape: 'fsLayers' with 'blobSum' digests.
            layers = [lay.get('blobSum') for lay in row.get('fsLayers')]
        if layers:
            # Number of layers that contained at least one tar member; used to
            # stop after `limit` non-empty layers.
            found = 0
            for i, layer in enumerate(layers):
                layer_id = layer
                if layer_id:
                    # Download the layer blob into a temp file, then list the
                    # members of the tar archive it contains.
                    response = request(config.get('url'), '/v2/{}/{}/blobs/{}'.format(team, artifact, layer_id), token)
                    with tempfile.NamedTemporaryFile(prefix='tmp-layer-', suffix='.tar') as fd:
                        for chunk in response.iter_content(CHUNK_SIZE):
                            fd.write(chunk)
                        fd.flush()
                        with tarfile.open(fd.name) as archive:
                            has_member = False
                            for member in archive.getmembers():
                                rows.append({'layer_index': i, 'layer_id': layer_id, 'type': TYPES.get(member.type),
                                             'mode': oct(member.mode)[-4:],
                                             'name': member.name, 'size': member.size, 'created_time': member.mtime})
                                has_member = True
                            if has_member:
                                found += 1
                if found >= limit:
                    break
    rows.sort(key=lambda row: (row['layer_index'], row['name']))
    with OutputFormat(output):
        print_table(['layer_index', 'layer_id', 'mode', 'name', 'size', 'created_time'],
                    rows,
                    titles={'created_time': 'Created', 'layer_index': 'Idx'},
                    max_column_widths={'layer_id': 16})
def cli(url, suspicious, date_from, date_to, output):
    """Fetch network connections per account/day and print them as a table."""
    token = zign.api.get_existing_token('test')
    if token is None:
        print("Token 'test' expired")
        exit(1)

    # Resolve the date range: '-Nd' means "N days back", otherwise an ISO
    # date; a missing bound defaults to "now" (UTC).
    if date_from:
        if date_from.endswith('d'):
            days_back = int(date_from.lstrip('-').rstrip('d'))
            date_from = datetime.datetime.utcnow() - datetime.timedelta(days=days_back)
        else:
            date_from = datetime.datetime.strptime(date_from, '%Y-%m-%d')
    else:
        date_from = datetime.datetime.utcnow()
    date_to = (datetime.datetime.strptime(date_to, '%Y-%m-%d')
               if date_to else datetime.datetime.utcnow())

    access_token = token['access_token']
    auth_header = {'Authorization': 'Bearer {}'.format(access_token)}

    response = requests.get(url + '/accounts', headers=auth_header)
    response.raise_for_status()
    accounts = response.json()

    rows = []
    date = date_from
    while date.date() <= date_to.date():
        response = requests.get(url + '/connections', headers=auth_header,
                                params={'date': date.strftime('%Y-%m-%d')})
        response.raise_for_status()
        for account, connections in sorted(response.json().items()):
            account_id, region = account.split('/')
            account_name = accounts.get(account_id, {}).get('name')
            for conn in connections:
                conn['account_id'] = account_id
                conn['account_name'] = account_name
                conn['region'] = region
                # with --suspicious, drop the well-known ports 0 and 443
                if not suspicious or conn['dest_port'] not in (0, 443):
                    rows.append(conn)
        date += datetime.timedelta(days=1)

    rows.sort(key=lambda c: (c['account_id'], c['account_name'], c['region'],
                             c['dest'], c['dest_port']))
    with OutputFormat(output):
        print_table('account_id account_name region dest dest_port source score'.split(),
                    rows)
def status(config):
    """Check system status"""
    data = get('/status').json()

    info('Workers:')
    workers = sorted(data.get('workers', []), key=lambda entry: entry.get('name'))
    print_table(['name', 'check_invocations', 'last_execution_time'], workers)

    info('Queues:')
    queues = sorted(data.get('queues', []), key=lambda entry: entry.get('name'))
    print_table(['name', 'size'], queues)
def tags(config, team: str, artifact, url, output, limit):
    '''List all tags for a given team'''
    set_pierone_url(config, url)
    token = get_token()

    if limit is None:
        # show 20 rows if artifact was given, else show only 3
        limit = 20 if artifact else 3

    if not artifact:
        artifact = get_artifacts(config.get('url'), team, token)
        if not artifact:
            raise click.UsageError('The Team you are looking for does not exist or '
                                   'we could not find any artifacts registered in Pierone! '
                                   'Please double check for spelling mistakes.')

    # DockerImage wants the bare registry host, without the scheme.
    registry = config.get('url')
    if registry.startswith('https://'):
        registry = registry[8:]

    rows = []
    for name in artifact:
        image = DockerImage(registry=registry, team=team, artifact=name, tag=None)
        try:
            image_tags = get_image_tags(image, token)
        except Unauthorized as e:
            raise click.ClickException(str(e))
        # None (as opposed to an empty list) signals a missing artifact/team
        if image_tags is None:
            raise click.UsageError('Artifact or Team does not exist! '
                                   'Please double check for spelling mistakes.')
        rows.extend(image_tags[-limit:])

    # sorts are guaranteed to be stable, i.e. tags will be sorted by time (as returned from REST service)
    rows.sort(key=lambda row: (row['team'], row['artifact']))

    with OutputFormat(output):
        titles = {
            'created_time': 'Created',
            'created_by': 'By',
            'severity_fix_available': 'Fixable CVE Severity',
            'severity_no_fix_available': 'Unfixable CVE Severity'
        }
        print_table(['team', 'artifact', 'tag', 'created_time', 'created_by',
                     'severity_fix_available', 'severity_no_fix_available'],
                    rows, titles=titles, styles=CVE_STYLES)
def list_profiles(obj, output):
    '''List profiles'''
    if not obj['config']:
        return
    # one row per configured profile, sorted by profile name
    rows = sorted(
        ({'name': name,
          'role': get_role_label(profile.get('saml_role')),
          'url': profile.get('saml_identity_provider_url'),
          'user': profile.get('saml_user')}
         for name, profile in obj['config'].items()),
        key=lambda r: r['name'])
    with OutputFormat(output):
        print_table(sorted(rows[0].keys()), rows)
def repositories(config):
    '''List repositories'''
    token = config.get('github_access_token')
    repositories = get_repositories()
    # tally open issues / PRs per repository from my issue list
    for issue in get_my_issues(token):
        repo = repositories.get(issue['repository']['url'])
        if not repo:
            continue
        repo['open_issues'] = repo.get('open_issues', 0) + 1
        if issue.get('pull_request'):
            repo['open_pull_requests'] = repo.get('open_pull_requests', 0) + 1
    rows = [repo for _, repo in sorted(repositories.items())]
    print_table(['full_name', 'stargazers_count', 'forks_count',
                 'open_issues', 'open_pull_requests'], rows)
def list_profiles(obj, output):
    """List profiles"""
    profiles = obj["config"]
    if profiles:
        rows = [
            {
                "name": profile_name,
                "role": get_role_label(profile.get("saml_role")),
                "url": profile.get("saml_identity_provider_url"),
                "user": profile.get("saml_user"),
            }
            for profile_name, profile in profiles.items()
        ]
        rows.sort(key=lambda entry: entry["name"])
        with OutputFormat(output):
            print_table(sorted(rows[0].keys()), rows)