def list(count: int) -> None:
    """
    Show a list of data aliases in the project.
    """
    project = get_project(require=True)
    assert project
    query = {
        'limit': count,
        'ordering': '-ctime',
        'deleted': 'false',
        'no_count': 'true',
    }
    if project:
        query['project'] = project.id
    aliases = request('get', '/api/v0/datum-aliases/', params=query).json()['results']
    if settings.output_format == 'json':
        return print_json(aliases)
    if not aliases:
        info(f'{project}: No data aliases.')
        return
    # Decorate each alias row with a datum:// URL and a readable target name.
    for alias in aliases:
        alias['url'] = f'datum://{alias["name"]}'
        if alias['datum']:
            alias['datum'] = alias['datum']['name']
        else:
            alias['datum'] = 'No target'
    print_table(
        aliases,
        columns=['name', 'datum', 'mtime', 'url'],
        headers=['Name', 'Data', 'Last Modified', 'URL'],
    )
def environments(gpu: bool, price: bool, queue: bool, description: bool) -> None:
    """
    List all available execution environments.
    """
    envs_data = request('get', '/api/v0/environments/', params={'limit': 9000}).json()['results']
    envs_data.sort(key=itemgetter('name'))
    # Optional columns are included only when the corresponding flag is set.
    candidate_pairs = [
        ('name', 'Name'),
        ('slug', 'Slug'),
        ('gpu_spec', 'GPU Specification') if gpu else None,
        ('description', 'Description') if description else None,
        ('per_hour_price_usd', 'Per-Hour USD$') if price else None,
        ('per_user_queue_quota', 'Per-User Quota') if queue else None,
        ('unfinished_job_count', 'Jobs in Queue') if queue else None,
    ]
    columns, headers = zip(*[pair for pair in candidate_pairs if pair])
    for env in envs_data:
        # A non-positive quota is presented as "unlimited".
        if env.get('per_user_queue_quota', 1) <= 0:
            env['per_user_queue_quota'] = 'unlimited'
    print_table(
        data=envs_data,
        columns=columns,
        headers=headers,
    )
def list(count: int) -> None:
    """
    Show a list of data in the project.
    """
    project = get_project(require=True)
    assert project
    query = {
        'limit': count,
        'ordering': '-ctime',
        'deleted': 'false',
        'no_count': 'true',
    }
    if project:
        query['project'] = project.id
    data = request('get', '/api/v0/data/', params=query).json()['results']
    if settings.output_format == 'json':
        return print_json(data)
    if not data:
        info(f'{project}: No data.')
        return
    # Add derived display columns to every datum row.
    for datum in data:
        datum['url'] = f'datum://{datum["id"]}'
        if datum['output_execution']:
            datum['execution_string'] = f'#{datum["output_execution"]["counter"]}'
        else:
            datum['execution_string'] = 'Not from exec'
        datum['size'] = convert_size(datum['size'])
        if not datum['uri']:
            datum['uri'] = 'No URI'
    print_table(
        data,
        columns=['name', 'size', 'execution_string', 'ctime', 'url', 'uri'],
        headers=['Name', 'Size', 'Output of Exec', 'Created At', 'URL', 'URI'],
    )
def summarize(counters: List[str]) -> None:
    """
    Summarize execution metadata.

    Use the global `--table-format` switch to output JSON/TSV/CSV/...
    """
    project = get_project(require=True)
    assert project
    executions = download_execution_data(project, counters)
    all_metadata_keys = set()
    all_metadata = {}
    for execution in executions.values():
        # Executions that have not started yet carry no useful metadata.
        if execution['status'] in ('created', 'queued'):
            continue
        cmeta = (execution.get('cumulative_metadata') or {})
        all_metadata_keys.update(set(cmeta.keys()))
        all_metadata[execution['counter']] = (execution, cmeta)
    table_data = []
    # Rows ordered by execution counter.
    for counter, (execution, metadata) in sorted(all_metadata.items()):
        row = subset_keys(execution, {'counter', 'id', 'duration'})
        row.update(metadata)
        table_data.append(row)
    # `sorted` already returns a list (no `list(...)` wrapper needed);
    # compute the ordering once and reuse it for columns and headers.
    sorted_keys = sorted(all_metadata_keys)
    columns = ['counter', 'duration'] + sorted_keys
    headers = ['Execution', 'Duration'] + sorted_keys
    print_table(table_data, columns=columns, headers=headers)
def list(): """ List all projects. """ projects_data = request('get', '/api/v0/projects/', params={'count': 9000}).json()['results'] projects_data.sort(key=itemgetter('name')) print_table(projects_data, ['name', 'description'])
def outputs(counter: str, download_directory: Optional[str], filter_download: Optional[str], force: bool, sync: bool) -> None:
    """
    List and download execution outputs.
    """
    if download_directory:
        # Allow a literal {counter} placeholder in the target directory.
        download_directory = download_directory.replace("{counter}", str(counter))
    if sync:
        watch(counter, force, filter_download, download_directory)
        return
    project = get_project(require=True)
    assert project
    execution = project.get_execution_from_counter(
        counter=counter,
        params={'exclude': 'outputs'},
    )
    outputs = get_execution_outputs(execution)
    if not outputs:
        warn('The execution has no outputs.')
        return
    for output in outputs:
        output['datum_url'] = f"datum://{output['id']}"
    print_table(outputs, ('name', 'datum_url', 'size'))
    if download_directory:
        selected = filter_outputs(outputs, download_directory, filter_download, force)
        download_outputs(selected, download_directory, show_success_message=True)
def list(status: str, count: int) -> None:
    """
    Show a list of executions for the project.
    """
    project = get_project(require=True)
    assert project
    query = {
        'project': project.id,
        'limit': count,
        'ordering': '-counter',
        'deleted': 'false',
    }
    if status:
        # NOTE(review): `status` is hinted as str but run through set();
        # presumably it is a multi-value option (tuple of statuses) — confirm.
        query['status'] = set(status)
    executions = request('get', '/api/v0/executions/', params=query).json()['results']
    if settings.output_format == 'json':
        return print_json(executions)
    if not executions:
        info(f'{project}: No executions.')
        return
    for execution in executions:
        execution['url'] = execution['urls']['display']
        # Render duration as H:MM:SS right-justified to 10 chars,
        # or blank padding when no duration is recorded.
        if execution['duration']:
            duration_text = str(timedelta(seconds=round(execution['duration'])))
        else:
            duration_text = ''
        execution['duration'] = duration_text.rjust(10)
    print_table(
        executions,
        columns=['counter', 'status', 'step', 'duration', 'url'],
        headers=['#', 'Status', 'Step', 'Duration', 'URL'],
    )
def print_incomplete_executions(project: Project) -> None:
    # Fetch the project's executions that are still in an incomplete state.
    response = request('get', '/api/v0/executions/', params={
        'project': project.id,
        'status': 'incomplete',
        'ordering': 'counter',
    })
    incomplete_executions = response.json().get('results', ())
    if not incomplete_executions:
        return
    click.secho(f'## {len(incomplete_executions)} Incomplete Executions\n', bold=True)
    print_table(
        incomplete_executions,
        ['counter', 'status', 'step'],
        headers=['#', 'Status', 'Step'],
    )
def print_execution_summary(project_data: dict) -> None:
    # Print a per-status summary table of the project's executions.
    summary = project_data.get('execution_summary', {}).copy()
    if not summary:
        return
    total = summary.pop('count')
    if not total:
        click.secho('No executions yet.', fg='cyan')
        return
    click.secho(f'## Summary of {total} executions\n', bold=True)
    # Keys look like "<status>_count"; zero counts are omitted.
    rows = [
        {'status': key.replace('_count', ''), 'count': value}
        for (key, value) in sorted(summary.items())
        if value
    ]
    print_table(
        rows,
        columns=['status', 'count'],
        headers=['Status', 'Count'],
    )
    click.secho('\n')
def outputs(counter, download, filter_download):
    """
    List and download execution outputs.
    """
    project = get_project(require=True)
    execution = project.get_execution_from_counter(counter=counter)
    outputs = execution.get('outputs', ())
    if not outputs:
        warn('The execution has no outputs.')
        return
    print_table(outputs, ('name', 'url', 'size'))
    if not download:
        return
    if filter_download:
        # Keep only outputs whose name matches the glob pattern.
        outputs = [o for o in outputs if fnmatch(o['name'], filter_download)]
    download_outputs(outputs, download)
def info(counter: str) -> None:
    """
    Show execution info.
    """
    project = get_project(require=True)
    assert project
    execution = project.get_execution_from_counter(
        counter=counter,
        params={
            'exclude': 'metadata,events',
        },
    )
    if settings.output_format == 'json':
        return print_json(execution)
    # Humanized top-level fields, excluding keys listed in `ignored_keys`.
    data = {
        humanize_identifier(key): str(value)
        for (key, value) in execution.items()
        if key not in ignored_keys
    }
    data['project name'] = execution['project']['name']
    data['environment name'] = execution['environment']['name']
    print_table(data)
    print()
    input_urls = {
        input['name']: '; '.join(input['urls'])
        for input in execution.get('inputs', ())
    }
    print_table(input_urls, headers=('input', 'URLs'))
    print()
    print_table(execution.get('parameters', {}), headers=('parameter', 'value'))
    print()
def info(counter): """ Show execution info. """ execution = get_project(require=True).get_execution_from_counter( counter=counter, params={ 'exclude': 'metadata,events', }, ) data = dict((humanize_identifier(key), str(value)) for (key, value) in execution.items() if key not in ignored_keys) data['project name'] = execution['project']['name'] data['environment name'] = execution['environment']['name'] print_table(data) print() print_table( {input['name']: '; '.join(input['urls']) for input in execution.get('inputs', ())}, headers=('input', 'URLs'), ) print() print_table( execution.get('parameters', {}), headers=('parameter', 'value'), ) print()
def environments(): """ List all available execution environments. """ envs_data = request('get', '/api/v0/environments/', params={ 'count': 9000 }).json()['results'] envs_data.sort(key=itemgetter('name')) for env in envs_data: if 'per_user_queue_quota' in env and env['per_user_queue_quota'] <= 0: env['per_user_queue_quota'] = 'unlimited' print_table( data=envs_data, columns=[ 'name', 'slug', 'description', 'per_hour_price_usd', 'per_user_queue_quota', 'unfinished_job_count' ], headers=[ 'Name', 'Slug', 'Description', 'Per-Hour USD$', 'Per-User Quota', 'Jobs in Queue' ], )
def commits(): """ List the commits for the linked project. """ project = get_project(require=True) commits_data = request( 'get', '/api/v0/projects/{id}/commits/'.format(id=project.id)).json() try: current_commit = get_current_commit(project.directory) except: current_commit = None # Filter out ad-hoc executions (and remove the adhocness marker) commits_data = [ commit for commit in commits_data if not commit.pop('adhoc', False) ] # Mark the current commit for commit in commits_data: if commit['identifier'] == current_commit: commit['identifier'] += ' (current)' print_table(commits_data)
def test_print_csv(capsys):
    # CSV output: header row followed by one line per record.
    rows = [{"a": 1, "b": 2}, {"a": 3, "b": 4}]
    print_table(rows, format="csv")
    captured = capsys.readouterr()
    assert captured.out == "a,b\n1,2\n3,4\n"