def stop(counters, all=False):
    """
    Stop one or more in-progress executions.
    """
    project = get_project(require=True)
    params = {'project': project.id}
    if counters and all:
        raise click.UsageError('Pass either an execution # or `--all`, not both.')
    elif counters:
        params['counter'] = sorted(IntegerRange.parse(counters).as_set())
    elif all:
        params['status'] = 'incomplete'
    else:
        warn('Nothing to stop (pass #s or `--all`)')
        return 1
    for execution in request('get', '/api/v0/executions/', params=params).json()['results']:
        click.echo('Stopping #{counter}... '.format(counter=execution['counter']), nl=False)
        resp = request('post', execution['urls']['stop'])
        click.echo(resp.text)
    success('Done.')
def delete_execution(project, counter, purge_outputs=False):
    execution_url = '/api/v0/executions/{project_id}:{counter}/'.format(
        project_id=project.id, counter=counter)
    try:
        execution = request('get', execution_url).json()
    except APIError as ae:  # pragma: no cover
        if ae.response.status_code == 404:
            return False
        raise
    if purge_outputs:
        for output_datum in execution.get('outputs', ()):
            if not output_datum.get('purged'):
                click.echo('#{counter}: Purging output {name}... '.format(
                    counter=execution['counter'],
                    name=output_datum['name'],
                ))
                purge_url = '/api/v0/data/{datum_id}/purge/'.format(datum_id=output_datum['id'])
                resp = request('post', purge_url, handle_errors=False)
                if resp.status_code >= 400:  # pragma: no cover
                    warn('Error purging output: {error}; leaving this execution alone!'.format(
                        error=resp.text))
                    return False
    click.echo('Deleting #{counter}... '.format(counter=execution['counter']))
    resp = request('delete', execution_url, handle_errors=False)
    if resp.status_code >= 400:  # pragma: no cover
        warn('Error deleting execution: {error}'.format(error=resp.text))
        return False
    return True
def delete_execution(project: Project, counter: int, purge_outputs: bool = False) -> bool:
    execution_url = f'/api/v0/executions/{project.id}:{counter}/'
    try:
        execution = request('get', execution_url).json()
    except APIError as ae:  # pragma: no cover
        if ae.response.status_code == 404:
            return False
        raise
    if purge_outputs:
        for output_datum in execution.get('outputs', ()):
            if not output_datum.get('purged'):
                progress('#{counter}: Purging output {name}... '.format(
                    counter=execution['counter'],
                    name=output_datum['name'],
                ))
                purge_url = f"/api/v0/data/{output_datum['id']}/purge/"
                resp = request('post', purge_url, handle_errors=False)
                if resp.status_code >= 400:  # pragma: no cover
                    warn(f'Error purging output: {resp.text}; leaving this execution alone!')
                    return False
    progress(f"Deleting #{execution['counter']}... ")
    resp = request('delete', execution_url, handle_errors=False)
    if resp.status_code >= 400:  # pragma: no cover
        warn(f'Error deleting execution: {resp.text}')
        return False
    return True
def test_api_error(logged_in, capsys):
    with requests_mock.mock() as m:
        m.get('https://app.valohai.com/api/foo/', json={'error': 'Oh no!'}, status_code=406)
        with pytest.raises(APIError) as aei:
            request('get', 'https://app.valohai.com/api/foo/')
        aei.value.show()
        out, err = capsys.readouterr()
        assert err.startswith('Error: {"error": "Oh no!"}')
def test_user_agent(monkeypatch, logged_in, prefix):
    if prefix:
        monkeypatch.setattr(settings, 'api_user_agent_prefix', prefix)
    with requests_mock.mock() as m:
        m.get("http://192.168.1.1/", content=b'ok')
        request("GET", "http://192.168.1.1/")
        user_agent = m.last_request.headers["User-Agent"]
        assert 'valohai-cli' in user_agent
        if prefix:
            assert user_agent.startswith(prefix)
def test_api_error(logged_in, capsys):
    nonce = get_random_string()
    message = f'Oh no! {nonce}'
    with requests_mock.mock() as m:
        m.get('https://app.valohai.com/api/foo/', json={'error': message}, status_code=406)
        with pytest.raises(APIError) as aei:
            request('get', 'https://app.valohai.com/api/foo/')
        aei.value.show()
        out, err = capsys.readouterr()
        assert message in err
def get_execution_from_counter(self, counter, detail=False):
    results = request('get', '/api/v0/executions/', params={
        'project': self.id,
        'counter': counter,
    }).json()['results']
    assert len(results) <= 1
    if not results:
        raise ValueError('Execution #{counter} does not exist'.format(counter=counter))
    obj = results[0]
    if detail:
        obj = request('get', obj['url']).json()
    return obj
def environments(gpu: bool, price: bool, queue: bool, description: bool) -> None:
    """
    List all available execution environments.
    """
    envs_data = request('get', '/api/v0/environments/', params={'limit': 9000}).json()['results']
    envs_data.sort(key=itemgetter('name'))
    columns_and_headers = filter(None, [
        ('name', 'Name'),
        ('slug', 'Slug'),
        ('gpu_spec', 'GPU Specification') if gpu else None,
        ('description', 'Description') if description else None,
        ('per_hour_price_usd', 'Per-Hour USD$') if price else None,
        ('per_user_queue_quota', 'Per-User Quota') if queue else None,
        ('unfinished_job_count', 'Jobs in Queue') if queue else None,
    ])
    columns, headers = zip(*columns_and_headers)
    for env in envs_data:
        if 'per_user_queue_quota' in env and env['per_user_queue_quota'] <= 0:
            env['per_user_queue_quota'] = 'unlimited'
    print_table(
        data=envs_data,
        columns=columns,
        headers=headers,
    )
def choose_project(dir, spec=None):
    """
    Choose a project, possibly interactively.

    :param dir: Directory (only used for prompts)
    :param spec: An optional search string
    :return: project object or None
    """
    projects = request('get', '/api/v0/projects/', params={'count': '1000'}).json()['results']
    if not projects:
        if click.confirm('You don\'t have any projects. Create one instead?'):
            raise NewProjectInstead()
        return None

    if spec:
        projects = filter_projects(projects, spec)
        if not projects:
            warn('No projects match %s' % spec)
            return None

    if len(projects) == 1:
        return projects[0]

    def nonlist_validator(answer):
        if answer.startswith('n'):
            raise NewProjectInstead()

    prompt = 'Which project would you like to link with {dir}?\nEnter [n] to create a new project.'.format(
        dir=click.style(dir, bold=True),
    )
    return prompt_from_list(projects, prompt, nonlist_validator)
def _upload_commit_code(project: Project, tarball: str, description: str = '') -> dict:
    size = os.stat(tarball).st_size
    click.echo(f'Uploading {filesizeformat(size)}...')
    with open(tarball, 'rb') as tarball_fp:
        upload = MultipartEncoder({
            'data': ('data.tgz', tarball_fp, 'application/gzip'),
            'description': description,
        })
        prog = click.progressbar(length=upload.len, width=0)
        # Don't bother with the bar if the upload is small
        prog.is_hidden = (size < 524288)  # type: ignore[attr-defined]
        with prog:
            def callback(upload: Any) -> None:
                prog.pos = upload.bytes_read  # type: ignore[attr-defined]
                prog.update(0)  # Step is 0 because we set pos above

            monitor = MultipartEncoderMonitor(upload, callback)
            commit_obj: dict = request(
                'post',
                f'/api/v0/projects/{project.id}/import-package/',
                data=monitor,
                headers={'Content-Type': monitor.content_type},
            ).json()
    success(f"Uploaded ad-hoc code {commit_obj['identifier']}")
    return commit_obj
def list():
    """
    List all projects.
    """
    projects_data = request('get', '/api/v0/projects/', params={'count': 9000}).json()['results']
    projects_data.sort(key=itemgetter('name'))
    print_table(projects_data, ['name', 'description'])
def open() -> None:
    """
    Open the project's view in a web browser.
    """
    project = get_project(require=True)
    project_data = request('get', f'/api/v0/projects/{project.id}/').json()
    open_browser(project_data)
def open():
    """
    Open the project's view in a web browser.
    """
    project = get_project(require=True)
    project_data = request('get', '/api/v0/projects/{id}/'.format(id=project.id)).json()
    open_browser(project_data)
def list(count: int) -> None:
    """
    Show a list of data aliases in the project.
    """
    params = {
        'limit': count,
        'ordering': '-ctime',
        'deleted': 'false',
        'no_count': 'true',
    }
    project = get_project(require=True)
    assert project
    if project:
        params['project'] = project.id
    aliases = request('get', '/api/v0/datum-aliases/', params=params).json()['results']
    if settings.output_format == 'json':
        return print_json(aliases)
    if not aliases:
        info(f'{project}: No data aliases.')
        return
    for alias in aliases:
        alias['url'] = f'datum://{alias["name"]}'
        alias['datum'] = 'No target' if not alias['datum'] else alias['datum']['name']
    print_table(
        aliases,
        columns=['name', 'datum', 'mtime', 'url'],
        headers=['Name', 'Data', 'Last Modified', 'URL'],
    )
def prompt_for_value(self, ctx: Context) -> Optional[str]:
    try:
        options: List[str] = request('get', '/api/v0/projects/ownership_options/').json()
    except APINotFoundError:  # Endpoint not there, ah well!
        return None
    except APIError as ae:
        warn(f'Unable to retrieve ownership options: {ae}')
        return None
    if not options:
        return None
    if len(options) == 1:
        return options[0]
    print('Who should own this project? The options available to you are:')
    for option in options:
        print(f' * {option}')
    return str(prompt(
        self.prompt,
        default=options[0],
        type=click.Choice(options),
        show_choices=False,
        value_proc=lambda x: self.process_value(ctx, x),
    ))
def stop(
    counters: Optional[Union[List[str], Tuple[str]]] = None,
    all: bool = False,
) -> None:
    """
    Stop one or more in-progress executions.
    """
    project = get_project(require=True)
    assert project
    if counters and len(counters) == 1 and counters[0] == 'all':  # pragma: no cover
        # Makes sense to support this spelling too.
        counters = None
        all = True

    if counters and all:
        # If we spell out latest and ranges in the error message, it becomes kinda
        # unwieldy, so let's just do this.
        raise click.UsageError('Pass execution counter(s), or `--all`, not both.')

    counters = list(counters or [])
    executions = get_executions_for_stop(
        project,
        counters=counters,
        all=all,
    )

    for execution in executions:
        progress(f"Stopping #{execution['counter']}... ")
        resp = request('post', execution['urls']['stop'])
        info(resp.text)
    success('Done.')
def create_project(directory: str, name: str, description: str = '', owner: Optional[str] = None,
                   link: bool = True, yes: bool = False) -> None:
    """
    Internal API for creating a project.
    """
    project_data = request('post', '/api/v0/projects/', data=compact_dict({
        'name': name,
        'description': description,
        'owner': owner,
    })).json()
    long_name = '{}/{}'.format(
        project_data["owner"]["username"],
        project_data["name"],
    )
    success(f'Project {long_name} created.')
    if link:
        current_project = get_project(directory)
        if current_project and not yes:
            if not click.confirm(
                'The directory is already linked to {project}. Override that?'.format(
                    project=current_project.name,
                )
            ):
                return
        set_project_link(directory, project_data, inform=True)
    else:
        info('Links left alone.')
def start_pipeline(
    config: Config,
    pipeline: Pipeline,
    project_id: str,
    commit: str,
    title: Optional[str] = None,
) -> None:
    payload: Dict[str, Any] = {
        "project": project_id,
        "title": title or pipeline.name,
        **PipelineConverter(config=config, commit_identifier=commit).convert_pipeline(pipeline),
    }

    resp = request(
        method='post',
        url='/api/v0/pipelines/',
        json=payload,
    ).json()

    success(f"Pipeline ={resp.get('counter')} queued. See {resp.get('urls').get('display')}")
def list(count: int) -> None:
    """
    Show a list of data in the project.
    """
    params = {
        'limit': count,
        'ordering': '-ctime',
        'deleted': 'false',
        'no_count': 'true',
    }
    project = get_project(require=True)
    assert project
    if project:
        params['project'] = project.id
    data = request('get', '/api/v0/data/', params=params).json()['results']
    if settings.output_format == 'json':
        return print_json(data)
    if not data:
        info(f'{project}: No data.')
        return
    for datum in data:
        datum['url'] = f'datum://{datum["id"]}'
        datum['execution_string'] = (
            'Not from exec' if not datum['output_execution']
            else f'#{datum["output_execution"]["counter"]}'
        )
        datum['size'] = convert_size(datum['size'])
        datum['uri'] = 'No URI' if not datum['uri'] else datum['uri']
    print_table(
        data,
        columns=['name', 'size', 'execution_string', 'ctime', 'url', 'uri'],
        headers=['Name', 'Size', 'Output of Exec', 'Created At', 'URL', 'URI'],
    )
def download_outputs(outputs: List[dict], output_path: str, show_success_message: bool = True) -> None:
    total_size = sum(o['size'] for o in outputs)
    num_width = len(str(len(outputs)))  # How many digits required to print the number of outputs
    start_time = time.time()
    with \
            click.progressbar(length=total_size, show_pos=True, item_show_func=str) as prog, \
            requests.Session() as dl_sess:
        for i, output in enumerate(outputs, 1):
            name = output['name']
            url = request(
                method='get',
                url=f"/api/v0/data/{output['id']}/download/",
            ).json()['url']
            out_path = os.path.join(output_path, name)
            out_dir = os.path.dirname(out_path)
            if not os.path.isdir(out_dir):
                os.makedirs(out_dir)
            resp = dl_sess.get(url, stream=True)
            resp.raise_for_status()
            prog.current_item = f'({str(i).rjust(num_width)}/{str(len(outputs)).ljust(num_width)}) {name}'
            # Force visible bar for the smallest of files:
            prog.short_limit = 0  # type: ignore[attr-defined]
            with open(out_path, 'wb') as outf:
                for chunk in resp.iter_content(chunk_size=131072):
                    prog.update(len(chunk))
                    outf.write(chunk)
    duration = time.time() - start_time
    if show_success_message:
        success(f'Downloaded {len(outputs)} outputs ({total_size} bytes) in {round(duration, 2)} seconds')
def execute(self, **kwargs):
    """
    Execute the creation of the execution. (Heh.)

    This is the Click callback for this command.

    :param kwargs: Assorted kwargs (as passed in by Click).
    :return: Naught
    """
    options, parameters, inputs = self._sift_kwargs(kwargs)
    commit = self.resolve_commit(self.commit)

    payload = {
        'commit': commit,
        'inputs': inputs,
        'parameters': parameters,
        'project': self.project.id,
        'step': self.step.name,
    }
    if self.environment:
        payload['environment'] = self.environment
    resp = request('post', '/api/v0/executions/', json=payload).json()
    success('Execution #{counter} created. See {link}'.format(
        counter=resp['counter'],
        link=resp['urls']['display'],
    ))
    if self.watch:
        ctx = get_current_context()
        from valohai_cli.commands.execution.watch import watch
        ctx.invoke(watch, counter=resp['counter'])
def list(status: str, count: int) -> None:
    """
    Show a list of executions for the project.
    """
    project = get_project(require=True)
    assert project
    params = {
        'project': project.id,
        'limit': count,
        'ordering': '-counter',
        'deleted': 'false',
    }
    if status:
        params['status'] = set(status)
    executions = request('get', '/api/v0/executions/', params=params).json()['results']
    if settings.output_format == 'json':
        return print_json(executions)
    if not executions:
        info(f'{project}: No executions.')
        return
    for execution in executions:
        execution['url'] = execution['urls']['display']
        execution['duration'] = str(
            timedelta(seconds=round(execution['duration'])) if execution['duration'] else ''
        ).rjust(10)
    print_table(
        executions,
        columns=['counter', 'status', 'step', 'duration', 'url'],
        headers=['#', 'Status', 'Step', 'Duration', 'URL'],
    )
def get_execution_outputs(execution: dict) -> List[dict]:
    return list(request(
        method='get',
        url='/api/v0/data/',
        params={
            'output_execution': execution['id'],
            'limit': 9000,
        },
    ).json().get('results', []))
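# Hedged usage sketch, not part of the valohai-cli source. It only ties together
# helpers shown elsewhere in this section (Project.get_execution_from_counter,
# get_execution_outputs, download_outputs); the function name below and the
# assumption that those names are importable/in scope are illustrative only.
def download_outputs_for_counter(project, counter, output_path):
    # Look up the execution by its per-project counter (error if it does not exist).
    execution = project.get_execution_from_counter(counter)
    # List the output data records for that execution via the data API.
    outputs = get_execution_outputs(execution)
    if outputs:
        # Stream each output file into output_path with a progress bar.
        download_outputs(outputs, output_path)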
def refresh_details(self) -> None:
    """
    Refresh the project details from the API.
    """
    data = request(
        'get',
        f'/api/v0/projects/{self.id}/',
    ).json()
    self.data.update(data)
def execute(self, **kwargs: Any) -> None:
    """
    Execute the creation of the execution. (Heh.)

    This is the Click callback for this command.

    :param kwargs: Assorted kwargs (as passed in by Click).
    :return: Naught
    """
    options, parameters, inputs = self._sift_kwargs(kwargs)
    payload = {
        'commit': self.commit,
        'inputs': inputs,
        'parameters': parameters,
        'project': self.project.id,
        'step': self.step.name,
    }
    if self.environment:
        payload['environment'] = self.environment
    if self.image:
        payload['image'] = self.image
    if self.title:
        payload['title'] = self.title
    if self.environment_variables:
        payload['environment_variables'] = self.environment_variables
    if self.tags:
        payload['tags'] = self.tags
    if self.runtime_config:
        payload['runtime_config'] = self.runtime_config

    resp = request(
        method='post',
        url='/api/v0/executions/',
        json=payload,
        api_error_class=ExecutionCreationAPIError,
    ).json()
    success(f"Execution #{resp['counter']} created. See {resp['urls']['display']}")

    ctx = get_current_context()
    if self.download_directory:
        from valohai_cli.commands.execution.outputs import outputs as outputs_command
        ctx.invoke(
            outputs_command,
            counter=resp['counter'],
            sync=True,
            download_directory=self.download_directory,
        )
    if self.watch:
        from valohai_cli.commands.execution.watch import watch
        ctx.invoke(watch, counter=resp['counter'])
def choose_project(dir: str, spec: Optional[str] = None) -> Optional[dict]:
    """
    Choose a project, possibly interactively.

    :param dir: Directory (only used for prompts)
    :param spec: An optional search string
    :return: project object or None
    """
    projects: List[dict] = request('get', '/api/v0/projects/', params={'limit': '1000'}).json()['results']
    if not projects:
        if click.confirm('You don\'t have any projects. Create one instead?'):
            raise NewProjectInstead()
        return None

    if spec:
        projects = filter_projects(projects, spec)
        if not projects:
            warn(f'No projects match {spec}')
            return None

    if len(projects) == 1:
        return projects[0]

    def nonlist_validator(answer: str) -> Any:
        if answer.startswith('n'):
            raise NewProjectInstead()

    prompt = 'Which project would you like to link with {dir}?\nEnter [n] to create a new project.'.format(
        dir=click.style(dir, bold=True),
    )

    has_multiple_owners = (len({p.get('owner', {}).get('id') for p in projects}) > 1)

    def project_name_formatter(project: dict) -> str:
        name: str = project['name']
        try:
            if has_multiple_owners:
                dim_owner = click.style(project['owner']['username'] + '/', dim=True)
                return f'{dim_owner}{name}'
        except Exception:
            pass
        return name

    projects.sort(key=lambda project: project_name_formatter(project).lower())
    return prompt_from_list(projects, prompt, nonlist_validator, name_formatter=project_name_formatter)
def get_execution_from_counter(self, counter, params=None):
    try:
        return request(
            method='get',
            url='/api/v0/executions/{project_id}:{counter}/'.format(
                project_id=self.id, counter=counter),
            params=(params or {}),
        ).json()
    except APIError as ae:
        if ae.response.status_code == 404:
            raise NoExecution('Execution #{counter} does not exist'.format(counter=counter))
        raise
def _get_pre_existing_commit(tarball: str) -> Optional[dict]:
    try:
        # This is the same mechanism used by the server to
        # calculate the identifier for an ad-hoc tarball.
        with open(tarball, 'rb') as tarball_fp:
            commit_identifier = f'~{get_fp_sha256(tarball_fp)}'

        # See if we have a commit with that identifier
        commit_obj: Dict[str, Any] = request('get', f'/api/v0/commits/{commit_identifier}/').json()
        return (commit_obj if commit_obj.get('adhoc') else None)
    except APIError:
        # In the case of any API errors, let's just assume the commit doesn't exist.
        return None
def load_full_commit(self, identifier: Optional[str] = None) -> dict:
    """
    Load the commit object including config data (as a dict) from the Valohai host
    for the given commit identifier.

    :param identifier: Identifier; None to use the latest commit on the server.
    """
    for commit in self.load_commit_list():
        if commit.get('adhoc'):
            continue
        if not identifier or commit['identifier'] == identifier:
            data = request(method='get', url=commit['url'], params={'include': 'config'}).json()
            assert isinstance(data, dict)
            return data

    raise ValueError(f'No commit found for commit {identifier}')
def resolve_commit(self, commit):
    if not commit:
        commit = git.get_current_commit(self.project.directory)
    commits = request('get', '/api/v0/projects/{id}/commits/'.format(id=self.project.id)).json()
    by_identifier = {c['identifier']: c for c in commits}
    if commit not in by_identifier:
        warn('Commit {commit} is not known for the project. Have you pushed it?'.format(commit=commit))
        raise click.Abort()
    return commit