def watch(counter: str, force: bool, filter_download: Optional[str], download_directory: Optional[str]) -> None:
    """
    Poll an execution's outputs and download new ones until the execution finishes.

    :param counter: Execution counter to watch.
    :param force: Passed through to output filtering (re-download behavior).
    :param filter_download: Optional filter pattern for which outputs to download.
    :param download_directory: Target folder; watching is refused if unset.
    """
    if not download_directory:
        warn('Target folder is not set. Use --download to set it.')
        return
    info(f"Downloading to: {download_directory}\nWaiting for new outputs...")
    project = get_project(require=True)
    execution = project.get_execution_from_counter(
        counter=counter,
        params={'exclude': 'outputs'},
    )
    while True:
        candidates = filter_outputs(
            get_execution_outputs(execution),
            download_directory,
            filter_download,
            force,
        )
        if candidates:
            download_outputs(candidates, download_directory, show_success_message=False)
        if execution['status'] in complete_execution_statuses:
            info('Execution has finished.')
            return
        time.sleep(1)  # Poll once per second.
def list(count: int) -> None:
    """
    Show a list of data in the project.
    """
    project = get_project(require=True)
    assert project
    params = {
        'limit': count,
        'ordering': '-ctime',
        'deleted': 'false',
        'no_count': 'true',
    }
    # Kept even though asserted above: `assert` is stripped under `python -O`.
    if project:
        params['project'] = project.id
    data = request('get', '/api/v0/data/', params=params).json()['results']
    if settings.output_format == 'json':
        return print_json(data)
    if not data:
        info(f'{project}: No data.')
        return
    for datum in data:
        datum['url'] = f'datum://{datum["id"]}'
        if datum['output_execution']:
            datum['execution_string'] = f'#{datum["output_execution"]["counter"]}'
        else:
            datum['execution_string'] = 'Not from exec'
        datum['size'] = convert_size(datum['size'])
        if not datum['uri']:
            datum['uri'] = 'No URI'
    print_table(
        data,
        columns=['name', 'size', 'execution_string', 'ctime', 'url', 'uri'],
        headers=['Name', 'Size', 'Output of Exec', 'Created At', 'URL', 'URI'],
    )
def list(status: str, count: int) -> None:
    """
    Show a list of executions for the project.
    """
    project = get_project(require=True)
    assert project
    params = {
        'project': project.id,
        'limit': count,
        'ordering': '-counter',
        'deleted': 'false',
    }
    if status:
        # NOTE(review): `set(status)` over a plain string yields a set of single
        # characters; presumably `status` is really a tuple of statuses coming
        # from a click `multiple=True` option — verify the annotation/caller.
        params['status'] = set(status)
    executions = request('get', '/api/v0/executions/', params=params).json()['results']
    if settings.output_format == 'json':
        return print_json(executions)
    if not executions:
        info(f'{project}: No executions.')
        return
    for execution in executions:
        execution['url'] = execution['urls']['display']
        if execution['duration']:
            duration_text = str(timedelta(seconds=round(execution['duration'])))
        else:
            duration_text = ''
        # Right-justify so the column lines up in the table.
        execution['duration'] = duration_text.rjust(10)
    print_table(
        executions,
        columns=['counter', 'status', 'step', 'duration', 'url'],
        headers=['#', 'Status', 'Step', 'Duration', 'URL'],
    )
def list(count: int) -> None:
    """
    Show a list of data aliases in the project.
    """
    project = get_project(require=True)
    assert project
    params = {
        'limit': count,
        'ordering': '-ctime',
        'deleted': 'false',
        'no_count': 'true',
    }
    # Kept even though asserted above: `assert` is stripped under `python -O`.
    if project:
        params['project'] = project.id
    aliases = request('get', '/api/v0/datum-aliases/', params=params).json()['results']
    if settings.output_format == 'json':
        return print_json(aliases)
    if not aliases:
        info(f'{project}: No data aliases.')
        return
    for alias in aliases:
        alias['url'] = f'datum://{alias["name"]}'
        if alias['datum']:
            alias['datum'] = alias['datum']['name']
        else:
            alias['datum'] = 'No target'
    print_table(
        aliases,
        columns=['name', 'datum', 'mtime', 'url'],
        headers=['Name', 'Data', 'Last Modified', 'URL'],
    )
def stop(
    counters: Optional[Union[List[str], Tuple[str]]] = None,
    all: bool = False,
) -> None:
    """
    Stop one or more in-progress executions.
    """
    project = get_project(require=True)
    assert project
    if counters and len(counters) == 1 and counters[0] == 'all':  # pragma: no cover
        # Allow `stop all` as a synonym for `stop --all`.
        counters = None
        all = True
    if counters and all:
        # If we spell out latest and ranges in the error message, it becomes kinda
        # unwieldy, so let's just do this.
        raise click.UsageError('Pass execution counter(s), or `--all`, not both.')
    executions = get_executions_for_stop(
        project,
        counters=list(counters or []),
        all=all,
    )
    for execution in executions:
        progress(f"Stopping #{execution['counter']}... ")
        response = request('post', execution['urls']['stop'])
        info(response.text)
    success('Done.')
def create_project(directory: str, name: str, description: str = '', owner: Optional[str] = None, link: bool = True, yes: bool = False) -> None:
    """
    Internal API for creating a project.

    :param directory: Directory to (optionally) link to the new project.
    :param name: Project name.
    :param description: Project description.
    :param owner: Optional owner (username/organization) for the project.
    :param link: Whether to link `directory` to the created project.
    :param yes: Skip the confirmation prompt when overriding an existing link.
    """
    project_data = request('post', '/api/v0/projects/', data=compact_dict({
        'name': name,
        'description': description,
        'owner': owner,
    })).json()
    long_name = f'{project_data["owner"]["username"]}/{project_data["name"]}'
    success(f'Project {long_name} created.')
    if not link:
        info('Links left alone.')
        return
    current_project = get_project(directory)
    if current_project and not yes:
        prompt = f'The directory is already linked to {current_project.name}. Override that?'
        if not click.confirm(prompt):
            return
    set_project_link(directory, project_data, inform=True)
def login(
    username: str,
    password: str,
    token: Optional[str],
    host: Optional[str],
    yes: bool,
    verify_ssl: bool,
) -> None:
    """Log in into Valohai."""
    if settings.user and settings.token:
        current_username = settings.user['username']
        current_host = settings.host
        if yes:
            info(f'--yes set: ignoring pre-existing login for {current_username} on {current_host}')
        else:
            click.confirm(
                (
                    f'You are already logged in as {current_username} on {current_host}.\n'
                    'Are you sure you wish to acquire a new token?'
                ),
                abort=True,
            )
    if not (token or username or password or host):
        # Don't show the banner if this seems like a non-interactive login.
        click.secho(f'Welcome to Valohai CLI {__version__}!', bold=True)
    host = validate_host(host)
    if token:
        if username or password:
            error('Token is mutually exclusive with username/password')
            raise Exit(1)
        click.echo(f'Using token {token[:5]}... to log in.')
    else:
        token = do_user_pass_login(
            host=host,
            username=username,
            password=password,
            verify_ssl=verify_ssl,
        )
    click.echo(f'Verifying API token on {host}...')
    with APISession(host, token, verify_ssl=verify_ssl) as sess:
        user_data = sess.get('/api/v0/users/me/').json()
    settings.persistence.update(
        host=host,
        user=user_data,
        token=token,
        verify_ssl=verify_ssl,
    )
    settings.persistence.save()
    success(f"Logged in. Hey {user_data.get('username', 'there')}!")
    if not verify_ssl:
        warn("SSL verification is off. This may leave you vulnerable to man-in-the-middle attacks.")
def configure_project_override(project_id: str, mode: Optional[str], directory: Optional[str] = None) -> None:
    """
    Apply a project override, inferring local/remote mode from valohai.yaml presence if unset.

    :param project_id: ID of the project to override with.
    :param mode: 'local' or 'remote'; autodetected when None.
    :param directory: Directory to inspect; defaults to the current working directory.
    :raises click.Abort: If the override could not be set.
    """
    directory = directory or os.getcwd()
    if not mode:
        yaml_filename = os.path.join(directory, 'valohai.yaml')
        if os.path.isfile(yaml_filename):
            info(f'{yaml_filename} exists, assuming local project')
            mode = 'local'
        else:
            info(f'{yaml_filename} does not exist, assuming remote project')
            mode = 'remote'
    if not settings.set_override_project(project_id, directory=directory, mode=mode):
        raise click.Abort()
def step(filenames: List[str], yaml: Optional[str]) -> None:
    """
    Update a step config(s) in valohai.yaml based on Python source file(s).

    Example: vh yaml step hello.py

    :param filenames: Path(s) of the Python source code files.
    :param yaml: Optional explicit path to the YAML config file.
    """
    project = get_project()
    if project is None:
        # No linked project: simulate one rooted at the current directory.
        info("no project linked - assuming files are in current directory.")
        project = Project(data={}, directory=os.getcwd())
        project.name = "YAML command simulated project"
    config_path = project.get_config_filename(yaml_path=yaml)
    yaml = yaml or project.get_yaml_path()
    for source_path in filenames:
        if not os.path.isfile(config_path):
            changed_message = f"{yaml} generated."
        elif yaml_needs_update(source_path, project, yaml):
            changed_message = f"{yaml} updated."
        else:
            info(f"{yaml} already up-to-date.")
            continue
        update_yaml_from_source(source_path, project, yaml)
        info(changed_message)
        create_or_update_requirements(project)
def set_override_project(self, project_id: str, directory: str, mode: str) -> bool:
    """
    Override the linked project with the project identified by `project_id`.

    :param project_id: ID of the project to look up.
    :param directory: Directory the project object should be rooted at.
    :param mode: 'local' or 'remote'; selects the project model class.
    :return: True on success, False if no project with that ID was found.
    """
    from valohai_cli.api import request
    from valohai_cli.models.project import Project
    from valohai_cli.models.remote_project import RemoteProject
    assert mode in ('local', 'remote')
    try:
        project_data = request('get', f'/api/v0/projects/{project_id}/').json()
        project_cls = RemoteProject if mode == 'remote' else Project
        project = self.override_project = project_cls(data=project_data, directory=directory)
        mode_description = 'local mode' if mode == 'local' else 'remote mode'
        info(f'Using project {project.name} in {mode_description} (in {project.directory})')
        return True
    except APINotFoundError:
        error(f'No project was found with the ID {project_id} (via --project or VALOHAI_PROJECT)')
        return False
def create_or_update_requirements(project: Project) -> None:
    """
    Makes sure valohai-utils is in requirements.txt file or creates
    a new requirements.txt with valohai-utils.

    :param project: Currently linked Valohai project
    """
    requirements_path = os.path.join(project.directory, "requirements.txt")
    lines = []
    if os.path.isfile(requirements_path):
        with open(requirements_path) as fp:
            lines = list(fp)
    if any('valohai-utils' in line for line in lines):
        return  # Already present; nothing to do.
    lines.append('valohai-utils\n')
    with open(requirements_path, 'w') as fp:
        fp.write("".join(lines))
    info("valohai-utils added to requirements.txt")
def step(filenames: List[str]) -> None:
    """
    Update a step config(s) in valohai.yaml based on Python source file(s).

    Example: vh yaml step hello.py

    :param filenames: Path(s) of the Python source code files.
    """
    project = get_project()
    assert project
    config_path = project.get_config_filename()
    for source_path in filenames:
        if not os.path.isfile(config_path):
            message = "valohai.yaml generated."
        elif yaml_needs_update(source_path, project):
            message = "valohai.yaml updated."
        else:
            info("valohai.yaml already up-to-date.")
            continue
        update_yaml_from_source(source_path, project)
        info(message)
        create_or_update_requirements(project)
def get_files_for_package(
    dir: str,
    allow_git: bool = True,
    ignore_patterns: Iterable[str] = (),
) -> Dict[str, PackageFileInfo]:
    """
    Get files to package for ad-hoc packaging from the file system.

    :param dir: The source directory. Probably a working copy root or similar.
    :param allow_git: Whether to allow usage of `git ls-files`, if available, for packaging.
    :param ignore_patterns: List of ignored patterns.
    :return:
    """
    git_usage, vhignore_usage, ftup_generator = _get_files(dir, allow_git=allow_git)
    selected = [
        ftup
        for ftup in ftup_generator
        if not ignore_patterns or is_valid_path(ftup[1], ignore_patterns)
    ]
    if len(selected) > FILE_COUNT_HARD_THRESHOLD:
        raise PackageTooLarge(
            f'Trying to package too many files (threshold: {FILE_COUNT_HARD_THRESHOLD}).'
        )
    info(_get_packaging_info_message(len(selected), git_usage, vhignore_usage))
    output_stats = {}
    for name, path in selected:
        try:
            output_stats[name] = PackageFileInfo(source_path=path, stat=os.stat(path))
        except FileNotFoundError:
            # A file was reported by git-ls but not found on disk - don't try to package it.
            pass
    return output_stats
def get_files_for_package(
    dir: str,
    allow_git: bool = True,
    ignore_patterns: Iterable[str] = (),
) -> Dict[str, PackageFileInfo]:
    """
    Get files to package for ad-hoc packaging from the file system.

    :param dir: The source directory. Probably a working copy root or similar.
    :param allow_git: Whether to allow usage of `git ls-files`, if available, for packaging.
    :param ignore_patterns: List of ignored patterns.
    :return:
    """
    files_and_paths = []
    using_git, ftup_generator = _get_files(dir, allow_git=allow_git)
    for ftup in ftup_generator:
        if ignore_patterns and not is_valid_path(ftup[1], ignore_patterns):
            continue
        files_and_paths.append(ftup)
    if len(files_and_paths) > FILE_COUNT_HARD_THRESHOLD:
        raise PackageTooLarge(
            'Trying to package too many files (threshold: {threshold}).'.
            format(threshold=FILE_COUNT_HARD_THRESHOLD, ))
    if using_git:
        info(f'Used git to find {len(files_and_paths)} files to package')
    else:
        info(
            f'Git not available, found {len(files_and_paths)} files to package'
        )
    output_stats = {}
    for file, file_path in files_and_paths:
        try:
            output_stats[file] = PackageFileInfo(source_path=file_path, stat=os.stat(file_path))
        except FileNotFoundError:
            # Fix: `git ls-files` can report a file that no longer exists on disk
            # (e.g. deleted but not yet staged); skip it instead of crashing.
            # This matches the sibling implementation of this function in SOURCE.
            pass
    return output_stats
def run(
    ctx: click.Context,
    *,
    adhoc: bool,
    args: List[str],
    commit: Optional[str],
    download_directory: Optional[str],
    environment: Optional[str],
    environment_variables: List[str],
    image: Optional[str],
    step: Optional[str],
    tags: List[str],
    title: Optional[str],
    validate_adhoc: bool,
    watch: bool,
) -> Any:
    """
    Start an execution of a step.
    """
    # Having to explicitly compare to `--help` is slightly weird, but it's because of the nested command thing.
    if step == '--help' or not step:
        click.echo(ctx.get_help(), color=ctx.color)
        try:
            config = get_project(require=True).get_config(
                commit_identifier=commit)
            if config.steps:
                click.secho(
                    '\nThese steps are available in the selected commit:\n',
                    color=ctx.color, bold=True)
                for step in sorted(config.steps):
                    click.echo(f' * {step}', color=ctx.color)
        except Exception:
            # Fix: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit. Narrowed to Exception.
            # If we fail to extract the step list, it's not that big of a deal.
            pass
        ctx.exit()
    project = get_project(require=True)

    if adhoc:
        if commit:
            raise click.UsageError(
                '--commit and --adhoc are mutually exclusive.')
        if project.is_remote:
            raise click.UsageError(
                '--adhoc can not be used with remote projects.')

    if download_directory and watch:
        raise click.UsageError(
            'Combining --sync and --watch not supported yet.')

    if not commit and project.is_remote:
        # For remote projects, we need to resolve early.
        commit = project.resolve_commit()['identifier']
        info('Using remote project {name}\'s newest commit {commit}'.format(
            name=project.name,
            commit=commit,
        ))

    # We need to pass commit=None when adhoc=True to `get_config`, but
    # the further steps do need the real commit identifier from remote,
    # so this is done before `commit` is mangled by `create_adhoc_commit`.
    config = project.get_config(commit_identifier=commit)
    matched_step = match_step(config, step)
    step = config.steps[matched_step]
    rc = RunCommand(
        project=project,
        step=step,
        commit=commit,
        environment=environment,
        watch=watch,
        download_directory=download_directory,
        image=image,
        title=title,
        environment_variables=parse_environment_variable_strings(
            environment_variables),
        tags=tags,
    )
    with rc.make_context(
            rc.name, list(args), parent=ctx) as child_ctx:  # type: ignore[attr-defined]
        if adhoc:
            rc.commit = package_adhoc_commit(
                project, validate=validate_adhoc)['identifier']
        return rc.invoke(child_ctx)
def login(username: str, password: str, token: Optional[str], host: Optional[str], yes: bool) -> None:
    """Log in into Valohai."""
    host = (
        host  # Explicitly set for this command, ...
        or settings.overrides.get('host')  # ... or from the top-level CLI (or envvar) ...
        or default_app_host  # ... or the global default
    )
    if settings.user and settings.token:
        current_username = settings.user['username']
        if yes:
            info(f'--yes set: ignoring pre-existing login for {current_username}')
        else:
            click.confirm(
                f'You are already logged in as {current_username}.\n'
                'Are you sure you wish to acquire a new token?',
                abort=True,
            )
    if token:
        if username or password:
            error('Token is mutually exclusive with username/password')
            raise Exit(1)
        click.echo(f'Using token {token[:5]}... to log in.')
    else:
        if not (username or password):
            click.secho(f'Welcome to Valohai CLI {__version__}!', bold=True)
            click.echo(f'\nIf you don\'t yet have an account, please create one at {host} first.\n')
        if username:
            click.echo(f'Username: {username}')
        else:
            username = click.prompt('Username').strip()
        if not password:
            password = click.prompt('Password', hide_input=True)
        click.echo('Retrieving API token...')
        with APISession(host) as sess:
            try:
                token_data = sess.post('/api/v0/get-token/', data={
                    'username': username,
                    'password': password,
                }).json()
                token = token_data['token']
            except APIError as ae:
                if ae.code in ('has_external_identity', 'has_2fa'):
                    # External identity / 2FA accounts must log in with a token;
                    # show the helper banner before re-raising.
                    command = 'vh login --token TOKEN_HERE '
                    if host != default_app_host:
                        command += f'--host {host}'
                    banner(TOKEN_LOGIN_HELP.format(code=ae.code, host=host, command=command))
                raise
    click.echo('Verifying API token...')
    with APISession(host, token) as sess:
        user_data = sess.get('/api/v0/users/me/').json()
    settings.persistence.update(host=host, user=user_data, token=token)
    settings.persistence.save()
    success(f"Logged in. Hey {user_data.get('username', 'there')}!")
def run(
    ctx: click.Context,
    *,
    adhoc: bool,
    args: List[str],
    commit: Optional[str],
    yaml: Optional[str],
    download_directory: Optional[str],
    environment: Optional[str],
    environment_variables: List[str],
    image: Optional[str],
    step_name: Optional[str],
    tags: List[str],
    title: Optional[str],
    validate_adhoc: bool,
    watch: bool,
    debug_port: int,
    debug_key_file: Optional[str],
) -> Any:
    """
    Start an execution of a step.
    """
    # Having to explicitly compare to `--help` is slightly weird, but it's because of the nested command thing.
    if not step_name or step_name == '--help':
        click.echo(ctx.get_help(), color=ctx.color)
        print_step_list(ctx, commit)
        ctx.exit()
        return
    project = get_project(require=True)
    project.refresh_details()

    if download_directory and watch:
        raise click.UsageError(
            'Combining --sync and --watch not supported yet.')

    if not commit and project.is_remote:
        # For remote projects, we need to resolve early.
        commit = project.resolve_commit()['identifier']
        info(f'Using remote project {project.name}\'s newest commit {commit}')

    # We need to pass commit=None when adhoc=True to `get_config`, but
    # the further steps do need the real commit identifier from remote,
    # so this is done before `commit` is mangled by `create_adhoc_commit`.
    config = project.get_config(commit_identifier=commit, yaml_path=yaml)
    step = config.steps[match_step(config, step_name)]
    commit = create_or_resolve_commit(
        project,
        commit=commit,
        adhoc=adhoc,
        validate_adhoc_commit=validate_adhoc,
        yaml_path=yaml,
    )
    runtime_config = {}  # type: dict[str, Any]
    # --debug-port and --debug-key-file only make sense together.
    if bool(debug_port) ^ bool(debug_key_file):
        raise click.UsageError(
            "Both or neither of --debug-port and --debug-key-file must be set.")
    if debug_port and debug_key_file:
        runtime_config["debug_port"] = debug_port
        with open(debug_key_file, "r") as fp:
            public_key = fp.read().strip()
        if not public_key.startswith("ssh"):
            raise click.UsageError(
                f"The public key read from {debug_key_file} "
                f"does not seem valid (it should start with `ssh`)")
        runtime_config["debug_key"] = public_key
    rc = RunCommand(
        project=project,
        step=step,
        commit=commit,
        environment=environment,
        watch=watch,
        download_directory=download_directory,
        image=image,
        title=title,
        environment_variables=parse_environment_variable_strings(
            environment_variables),
        tags=tags,
        runtime_config=runtime_config,
    )
    with rc.make_context(rc.name, list(args), parent=ctx) as child_ctx:
        return rc.invoke(child_ctx)