def choose_image() -> str:
    """Interactively prompt the user for a Docker image name and return it."""
    image_suggestions = get_image_suggestions()
    intro = 'Now let\'s pick a Docker image to use with your code.\n'
    if image_suggestions:
        intro += (
            'Here are some recommended choices, but feel free to type in one of '
            'your own from the ones available at https://hub.docker.com/'
        )
    click.echo(intro)
    while True:
        if image_suggestions:
            prompt_text = 'Choose a number or enter a Docker image name.'
        else:
            prompt_text = 'Enter a Docker image name.'
        selection = prompt_from_list(
            image_suggestions,
            prompt_text,
            nonlist_validator=lambda s: s.strip(),
        )
        # A dict means a pick from the suggestion list; otherwise free-form text.
        image_name = str(selection['name']) if isinstance(selection, dict) else str(selection)
        if click.confirm(f'Is {click.style(image_name, bold=True)} correct?'):
            break
    success(f'Great! Using {image_name}.')
    return image_name
def download_outputs(outputs: List[dict], output_path: str, show_success_message: bool = True) -> None:
    """
    Download all `outputs` into the directory `output_path`, with a progress bar.

    :param outputs: Output dicts; each must have 'id', 'name' and 'size' keys.
    :param output_path: Local directory to write the files into (subdirectories
                        are created as needed, since output names may contain paths).
    :param show_success_message: Whether to print a summary line at the end.
    """
    total_size = sum(o['size'] for o in outputs)
    num_width = len(str(len(outputs)))  # How many digits required to print the number of outputs
    start_time = time.time()
    with \
            click.progressbar(length=total_size, show_pos=True, item_show_func=str) as prog, \
            requests.Session() as dl_sess:
        # Force visible bar for the smallest of files. Hoisted out of the loop:
        # the setting is loop-invariant, so there is no point re-assigning it
        # once per output.
        prog.short_limit = 0  # type: ignore[attr-defined]
        for i, output in enumerate(outputs, 1):
            name = output['name']
            # Each output requires a separate API call to resolve its download URL.
            url = request(
                method='get',
                url=f"/api/v0/data/{output['id']}/download/",
            ).json()['url']
            out_path = os.path.join(output_path, name)
            out_dir = os.path.dirname(out_path)
            # exist_ok avoids the check-then-create race of the old isdir() test;
            # the truthiness guard avoids makedirs('') blowing up when output_path
            # is empty and the name has no directory component.
            if out_dir:
                os.makedirs(out_dir, exist_ok=True)
            resp = dl_sess.get(url, stream=True)
            resp.raise_for_status()
            prog.current_item = f'({str(i).rjust(num_width)}/{str(len(outputs)).ljust(num_width)}) {name}'
            with open(out_path, 'wb') as outf:
                for chunk in resp.iter_content(chunk_size=131072):
                    prog.update(len(chunk))
                    outf.write(chunk)
    duration = time.time() - start_time
    if show_success_message:
        success(f'Downloaded {len(outputs)} outputs ({total_size} bytes) in {round(duration, 2)} seconds')
def parcel(
    destination: Optional[str],
    commit: Optional[str],
    code: str,
    valohai_local_run: bool,
    docker_images: bool,
    unparcel_script: bool,
) -> None:
    """Pack the linked project into a self-contained parcel directory."""
    project = get_project(require=True)
    if not destination:
        # Default to a timestamped directory name derived from the project.
        timestamp = time.strftime("%Y%m%d-%H%M%S")
        destination = sanitize_filename(f'{project.name}-parcel-{timestamp}')
    styled_name = click.style(project.name, bold=True, fg="blue")
    styled_dest = click.style(destination, bold=True, fg="green")
    click.echo(f'Packing {styled_name} to directory {styled_dest}')
    ensure_makedirs(destination)
    extra_docker_images: List[str] = []
    if code in ('bundle', 'archive', 'tarball'):
        export_code(project, destination, mode=code)
    if valohai_local_run:
        export_valohai_local_run(destination)
    if docker_images:
        export_docker_images(project, destination, commit, extra_docker_images)
    if unparcel_script:
        write_unparcel_script(destination)
    success(f'Parcel {destination} created!')
def stop(
    counters: Optional[Union[List[str], Tuple[str]]] = None,
    all: bool = False,
) -> None:
    """
    Stop one or more in-progress executions.
    """
    project = get_project(require=True)
    assert project
    # Makes sense to support the spelling `stop all` as well as `stop --all`.
    if counters and len(counters) == 1 and counters[0] == 'all':  # pragma: no cover
        counters = None
        all = True
    if counters and all:
        # If we spell out latest and ranges in the error message, it becomes kinda
        # unwieldy, so let's just do this.
        raise click.UsageError('Pass execution counter(s), or `--all`, not both.')
    executions = get_executions_for_stop(
        project,
        counters=list(counters or []),
        all=all,
    )
    for execution in executions:
        progress(f"Stopping #{execution['counter']}... ")
        info(request('post', execution['urls']['stop']).text)
    success('Done.')
def unlink(yes):
    """
    Unlink a linked Valohai project.
    """
    dir = get_project_directory()
    project = get_project()
    if not project:
        click.echo(f'{dir} or its parents do not seem linked to a project.')
        return 1
    if not yes:
        # Require explicit confirmation before throwing away the link.
        styled_dir = click.style(project.directory, bold=True)
        styled_name = click.style(project.name, bold=True)
        click.confirm(f'Unlink {styled_dir} from {styled_name}?', abort=True)
    links = settings.get('links', {})
    links.pop(dir)
    settings['links'] = links
    settings.save()
    success('Unlinked {dir} from {name}.'.format(
        dir=click.style(dir, bold=True),
        name=click.style(project.name, bold=True),
    ))
def create_project(directory: str, name: str, description: str = '', owner: Optional[str] = None, link: bool = True, yes: bool = False) -> None:
    """
    Internal API for creating a project.
    """
    payload = compact_dict({
        'name': name,
        'description': description,
        'owner': owner,
    })
    project_data = request('post', '/api/v0/projects/', data=payload).json()
    long_name = f'{project_data["owner"]["username"]}/{project_data["name"]}'
    success(f'Project {long_name} created.')
    if not link:
        info('Links left alone.')
        return
    # Linking requested: possibly override an existing link, with confirmation.
    current_project = get_project(directory)
    if current_project and not yes:
        prompt = 'The directory is already linked to {project}. Override that?'.format(project=current_project.name)
        if not click.confirm(prompt):
            return
    set_project_link(directory, project_data, inform=True)
def stop(counters, all=False):
    """
    Stop one or more in-progress executions.
    """
    project = get_project(require=True)
    params = {'project': project.id}
    if counters and all:
        raise click.UsageError('Pass either an execution # or `--all`, not both.')
    elif counters:
        params['counter'] = sorted(IntegerRange.parse(counters).as_set())
    elif all:
        params['status'] = 'incomplete'
    else:
        warn('Nothing to stop (pass #s or `--all`)')
        return 1
    executions = request('get', '/api/v0/executions/', params=params).json()['results']
    for execution in executions:
        click.echo('Stopping #{counter}... '.format(counter=execution['counter']), nl=False)
        stop_resp = request('post', execution['urls']['stop'])
        click.echo(stop_resp.text)
    success('Done.')
def _upload_commit_code(project: Project, tarball: str, description: str = '') -> dict:
    """Upload `tarball` as an ad-hoc commit for `project`; return the commit object."""
    size = os.stat(tarball).st_size
    click.echo(f'Uploading {filesizeformat(size)}...')
    with open(tarball, 'rb') as tarball_fp:
        upload = MultipartEncoder({
            'data': ('data.tgz', tarball_fp, 'application/gzip'),
            'description': description,
        })
        prog = click.progressbar(length=upload.len, width=0)
        # Don't bother with the bar if the upload is small
        prog.is_hidden = (size < 524288)  # type: ignore[attr-defined]
        with prog:
            def update_progress(mon: Any) -> None:
                prog.pos = mon.bytes_read  # type: ignore[attr-defined]
                prog.update(0)  # Step is 0 because we set pos above
            monitor = MultipartEncoderMonitor(upload, update_progress)
            commit_obj: dict = request(
                'post',
                f'/api/v0/projects/{project.id}/import-package/',
                data=monitor,
                headers={'Content-Type': monitor.content_type},
            ).json()
    success(f"Uploaded ad-hoc code {commit_obj['identifier']}")
    return commit_obj
def execute(self, **kwargs):
    """
    Execute the creation of the execution. (Heh.)

    This is the Click callback for this command.

    :param kwargs: Assorted kwargs (as passed in by Click).
    :return: Naught
    """
    options, parameters, inputs = self._sift_kwargs(kwargs)
    payload = {
        'commit': self.resolve_commit(self.commit),
        'inputs': inputs,
        'parameters': parameters,
        'project': self.project.id,
        'step': self.step.name,
    }
    # The environment is optional; only send it when one was chosen.
    if self.environment:
        payload['environment'] = self.environment
    resp = request('post', '/api/v0/executions/', json=payload).json()
    success('Execution #{counter} created. See {link}'.format(
        counter=resp['counter'],
        link=resp['urls']['display'],
    ))
    if self.watch:
        from valohai_cli.commands.execution.watch import watch
        ctx = get_current_context()
        ctx.invoke(watch, counter=resp['counter'])
def download_outputs(outputs, output_path):
    """
    Download all `outputs` into the directory `output_path`, with a progress bar.

    :param outputs: Output dicts; each is expected to carry 'url', 'name' and
                    'size' keys (assumed from usage below — confirm with callers).
    :param output_path: Local directory to write the files into.
    """
    total_size = sum(o['size'] for o in outputs)
    num_width = len(str(len(outputs)))  # How many digits required to print the number of outputs
    start_time = time.time()
    with \
            click.progressbar(length=total_size, show_pos=True, item_show_func=force_text) as prog, \
            requests.Session() as dl_sess:
        for i, output in enumerate(outputs, 1):
            url = output['url']
            out_path = os.path.join(output_path, output['name'])
            out_dir = os.path.dirname(out_path)
            # Output names may contain subdirectory components; create them lazily.
            if not os.path.isdir(out_dir):
                os.makedirs(out_dir)
            resp = dl_sess.get(url, stream=True)
            resp.raise_for_status()
            # Progress label, e.g. "( 3/12) foo.txt" — counter padded to fixed width.
            prog.current_item = '(%*d/%-*d) %s' % (num_width, i, num_width, len(outputs), output['name'])
            with open(out_path, 'wb') as outf:
                # Stream in 128 KiB chunks so large files never load fully into memory.
                for chunk in resp.iter_content(chunk_size=131072):
                    prog.update(len(chunk))
                    outf.write(chunk)
    duration = time.time() - start_time
    success(
        'Downloaded {n} outputs ({size} bytes) in {duration} seconds'.format(
            n=len(outputs),
            size=total_size,
            duration=round(duration, 2),
        ))
def set_project_link(dir: str, project: dict, inform: bool = False) -> None:
    """Persist a link from directory `dir` to `project`, optionally announcing it."""
    settings.set_project_link(dir, project)
    if inform:
        styled_dir = click.style(dir, bold=True)
        styled_name = click.style(project['name'], bold=True)
        success(f'Linked {styled_dir} to {styled_name}.')
def parcel(destination, commit, code, valohai_local_run, docker_images, unparcel_script):
    """Pack the linked project into a self-contained parcel directory."""
    project = get_project(require=True)
    if not destination:
        # Default to a timestamped directory name derived from the project.
        stamp = time.strftime('%Y%m%d-%H%M%S')
        destination = sanitize_filename(f'{project.name}-parcel-{stamp}')
    styled_name = click.style(project.name, bold=True, fg='blue')
    styled_dest = click.style(destination, bold=True, fg='green')
    click.echo(f'Packing {styled_name} to directory {styled_dest}')
    ensure_makedirs(destination)
    extra_docker_images = []
    if code in ('bundle', 'archive', 'tarball'):
        export_code(project, destination, mode=code)
    if valohai_local_run:
        export_valohai_local_run(project, destination)
    if docker_images:
        export_docker_images(project, destination, commit, extra_docker_images)
    if unparcel_script:
        write_unparcel_script(destination)
    success(f'Parcel {destination} created!')
def start_pipeline(
    config: Config,
    pipeline: Pipeline,
    project_id: str,
    commit: str,
    title: Optional[str] = None,
) -> None:
    """Queue `pipeline` for execution on the given project at the given commit."""
    converted = PipelineConverter(config=config, commit_identifier=commit).convert_pipeline(pipeline)
    payload: Dict[str, Any] = {
        "project": project_id,
        "title": title or pipeline.name,
    }
    # Converted pipeline fields are merged in last (they take precedence).
    payload.update(converted)
    resp = request(
        method='post',
        url='/api/v0/pipelines/',
        json=payload,
    ).json()
    success(
        f"Pipeline ={resp.get('counter')} queued. See {resp.get('urls').get('display')}"
    )
def unlink(yes: bool) -> None:
    """
    Unlink a linked Valohai project.
    """
    dir = get_project_directory()
    project = get_project()
    if not project:
        warn(f'{dir} or its parents do not seem linked to a project.')
        return
    if not yes:
        # Require explicit confirmation before discarding the link (aborts on "no").
        click.confirm(
            'Unlink {dir} from {name}?'.format(
                dir=click.style(project.directory, bold=True),
                name=click.style(project.name, bold=True),
            ),
            abort=True,
        )
    # Work on a copy so the persisted mapping is replaced in one assignment below.
    links = settings.links.copy()
    # NOTE(review): pop() raises KeyError if the link was registered on a parent
    # directory rather than `dir` itself — confirm get_project_directory() always
    # returns the exact linked directory.
    links.pop(dir)
    settings.persistence.set('links', links)
    settings.persistence.save()
    success('Unlinked {dir} from {name}.'.format(dir=click.style(dir, bold=True), name=click.style(project.name, bold=True)))
def choose_command(directory: str) -> str:
    """Interactively determine the command to run on Valohai; return it as a string."""
    scripts = sorted(find_scripts(directory))
    while True:
        if scripts:
            click.echo(
                'We found these script files in this directory.\n'
                'If any of them is the script file you\'d like to use for Valohai, type its number.\n'
                'Otherwise, you can just type the command to run.'
            )
            choices = [{'name': f'{interpreter} {script}'} for (interpreter, script) in scripts]
            command = prompt_from_list(
                choices,
                'Choose a number or enter a command.',
                nonlist_validator=lambda s: s.strip(),
            )
            # A dict means a pick from the list; unwrap to its command string.
            if isinstance(command, dict):
                command = command['name']
        else:  # pragma: no cover
            command = click.prompt(
                'We couldn\'t find script files in this directory.\n'
                'Please enter the command you\'d like to run in the Valohai platform.\n'
            )
        if not command:  # pragma: no cover
            error('Please try again.')
            continue
        if click.confirm(f'Is {click.style(command, bold=True)} correct?'):
            break
    success(f'Got it! Using {command} as the command.')
    return str(command)
def login(
    username: str,
    password: str,
    token: Optional[str],
    host: Optional[str],
    yes: bool,
    verify_ssl: bool,
) -> None:
    """Log in into Valohai."""
    # Existing session: confirm before clobbering it, unless --yes was passed.
    if settings.user and settings.token:
        current_username = settings.user['username']
        current_host = settings.host
        if not yes:
            click.confirm((
                f'You are already logged in as {current_username} on {current_host}.\n'
                'Are you sure you wish to acquire a new token?'), abort=True)
        else:
            info(
                f'--yes set: ignoring pre-existing login for {current_username} on {current_host}'
            )
    if not (token or username or password or host):
        # Don't show the banner if this seems like a non-interactive login.
        click.secho(f'Welcome to Valohai CLI {__version__}!', bold=True)
    host = validate_host(host)
    if token:
        # A token replaces username/password entirely; passing both is an error.
        if username or password:
            error('Token is mutually exclusive with username/password')
            raise Exit(1)
        click.echo(f'Using token {token[:5]}... to log in.')
    else:
        token = do_user_pass_login(
            host=host,
            username=username,
            password=password,
            verify_ssl=verify_ssl,
        )
    # Verify the token actually works against the API before persisting anything.
    click.echo(f'Verifying API token on {host}...')
    with APISession(host, token, verify_ssl=verify_ssl) as sess:
        user_data = sess.get('/api/v0/users/me/').json()
    settings.persistence.update(
        host=host,
        user=user_data,
        token=token,
        verify_ssl=verify_ssl,
    )
    settings.persistence.save()
    success(f"Logged in. Hey {user_data.get('username', 'there')}!")
    if not verify_ssl:
        warn(
            "SSL verification is off. This may leave you vulnerable to man-in-the-middle attacks."
        )
def execute(self, **kwargs: Any) -> None:
    """
    Execute the creation of the execution. (Heh.)

    This is the Click callback for this command.

    :param kwargs: Assorted kwargs (as passed in by Click).
    :return: Naught
    """
    options, parameters, inputs = self._sift_kwargs(kwargs)
    payload = {
        'commit': self.commit,
        'inputs': inputs,
        'parameters': parameters,
        'project': self.project.id,
        'step': self.step.name,
    }
    # Optional fields are only included when set so the API applies its defaults.
    if self.environment:
        payload['environment'] = self.environment
    if self.image:
        payload['image'] = self.image
    if self.title:
        payload['title'] = self.title
    if self.environment_variables:
        payload['environment_variables'] = self.environment_variables
    if self.tags:
        payload['tags'] = self.tags
    if self.runtime_config:
        payload['runtime_config'] = self.runtime_config
    resp = request(
        method='post',
        url='/api/v0/executions/',
        json=payload,
        api_error_class=ExecutionCreationAPIError,
    ).json()
    success(
        f"Execution #{resp['counter']} created. See {resp['urls']['display']}"
    )
    ctx = get_current_context()
    if self.download_directory:
        # Deferred import — NOTE(review): presumably to avoid a circular import
        # at module load time; confirm.
        from valohai_cli.commands.execution.outputs import outputs as outputs_command
        # Invoke the `outputs` command in sync mode to download results as they appear.
        ctx.invoke(
            outputs_command,
            counter=resp['counter'],
            sync=True,
            download_directory=self.download_directory,
        )
    if self.watch:
        from valohai_cli.commands.execution.watch import watch
        ctx.invoke(watch, counter=resp['counter'])
def set_project_link(dir, project, inform=False):
    """Record that directory `dir` is linked to `project` and persist the settings."""
    links = settings.get('links', {})
    links[dir] = project
    settings['links'] = links
    # Sanity check: the freshly stored link must resolve back to the same project.
    assert get_project(dir).id == project['id']
    settings.save()
    if inform:
        styled_dir = click.style(dir, bold=True)
        styled_name = click.style(project['name'], bold=True)
        success('Linked {dir} to {name}.'.format(dir=styled_dir, name=styled_name))
def create_adhoc_commit_from_tarball(project: Project, tarball: str, description: str = '') -> Dict[str, Any]:
    """
    Using a precreated ad-hoc tarball, create or retrieve an ad-hoc commit of it on the Valohai host.

    :param project: Project
    :param tarball: Tgz tarball path, likely created by the packager
    :param description: Optional description for the commit
    :return: Commit response object from API
    """
    existing = _get_pre_existing_commit(tarball)
    if existing:
        # The exact same content was uploaded before; reuse that commit.
        success(f"Ad-hoc code {existing['identifier']} already uploaded")
        return existing
    return _upload_commit_code(project, tarball, description)
def delete(counters, purge_outputs=False):
    """
    Delete one or more executions, optionally purging their outputs as well.
    """
    project = get_project(require=True)
    deleted_count = 0
    for counter in sorted(IntegerRange.parse(counters).as_set()):
        if delete_execution(project, counter, purge_outputs):
            deleted_count += 1
    if not deleted_count:
        warn('Nothing was deleted.')
        sys.exit(1)
    success(f'Deleted {deleted_count} executions.')
def logout(yes: bool) -> None:
    """Remove local authentication token."""
    user = settings.user
    token = settings.token
    if not user and not token:
        click.echo('You\'re not logged in.')
        return
    if user and not yes:
        # Require explicit confirmation (aborts on "no").
        prompt = (
            f'You are logged in as {user["username"]} (on {settings.host}).\n'
            'Are you sure you wish to remove the authentication token?'
        )
        click.confirm(prompt, abort=True)
    settings.persistence.update(host=None, user=None, token=None)
    settings.persistence.save()
    success('Logged out.')
def delete(counters: Sequence[str], purge_outputs: bool = False) -> None:
    """
    Delete one or more executions, optionally purging their outputs as well.
    """
    project = get_project(require=True)
    assert project
    counter_set = IntegerRange.parse(counters).as_set()
    deleted = 0
    for counter in sorted(counter_set):
        if delete_execution(project, counter, purge_outputs):
            deleted += 1
    if not deleted:
        warn('Nothing was deleted.')
        sys.exit(1)
    success(f'Deleted {deleted} executions.')
def create_adhoc_commit_from_tarball(*, project: Project, tarball: str, yaml_path: Optional[str] = None, description: str = '') -> Dict[str, Any]:
    """
    Using a precreated ad-hoc tarball, create or retrieve an ad-hoc commit of it on the Valohai host.

    :param project: Project
    :param tarball: Tgz tarball path, likely created by the packager
    :param yaml_path: Optional custom yaml path attached to the command.
    :param description: Optional description for the commit
    :return: Commit response object from API
    """
    # Fall back to the project's configured yaml path when none is given.
    effective_yaml_path = yaml_path or project.get_yaml_path()
    existing = _get_pre_existing_commit(tarball)
    if existing:
        # The exact same content was uploaded before; reuse that commit.
        success(f"Ad-hoc code {existing['identifier']} already uploaded")
        return existing
    return _upload_commit_code(
        project=project,
        tarball=tarball,
        yaml_path=effective_yaml_path,
        description=description,
    )
def fetch() -> None:
    """
    Fetch new commits for the linked project.
    """
    project = get_project(require=True)
    data = request('post', f'/api/v0/projects/{project.id}/fetch/').json()
    commits = data.get('commits', ())
    if not commits:
        info('No new commits.')
    else:
        for commit in commits:
            success(f"Fetched: {commit['ref']} ({commit['identifier']})")
        success(f'{len(commits)} new commits were fetched!')
    # Surface any per-repository fetch errors reported by the API.
    for fetch_error in data.get('errors', ()):
        warning(fetch_error)
def choose_image():
    """Interactively prompt for a Docker image name and return it."""
    click.echo(
        'Now let\'s pick a Docker image to use with your code.\n'
        'Here are some recommended choices, but feel free to type in one of your own.'
    )
    while True:
        selection = prompt_from_list(
            IMAGE_SUGGESTIONS,
            'Choose a number or enter a Docker image name.',
            nonlist_validator=lambda s: s.strip())
        # A dict means a pick from the suggestion list; unwrap its name.
        image = selection['name'] if isinstance(selection, dict) else selection
        if click.confirm('Is {image} correct?'.format(image=click.style(image, bold=True))):
            break
    success('Great! Using {image}.'.format(image=image))
    return image
def validate_file(filename):
    """
    Validate `filename`, print its errors, and return the number of errors.

    :param filename: YAML filename
    :type filename: str
    :return: Number of errors
    :rtype: int
    """
    with open(filename, 'r') as infp:
        try:
            data = read_yaml(infp)
        except Exception as e:
            # Unparseable YAML counts as a single error.
            click.secho('%s: could not parse YAML: %s' % (filename, e), fg='red', bold=True)
            return 1
    # Sort so the most relevant errors come first, deterministically.
    errors = sorted(
        get_validator().iter_errors(data),
        key=lambda error: (relevance(error), repr(error.path)),
    )
    if not errors:
        success('%s: No errors' % filename)
        return 0
    click.secho('%s: %d errors' % (filename, len(errors)), fg='yellow', bold=True)
    for error in errors:
        # Drop structural schema keywords to keep the path human-readable.
        simplified_schema_path = [
            el
            for el in list(error.relative_schema_path)[:-1]
            if el not in ('properties', 'items')
        ]
        obj_path = [str(el) for el in error.path]
        click.echo(' {validator} validation on {schema_path}: {message} ({path})'.format(
            validator=click.style(error.validator.title(), bold=True),
            schema_path=click.style('.'.join(simplified_schema_path), bold=True),
            message=click.style(error.message, fg='red'),
            path=click.style('.'.join(obj_path), bold=True),
        ))
    click.echo()
    return len(errors)
def yaml_wizard(directory: str) -> None:
    """
    Interactively build and write a valohai.yaml file into `directory`.

    Loops until the user confirms both the generated content and the write.
    """
    while True:
        command = choose_command(directory)
        image = choose_image()
        yaml = YAML_SKELLINGTON.format(
            image=image,
            command=command,
        )
        click.secho('Here\'s a preview of the Valohai.yaml file I\'m going to create.', fg='cyan')
        # click.echo instead of bare print(): consistent with all other output in
        # this file and safe for redirected/Windows streams.
        click.echo(yaml)
        yaml_path = os.path.join(directory, 'valohai.yaml')
        if not click.confirm(f'Write this to {click.style(yaml_path, bold=True)}?'):  # pragma: no cover
            click.echo('Okay, let\'s try again...')
            continue
        with codecs.open(yaml_path, 'w', 'UTF-8') as out_fp:
            out_fp.write(yaml)
        success(f'All done! Wrote {yaml_path}.')
        break
def create_adhoc_commit(project):
    """
    Create an ad-hoc tarball and commit of the project directory.

    :param project: Project
    :type project: valohai_cli.models.project.Project
    :return: Commit response object from API
    :rtype: dict[str, object]
    """
    tarball = None
    try:
        click.echo('Packaging {dir}...'.format(dir=project.directory))
        tarball = package_directory(project.directory, progress=True)
        # TODO: We could check whether the commit is known already
        size = os.stat(tarball).st_size
        click.echo('Uploading {size:.2f} KiB...'.format(size=size / 1024.))
        # Context manager ensures the tarball file handle is closed even on
        # failure — the previous version opened it inline and leaked it.
        with open(tarball, 'rb') as tarball_fp:
            upload = MultipartEncoder(
                {'data': ('data.tgz', tarball_fp, 'application/gzip')})
            prog = click.progressbar(length=upload.len, width=0)
            prog.is_hidden = (size < 524288)  # Don't bother with the bar if the upload is small
            with prog:
                def callback(monitor_state):
                    prog.pos = monitor_state.bytes_read
                    prog.update(0)  # Step is 0 because we set pos above
                monitor = MultipartEncoderMonitor(upload, callback)
                resp = request(
                    'post',
                    '/api/v0/projects/{id}/import-package/'.format(id=project.id),
                    data=monitor,
                    headers={
                        'Content-Type': monitor.content_type
                    },
                ).json()
        success('Uploaded ad-hoc code {identifier}'.format(
            identifier=resp['identifier']))
    finally:
        # Always remove the temporary tarball, even if packaging/upload failed.
        if tarball:
            os.unlink(tarball)
    return resp
def fetch():
    """
    Fetch new commits for the linked project.
    """
    project = get_project(require=True)
    data = request('post', '/api/v0/projects/{id}/fetch/'.format(id=project.id)).json()
    commits = data.get('commits', ())
    if commits:
        for commit in commits:
            success('Fetched: {ref} ({identifier})'.format(
                ref=commit['ref'], identifier=commit['identifier']))
        success('{n} new commits were fetched!'.format(n=len(commits)))
    else:
        click.echo('No new commits.')
    # Surface any per-repository fetch errors reported by the API.
    for error in data.get('errors', ()):
        warning(error)
def export_docker_image(image: str, output_path: str, print_progress: bool = True) -> None:
    """
    Export the Docker image `image` to the tar file `output_path`, with visual progress.

    :param image: Image specifier
    :param output_path: Output pathname
    :raises RuntimeError: if the subprocess exposes no stdout stream
    :raises subprocess.CalledProcessError: if `docker save` exits non-zero
    """
    import shutil  # local import: only needed for terminal width below
    proc = subprocess.Popen(['docker', 'save', image], bufsize=-1, stdout=subprocess.PIPE)
    if not proc.stdout:
        raise RuntimeError("No output stream")
    # Only render progress when attached to a TTY.
    print_progress = (print_progress and sys.stdout.isatty())
    # Don't bother with acquiring the image size if we're not going to print it anyway
    image_size = (get_docker_image_size(image) if print_progress else None)
    with open(output_path, 'wb') as outfp:
        if print_progress:
            click.echo('Initializing export...\r', nl=False, err=True)
        while proc.poll() is None:
            chunk = proc.stdout.read(1048576)
            if not chunk:
                break
            outfp.write(chunk)
            if print_progress:
                # click.get_terminal_size() was removed in Click 8.1;
                # shutil.get_terminal_size() is the stdlib equivalent
                # (returns an indexable (columns, lines) pair).
                width = shutil.get_terminal_size()[0]
                status_text = '{} {}: {} / {}'.format(
                    get_spinner_character(),
                    image,
                    filesizeformat(outfp.tell()),
                    (filesizeformat(image_size) if image_size else 'unknown size'),
                )
                click.echo(status_text.ljust(width - 1), nl=False, err=True)
    if proc.returncode:
        raise subprocess.CalledProcessError(proc.returncode, 'docker save ' + image)
    success(
        f'{image} exported: {filesizeformat(os.stat(output_path).st_size)}')