async def bake(
    root: Root,
    batch: str,
    local_executor: bool,
    meta_from_file: Optional[str],
    param: List[Tuple[str, str]],
    name: Optional[str],
    tag: Sequence[str],
) -> None:
    """Start a batch.

    Run BATCH pipeline remotely on the cluster.
    """
    async with AsyncExitStack() as exit_stack:
        client = await exit_stack.enter_async_context(neuro_sdk.get())
        storage: Storage = await exit_stack.enter_async_context(ApiStorage(client))
        runner = await exit_stack.enter_async_context(
            BatchRunner(root.config_dir, root.console, client, storage, root)
        )
        # Explicit --param values take precedence over values loaded
        # from the meta file.
        params = dict(param)
        if meta_from_file is not None:
            file_meta = parse_bake_meta(LocalPath(meta_from_file))
            params = {**file_meta, **params}
        await runner.bake(
            batch_name=batch,
            local_executor=local_executor,
            params=params,
            name=name,
            tags=tag,
        )
async def async_shell_complete(
    self,
    root: Root,
    ctx: click.Context,
    param: click.Parameter,
    incomplete: str,
) -> List[CompletionItem]:
    """Complete task ids for a bake attempt.

    Reads the bake id and attempt number from the already-parsed CLI
    parameters, lists the attempt's tasks, and returns the dotted yaml
    ids that start with *incomplete*.  Which tasks are offered depends
    on the completer's ``_include_finished`` / ``_include_started``
    flags.
    """
    variants: List[str] = []
    bake_id = ctx.params[self._bake_id_param_name]
    attempt_no = ctx.params[self._attempt_no_param_name]
    async with AsyncExitStack() as stack:
        client = await stack.enter_async_context(neuro_sdk.get())
        storage: Storage = await stack.enter_async_context(ApiStorage(client))
        runner: BatchRunner = await stack.enter_async_context(
            BatchRunner(root.config_dir, root.console, client, storage, root)
        )
        try:
            bake_id = await resolve_bake(
                bake_id, project=runner.project_id, storage=storage
            )
            attempt = await runner.get_bake_attempt(bake_id, attempt_no=attempt_no)
        except ResourceNotFound:
            # Unknown bake/attempt: offer no completions instead of failing.
            return []
        tasks = [
            task
            async for task in storage.bake(id=bake_id)
            .attempt(id=attempt.id)
            .list_tasks()
        ]
        if self._include_finished:
            variants.extend(
                ".".join(task.yaml_id)
                for task in tasks
                if task.status.is_finished
            )
        if self._include_started:
            # BUGFIX: this branch previously repeated the ``is_finished``
            # filter, so it never added running tasks and merely duplicated
            # the finished ones.  "Started" tasks are the currently running
            # ones; finished tasks are handled by the branch above.
            variants.extend(
                ".".join(task.yaml_id)
                for task in tasks
                if task.status.is_running
            )
    return [
        CompletionItem(task)
        for task in variants
        if task.startswith(incomplete)
    ]
async def restart(
    root: Root,
    bake: str,
    attempt: int,
    from_failed: bool,
    local_executor: bool,
) -> None:
    """Start a batch.

    Run BATCH pipeline remotely on the cluster.
    """
    async with AsyncExitStack() as exit_stack:
        client = await exit_stack.enter_async_context(neuro_sdk.get())
        storage: Storage = await exit_stack.enter_async_context(ApiStorage(client))
        runner = await exit_stack.enter_async_context(
            BatchRunner(root.config_dir, root.console, client, storage, root)
        )
        # The user may pass either a bake id or a bake name; normalize first.
        resolved_id = await resolve_bake(
            bake, project=runner.project_id, storage=storage
        )
        await runner.restart(
            resolved_id,
            attempt_no=attempt,
            from_failed=from_failed,
            local_executor=local_executor,
        )
async def inspect(
    root: Root,
    bake: str,
    attempt: int,
    output_graph: Optional[str],
    dot: bool,
    pdf: bool,
    view: bool,
) -> None:
    """Inspect a bake.

    Display a list of started/finished tasks of BAKE\\_ID.
    """
    async with AsyncExitStack() as exit_stack:
        client = await exit_stack.enter_async_context(neuro_sdk.get())
        storage: Storage = await exit_stack.enter_async_context(ApiStorage(client))
        runner = await exit_stack.enter_async_context(
            BatchRunner(root.config_dir, root.console, client, storage, root)
        )
        # Convert the optional CLI string into a path only when given.
        real_output: Optional[LocalPath] = (
            LocalPath(output_graph) if output_graph is not None else None
        )
        resolved_id = await resolve_bake(
            bake, project=runner.project_id, storage=storage
        )
        await runner.inspect(
            resolved_id,
            attempt_no=attempt,
            output=real_output,
            save_dot=dot,
            save_pdf=pdf,
            view_pdf=view,
        )
async def execute(
    root: Root,
    bake_id: str,
) -> None:
    """Start a batch.

    Run BATCH pipeline remotely on the cluster.
    """
    # neuro-flow execute is run in linux container only,
    # Linux signals are always defined.
    ignored_signals = (
        signal.SIGHUP,
        signal.SIGINT,
        signal.SIGQUIT,
        signal.SIGTSTP,
        signal.SIGTERM,
        signal.SIGTTIN,
        signal.SIGTTOU,
        signal.SIGWINCH,
    )
    # ignore everything, use neuro-flow cancel to stop the master job.
    for signame in ignored_signals:
        signal.signal(signame, signal.SIG_IGN)
    async with AsyncExitStack() as exit_stack:
        client = await exit_stack.enter_async_context(neuro_sdk.get())
        storage: Storage = await exit_stack.enter_async_context(ApiStorage(client))
        runner = await exit_stack.enter_async_context(
            BatchRunner(root.config_dir, root.console, client, storage, root)
        )
        await runner.process(bake_id)
async def mkvolumes(
    root: Root,
) -> None:
    """Create all remote folders for volumes."""
    async with AsyncExitStack() as exit_stack:
        client = await exit_stack.enter_async_context(neuro_sdk.get())
        storage: Storage = await exit_stack.enter_async_context(ApiStorage(client))
        runner = await exit_stack.enter_async_context(
            LiveRunner(root.config_dir, root.console, client, storage, root)
        )
        await runner.mkvolumes()
async def build(root: Root, force_overwrite: bool, image: str) -> None:
    """Build an image.

    Assemble the IMAGE remotely and publish it.
    """
    async with AsyncExitStack() as exit_stack:
        client = await exit_stack.enter_async_context(neuro_sdk.get())
        storage: Storage = await exit_stack.enter_async_context(ApiStorage(client))
        runner = await exit_stack.enter_async_context(
            LiveRunner(root.config_dir, root.console, client, storage, root)
        )
        # The sentinel "ALL" requests a build of every image in the flow.
        if image != "ALL":
            await runner.build(image, force_overwrite=force_overwrite)
        else:
            await runner.build_all(force_overwrite=force_overwrite)
async def async_shell_complete(
    self,
    root: Root,
    ctx: click.Context,
    param: click.Parameter,
    incomplete: str,
) -> List[CompletionItem]:
    """Complete job suffixes for the job id already typed on the CLI."""
    job_id = ctx.params[self._job_id_param_name]
    async with AsyncExitStack() as exit_stack:
        client = await exit_stack.enter_async_context(neuro_sdk.get())
        storage: Storage = await exit_stack.enter_async_context(ApiStorage(client))
        runner = await exit_stack.enter_async_context(
            LiveRunner(root.config_dir, root.console, client, storage, root)
        )
        suffixes = await runner.list_suffixes(job_id)
        matches = [s for s in suffixes if s.startswith(incomplete)]
        return [CompletionItem(s) for s in matches]
async def async_shell_complete(
    self,
    root: Root,
    ctx: click.Context,
    param: click.Parameter,
    incomplete: str,
) -> List[CompletionItem]:
    """Complete job ids defined in the live flow (optionally plus "ALL")."""
    async with AsyncExitStack() as exit_stack:
        client = await exit_stack.enter_async_context(neuro_sdk.get())
        storage: Storage = await exit_stack.enter_async_context(ApiStorage(client))
        runner = await exit_stack.enter_async_context(
            LiveRunner(root.config_dir, root.console, client, storage, root)
        )
        candidates = list(runner.flow.job_ids)
        if self._allow_all:
            candidates.append("ALL")
        return [
            CompletionItem(job_id)
            for job_id in candidates
            if job_id.startswith(incomplete)
        ]
async def async_shell_complete(
    self,
    root: Root,
    ctx: click.Context,
    param: click.Parameter,
    incomplete: str,
) -> List[CompletionItem]:
    """Complete project yaml ids known to the storage backend."""
    candidates = []
    async with AsyncExitStack() as exit_stack:
        client = await exit_stack.enter_async_context(neuro_sdk.get())
        storage: Storage = await exit_stack.enter_async_context(ApiStorage(client))
        try:
            async for project in storage.list_projects():
                candidates.append(project.yaml_id)
        except ValueError:
            # Best-effort completion: a listing failure yields no candidates.
            pass
    return [
        CompletionItem(yaml_id)
        for yaml_id in candidates
        if yaml_id.startswith(incomplete)
    ]
async def download(
    root: Root,
    volume: str,
) -> None:
    """Download volume.

    Download remote files to local for VOLUME,
    use `download ALL` for downloading all volumes."""
    async with AsyncExitStack() as exit_stack:
        client = await exit_stack.enter_async_context(neuro_sdk.get())
        storage: Storage = await exit_stack.enter_async_context(ApiStorage(client))
        runner = await exit_stack.enter_async_context(
            LiveRunner(root.config_dir, root.console, client, storage, root)
        )
        # "ALL" is a sentinel meaning every volume in the flow.
        if volume == "ALL":
            await runner.download_all()
        else:
            await runner.download(volume)
async def clean(
    root: Root,
    volume: str,
) -> None:
    """Clean volume.

    Clean remote files on VOLUME,
    use `clean ALL` for cleaning up all volumes."""
    async with AsyncExitStack() as exit_stack:
        client = await exit_stack.enter_async_context(neuro_sdk.get())
        storage: Storage = await exit_stack.enter_async_context(ApiStorage(client))
        runner = await exit_stack.enter_async_context(
            LiveRunner(root.config_dir, root.console, client, storage, root)
        )
        # "ALL" is a sentinel meaning every volume in the flow.
        if volume == "ALL":
            await runner.clean_all()
        else:
            await runner.clean(volume)
async def cancel(
    root: Root,
    bake: str,
    attempt: int,
) -> None:
    """Cancel a bake.

    Cancel a bake execution by stopping all started tasks.
    """
    async with AsyncExitStack() as exit_stack:
        client = await exit_stack.enter_async_context(neuro_sdk.get())
        storage: Storage = await exit_stack.enter_async_context(ApiStorage(client))
        runner = await exit_stack.enter_async_context(
            BatchRunner(root.config_dir, root.console, client, storage, root)
        )
        # Accept a bake name as well as a bake id.
        resolved_id = await resolve_bake(
            bake, project=runner.project_id, storage=storage
        )
        await runner.cancel(resolved_id, attempt_no=attempt)
async def bakes(
    root: Root,
    tag: Sequence[str],
    since: Optional[str],
    until: Optional[str],
    recent_first: bool,
) -> None:
    """List existing bakes."""
    async with AsyncExitStack() as exit_stack:
        client = await exit_stack.enter_async_context(neuro_sdk.get())
        storage: Storage = await exit_stack.enter_async_context(ApiStorage(client))
        runner = await exit_stack.enter_async_context(
            BatchRunner(root.config_dir, root.console, client, storage, root)
        )
        # Date bounds arrive as raw CLI strings; parse them here.
        await runner.list_bakes(
            tags=set(tag),
            since=_parse_date(since),
            until=_parse_date(until),
            recent_first=recent_first,
        )
async def show(
    root: Root,
    bake: str,
    attempt: int,
    task_id: str,
    raw: bool,
) -> None:
    """Show output of baked task.

    Display a logged output of TASK\\_ID from BAKE\\_ID.
    """
    async with AsyncExitStack() as exit_stack:
        client = await exit_stack.enter_async_context(neuro_sdk.get())
        storage: Storage = await exit_stack.enter_async_context(ApiStorage(client))
        runner = await exit_stack.enter_async_context(
            BatchRunner(root.config_dir, root.console, client, storage, root)
        )
        # Accept a bake name as well as a bake id.
        resolved_id = await resolve_bake(
            bake, project=runner.project_id, storage=storage
        )
        await runner.logs(resolved_id, task_id, attempt_no=attempt, raw=raw)
async def async_shell_complete(
    self,
    root: Root,
    ctx: click.Context,
    param: click.Parameter,
    incomplete: str,
) -> List[CompletionItem]:
    """Complete volume ids that have a local folder (optionally plus "ALL")."""
    async with AsyncExitStack() as exit_stack:
        client = await exit_stack.enter_async_context(neuro_sdk.get())
        storage: Storage = await exit_stack.enter_async_context(ApiStorage(client))
        runner = await exit_stack.enter_async_context(
            LiveRunner(root.config_dir, root.console, client, storage, root)
        )
        # Only volumes with a local counterpart can be synced.
        candidates = [
            volume.id
            for volume in runner.flow.volumes.values()
            if volume.local is not None
        ]
        if self._allow_all:
            candidates.append("ALL")
        return [
            CompletionItem(vol_id)
            for vol_id in candidates
            if vol_id.startswith(incomplete)
        ]
async def async_shell_complete(
    self,
    root: Root,
    ctx: click.Context,
    param: click.Parameter,
    incomplete: str,
) -> List[CompletionItem]:
    """Complete bake ids and bake names known to the runner."""
    candidates = []
    async with AsyncExitStack() as exit_stack:
        client = await exit_stack.enter_async_context(neuro_sdk.get())
        storage: Storage = await exit_stack.enter_async_context(ApiStorage(client))
        runner: BatchRunner = await exit_stack.enter_async_context(
            BatchRunner(root.config_dir, root.console, client, storage, root)
        )
        try:
            async for bake in runner.get_bakes():
                candidates.append(bake.id)
                # A named bake can also be referenced by its name.
                if bake.name is not None:
                    candidates.append(bake.name)
        except ValueError:
            # Best-effort completion: a listing failure yields no candidates.
            pass
    return [
        CompletionItem(candidate)
        for candidate in candidates
        if candidate.startswith(incomplete)
    ]
async def clear_cache(
    root: Root,
    batch: str,
    task_id: Optional[str],
) -> None:
    """Clear cache.

    Use `neuro-flow clear-cache <BATCH>` for cleaning up the cache for BATCH;
    Use `neuro-flow clear-cache <BATCH> <TASK_ID>` for cleaning up the cache
    for TASK_ID in BATCH;

    `neuro-flow clear-cache ALL` clears all caches.
    """
    async with AsyncExitStack() as exit_stack:
        client = await exit_stack.enter_async_context(neuro_sdk.get())
        storage: Storage = await exit_stack.enter_async_context(ApiStorage(client))
        runner = await exit_stack.enter_async_context(
            BatchRunner(root.config_dir, root.console, client, storage, root)
        )
        # "ALL" clears every batch's cache; None signals that to the runner.
        if batch != "ALL":
            await runner.clear_cache(batch, task_id)
        else:
            await runner.clear_cache(None)