def test_expr_validation_not_context_field() -> None:
    """A lookup of a missing attribute on the flow context is reported."""
    pos = Pos(0, 0, LocalPath("<default>"))
    expr = StrExpr(pos, pos, pattern="${{ flow.foo }}")
    errors = validate_expr(expr, BatchContext)
    assert errors[0].args[0] == "'BatchFlowCtx' has no attribute 'foo'"
def test_expr_validation_bad_set_attr() -> None:
    """Attribute access on the tags set context is rejected."""
    pos = Pos(0, 0, LocalPath("<default>"))
    expr = StrExpr(pos, pos, pattern="${{ tags.anything }}")
    errors = validate_expr(expr, BatchContext)
    assert errors[0].args[0] == "'TagsCtx' has no attribute 'anything'"
def test_expr_validation_bad_subcontext_lookup() -> None:
    """A missing attribute on a known need's DepCtx is reported."""
    pos = Pos(0, 0, LocalPath("<default>"))
    expr = StrExpr(pos, pos, pattern="${{ needs.task.foo }}")
    errors = validate_expr(expr, BatchTaskContext, known_needs={"task"})
    assert errors[0].args[0] == "'DepCtx' has no attribute 'foo'"
def test_expr_validation_unknown_context() -> None:
    """A top-level name that is not a known context is reported."""
    pos = Pos(0, 0, LocalPath("<default>"))
    expr = StrExpr(pos, pos, pattern="${{ something_new }}")
    errors = validate_expr(expr, BatchContext)
    assert errors[0].args[0] == "Unknown context 'something_new'"
def test_expr_validation_ok_for_property_access() -> None:
    """Accessing a declared property of a volume context validates cleanly."""
    pos = Pos(0, 0, LocalPath("<default>"))
    expr = StrExpr(pos, pos, pattern="${{ volumes.volume.ref_rw }}")
    errors = validate_expr(expr, BatchContext)
    assert errors == []
def test_expr_validation_set_indexing() -> None:
    """Subscript access on the tags set context is rejected."""
    pos = Pos(0, 0, LocalPath("<default>"))
    expr = StrExpr(pos, pos, pattern="${{ tags['flow_id'] }}")
    errors = validate_expr(expr, BatchContext)
    assert errors[0].args[0] == "'TagsCtx' is not subscriptable"
def test_expr_validation_list_comp_ok() -> None:
    """A list comprehension that uses no contexts validates cleanly."""
    pos = Pos(0, 0, LocalPath("<default>"))
    expr = StrExpr(pos, pos, pattern="${{ [x * x for x in range(5) ] }}")
    errors = validate_expr(expr, BatchContext)
    assert errors == []
def test_expr_validation_ok() -> None:
    """A lookup of an existing flow attribute validates cleanly."""
    pos = Pos(0, 0, LocalPath("<default>"))
    expr = StrExpr(pos, pos, pattern="${{ flow.flow_id }}")
    errors = validate_expr(expr, BatchContext)
    assert errors == []
def test_non_template_with_unknown_chars() -> None: assert [ Token( "TEXT", "abc!jik", Pos(0, 0, LocalPath("<test>")), Pos(0, 7, LocalPath("<test>")), ) ] == list(tokenize("abc!jik", Pos(0, 0, LocalPath("<test>"))))
def test_expr_validation_invalid_need() -> None:
    """Referencing a task that is not in known_needs is reported."""
    pos = Pos(0, 0, LocalPath("<default>"))
    expr = StrExpr(pos, pos, pattern="${{ needs.fff }}")
    errors = validate_expr(expr, BatchTaskContext, known_needs=set())
    assert errors[0].args[0] == "Task 'fff' is not available under needs context"
def test_non_template() -> None: assert [ Token( "TEXT", "abc def jik", Pos(0, 0, LocalPath("<test>")), Pos(0, 13, LocalPath("<test>")), ) ] == list(tokenize("abc def jik", Pos(0, 0, LocalPath("<test>"))))
def test_expr_validation_invalid_input() -> None:
    """Referencing an input that is not in known_inputs is reported."""
    pos = Pos(0, 0, LocalPath("<default>"))
    expr = StrExpr(pos, pos, pattern="${{ inputs.fff }}")
    errors = validate_expr(expr, BatchActionContext[EmptyRoot], known_inputs=set())
    assert errors[0].args[0] == "Input 'fff' is undefined"
async def bake(
    root: Root,
    batch: str,
    local_executor: bool,
    meta_from_file: Optional[str],
    param: List[Tuple[str, str]],
    name: Optional[str],
    tag: Sequence[str],
) -> None:
    """Start a batch.

    Run BATCH pipeline remotely on the cluster.
    """
    # NOTE: the docstring above is user-facing click help text; keep it stable.
    async with AsyncExitStack() as stack:
        client = await stack.enter_async_context(neuro_sdk.get())
        storage: Storage = await stack.enter_async_context(ApiStorage(client))
        runner = await stack.enter_async_context(
            BatchRunner(root.config_dir, root.console, client, storage, root)
        )
        # Command-line --param overrides take precedence over file-provided meta.
        params = dict(param)
        if meta_from_file is not None:
            bake_meta = parse_bake_meta(LocalPath(meta_from_file))
            params = {**bake_meta, **params}
        await runner.bake(
            batch_name=batch,
            local_executor=local_executor,
            params=params,
            name=name,
            tags=tag,
        )
async def test_volumes(live_config_loader: ConfigLoader) -> None:
    """All volume fields of the 'live-full' flow resolve as expected."""
    flow = await RunningLiveFlow.create(live_config_loader, "live-full")
    ctx = flow._ctx
    assert ctx.volumes.keys() == {"volume_a", "volume_b"}

    # volume_a: read-only, has a local counterpart.
    vol_a = ctx.volumes["volume_a"]
    assert vol_a.id == "volume_a"
    assert vol_a.remote == URL("storage:dir")
    assert vol_a.mount == RemotePath("/var/dir")
    assert vol_a.read_only
    assert vol_a.local == LocalPath("dir")
    assert vol_a.full_local_path == live_config_loader.workspace / "dir"
    assert vol_a.ref_ro == "storage:dir:/var/dir:ro"
    assert vol_a.ref_rw == "storage:dir:/var/dir:rw"
    assert vol_a.ref == "storage:dir:/var/dir:ro"

    # volume_b: read-write, no local counterpart.
    vol_b = ctx.volumes["volume_b"]
    assert vol_b.id == "volume_b"
    assert vol_b.remote == URL("storage:other")
    assert vol_b.mount == RemotePath("/var/other")
    assert not vol_b.read_only
    assert vol_b.local is None
    assert vol_b.full_local_path is None
    assert vol_b.ref_ro == "storage:other:/var/other:ro"
    assert vol_b.ref_rw == "storage:other:/var/other:rw"
    assert vol_b.ref == "storage:other:/var/other:rw"
async def inspect(
    root: Root,
    bake: str,
    attempt: int,
    output_graph: Optional[str],
    dot: bool,
    pdf: bool,
    view: bool,
) -> None:
    """Inspect a bake.

    Display a list of started/finished tasks of BAKE\\_ID.
    """
    # NOTE: the docstring above is user-facing click help text; keep it stable.
    async with AsyncExitStack() as stack:
        client = await stack.enter_async_context(neuro_sdk.get())
        storage: Storage = await stack.enter_async_context(ApiStorage(client))
        runner = await stack.enter_async_context(
            BatchRunner(root.config_dir, root.console, client, storage, root)
        )
        real_output: Optional[LocalPath] = (
            LocalPath(output_graph) if output_graph is not None else None
        )
        bake_id = await resolve_bake(bake, project=runner.project_id, storage=storage)
        await runner.inspect(
            bake_id,
            attempt_no=attempt,
            output=real_output,
            save_dot=dot,
            save_pdf=pdf,
            view_pdf=view,
        )
def _make_ast_call(
    args: Mapping[str, Union[bool, int, float, str]]
) -> ast.BaseActionCall:
    """Build a minimal BaseActionCall AST node targeting "ws:test" with *args*."""
    fake = LocalPath("fake")

    def _str_expr(value: Optional[str]) -> SimpleStrExpr:
        # Positions are irrelevant for these fixtures; use a dummy location.
        return SimpleStrExpr(Pos(0, 0, fake), Pos(0, 0, fake), value)

    def _primitive_expr(value: Union[bool, int, float, str]) -> PrimitiveExpr:
        return PrimitiveExpr(Pos(0, 0, fake), Pos(0, 0, fake), value)

    return ast.BaseActionCall(
        _start=Pos(0, 0, fake),
        _end=Pos(0, 0, fake),
        action=_str_expr("ws:test"),
        args={name: _primitive_expr(value) for name, value in args.items()},
    )
def _process_args(
    self,
    ctx: click.Context,
    config: Optional[str],
    fake_workspace: bool,
    verbose: int,
    quiet: int,
    show_traceback: bool,
) -> None:
    """Resolve the workspace, build the console, configure logging, and
    attach a fully-initialized Root object to the click context."""
    if fake_workspace:
        # Used by commands that do not need a real project on disk.
        fake_path = LocalPath("running-with-fake-workspace")
        config_dir = ConfigDir(fake_path, fake_path)
    else:
        config_dir = find_workspace(config)
    console = Console(
        highlight=False,
        log_path=False,
        log_time_format="[%X %z]",
        get_datetime=lambda: datetime.now().astimezone(),
    )
    # Net verbosity: each -v raises it, each -q lowers it.
    verbosity = verbose - quiet
    setup_logging(
        color=bool(console.color_system),
        verbosity=verbosity,
        show_traceback=show_traceback,
    )
    ctx.obj = Root(
        config_dir=config_dir,
        console=console,
        verbosity=verbosity,
        show_traceback=show_traceback,
    )
def _make_ast_inputs(
    args: Mapping[str, Tuple[Optional[Union[bool, int, float, str]], InputType]]
) -> Mapping[str, ast.Input]:
    """Build ast.Input fixtures from (default, type) pairs keyed by input name."""
    fake = LocalPath("fake")

    def _opt_primitive_expr(
        value: Optional[Union[bool, int, float, str]],
    ) -> SimpleOptPrimitiveExpr:
        # Positions are irrelevant for these fixtures; use a dummy location.
        return SimpleOptPrimitiveExpr(Pos(0, 0, fake), Pos(0, 0, fake), value)

    def _opt_str_expr(value: Optional[str]) -> SimpleOptStrExpr:
        return SimpleOptStrExpr(Pos(0, 0, fake), Pos(0, 0, fake), value)

    return {
        name: ast.Input(
            _start=Pos(0, 0, fake),
            _end=Pos(0, 0, fake),
            default=_opt_primitive_expr(default),
            descr=_opt_str_expr(None),
            type=input_type,
        )
        for name, (default, input_type) in args.items()
    }
async def test_upload_dry_run_mode_prints_commands(
    client: Client,
    live_context_factory: LiveContextFactory,
    capsys: CaptureFixture[str],
) -> None:
    """In dry-run mode upload() prints the neuro commands instead of running them."""
    expr = StrExpr(POS, POS, "${{ upload(volumes.test) }}")
    volume = VolumeCtx(
        id="test",
        full_local_path=LocalPath("/test/local"),
        local=LocalPath("local"),
        remote=URL("storage://cluster/user/somedir"),
        read_only=False,
        mount=RemotePath("/mnt"),
    )
    ctx = live_context_factory(client, set(), dry_run=True, volumes={"test": volume})
    await expr.eval(ctx)
    capture = capsys.readouterr()
    assert "neuro mkdir --parents storage://cluster/user\n" in capture.out
    # On Windows the local path is rendered with backslashes and quoted.
    local_repr = "'\\test\\local'" if sys.platform == "win32" else "/test/local"
    expected = (
        "neuro cp --recursive --update --no-target-directory"
        f" {local_repr} storage://cluster/user/somedir\n"
    )
    assert expected in capture.out
async def test_local_action(
    jobs_mock: JobsMock,
    assets: Path,
    run_executor: Callable[[Path, str], Awaitable[None]],
) -> None:
    """A local action ('call-cp') copies a file in the workspace while the
    remote task runs; both must complete for the executor to finish."""
    # Renamed the context variable from `dir` — it shadowed the builtin dir().
    with TemporaryDirectory() as tmp_dir:
        ws = LocalPath(tmp_dir) / "local_actions"
        shutil.copytree(assets / "local_actions", ws)
        executor_task = asyncio.ensure_future(run_executor(ws, "call-cp"))
        descr = await jobs_mock.get_task("remote-task")
        assert descr.container.command
        assert "echo 0" in descr.container.command
        # The local cp action must have produced the copy alongside the source.
        assert (ws / "file").read_text() == "test\n"
        assert (ws / "file_copy").read_text() == "test\n"
        await jobs_mock.mark_done("remote-task")
        await executor_task
async def test_local_remote_path_images(
    client: Client, live_config_loader: ConfigLoader
) -> None:
    """Image contexts resolve to local paths or storage URLs as declared."""
    flow = await RunningLiveFlow.create(live_config_loader, "live-different-images")
    ctx = flow._ctx
    assert ctx.images.keys() == {"image_local", "image_remote"}

    ws = live_config_loader.workspace
    local_image = ctx.images["image_local"]
    assert local_image.context == ws / "dir"
    assert local_image.dockerfile == ws / "dir/Dockerfile"
    assert local_image.dockerfile_rel == LocalPath("Dockerfile")

    base = f"storage://{client.cluster_name}/{client.username}"
    remote_image = ctx.images["image_remote"]
    assert remote_image.context == URL(f"{base}/dir")
    assert remote_image.dockerfile == URL(f"{base}/dir/Dockerfile")
    assert remote_image.dockerfile_rel == RemotePath("Dockerfile")
async def action_stream(self, action_name: str) -> AsyncIterator[TextIO]:
    """Yield an open text stream with the YAML definition of *action_name*.

    Local actions ("ws:" specs) are resolved against the workspace,
    trying the exact path, then ".yml", then ".yaml" suffixes.
    GitHub actions ("gh:" specs, "owner/repo@version") are fetched as a
    tarball and the top-level action.yml/action.yaml is extracted.

    Raises ValueError if the action cannot be located or the scheme is
    unsupported.
    """
    action = ActionSpec.parse(action_name)
    if action.is_local:
        path = self._workspace / action.spec
        # Fallback chain: bare path -> .yml -> .yaml.  Order matters:
        # with_suffix replaces any existing suffix on each attempt.
        if not path.exists():
            path = path.with_suffix(".yml")
        if not path.exists():
            path = path.with_suffix(".yaml")
        if not path.exists():
            raise ValueError(f"Action {action_name} does not exist")
        with path.open() as f:
            yield f
    elif action.is_github:
        # Spec format is "owner/repo@version"; the version is mandatory.
        repo, sep, version = action.spec.partition("@")
        if not sep:
            raise ValueError(
                f"{action_name} is github action, but has no version")
        async with self._tarball_from_github(repo, version) as tarball:
            tar = tarfile.open(fileobj=tarball)
            for member in tar.getmembers():
                member_path = LocalPath(member.name)
                # find action yml file
                # (tarballs have a single top-level dir, so the action
                # file sits at depth 2: "<root>/action.y(a)ml")
                if len(member_path.parts) == 2 and (
                        member_path.parts[1] == "action.yml"
                        or member_path.parts[1] == "action.yaml"):
                    if member.isfile():
                        file_obj = tar.extractfile(member)
                        if file_obj is None:
                            raise ValueError(
                                f"Github repo {repo} do not contain "
                                '"action.yml" or "action.yaml" files.')
                        # Cast is workaround for
                        # https://github.com/python/typeshed/issues/4349
                        yield NamedTextIOWrapper(
                            action_name, cast(BinaryIO, file_obj))
    else:
        raise ValueError(f"Unsupported scheme '{action.scheme}'")
import click import os import sys from neuro_flow.cli.utils import wrap_async from neuro_flow.types import LocalPath from .root import Root CFG_FILE = {"bash": LocalPath("~/.bashrc"), "zsh": LocalPath("~/.zshrc")} SOURCE_CMD = {"bash": "bash_source", "zsh": "zsh_source"} ACTIVATION_TEMPLATE = 'eval "$(_NEURO_FLOW_COMPLETE={cmd} {exe})"' @click.group() def completion() -> None: """ Output shell completion code. """ @click.command() @click.argument("shell", type=click.Choice(["bash", "zsh"])) @wrap_async() async def generate(root: Root, shell: str) -> None: """ Provide instruction for shell completion generation. """ root.console.print(f"Push the following line into your {CFG_FILE[shell]}") root.console.print(
Call, DictMaker, ItemGetter, ListMaker, Literal, Lookup, Text, UnaryOp, logical_and, logical_or, ) from neuro_flow.tokenizer import Pos, tokenize from neuro_flow.types import LocalPath FNAME = LocalPath("<test>") START: Final = Pos(0, 0, FNAME) def test_tmpl_ok1() -> None: assert [Lookup(Pos(0, 4, FNAME), Pos(0, 8, FNAME), "name", [])] == PARSER.parse( list(tokenize("${{ name }}", START)) ) def test_tmpl_ok2() -> None: assert [ Lookup( Pos(0, 4, FNAME), Pos(0, 18, FNAME), "name",
TagsCtx, VolumeCtx, VolumesCtx, ) from neuro_flow.expr import ( EvalError, PrimitiveExpr, RootABC, SequenceExpr, StrExpr, TypeT, ) from neuro_flow.tokenizer import Pos from neuro_flow.types import LocalPath, RemotePath POS = Pos(0, 0, LocalPath(__file__)) class Root(RootABC): def __init__(self, mapping: Mapping[str, TypeT], client: Client, dry_run: bool = False) -> None: self._mapping = mapping self._client = client self._dry_run = dry_run def lookup(self, name: str) -> TypeT: return self._mapping[name] @asynccontextmanager
async def test_hash_files(client: Client) -> None:
    """hash_files() over the fixture folder yields a stable digest."""
    folder = LocalPath(__file__).parent / "hash_files"
    expr = StrExpr(
        POS, POS, "${{ hash_files('Dockerfile', 'requirements/*.txt') }}"
    )
    ret = await expr.eval(Root({"flow": {"workspace": folder}}, client))
    assert ret == "081fde04651e1184890a0470501bff3db8e0014260224e07acf5688e70e0edbe"
def test_template_curly() -> None: assert [ Token( "TEXT", "abc ", Pos(0, 0, LocalPath("<test>")), Pos(0, 4, LocalPath("<test>")), ), Token( "LTMPL", "${{", Pos(0, 4, LocalPath("<test>")), Pos(0, 7, LocalPath("<test>")), ), Token( "NAME", "job", Pos(0, 8, LocalPath("<test>")), Pos(0, 11, LocalPath("<test>")), ), Token("DOT", ".", Pos(0, 11, LocalPath("<test>")), Pos(0, 12, LocalPath("<test>"))), Token( "NAME", "job_id", Pos(0, 12, LocalPath("<test>")), Pos(0, 18, LocalPath("<test>")), ), Token("DOT", ".", Pos(0, 18, LocalPath("<test>")), Pos(0, 19, LocalPath("<test>"))), Token( "NAME", "name", Pos(0, 19, LocalPath("<test>")), Pos(0, 23, LocalPath("<test>")), ), Token( "RTMPL", "}}", Pos(0, 24, LocalPath("<test>")), Pos(0, 26, LocalPath("<test>")), ), Token( "TEXT", "jik", Pos(0, 26, LocalPath("<test>")), Pos(0, 29, LocalPath("<test>")), ), ] == list( tokenize("abc ${{ job.job_id.name }}jik", Pos(0, 0, LocalPath("<test>"))))
def test_empty() -> None: assert [] == list(tokenize("", Pos(0, 0, LocalPath("<test>"))))
def test_template_square() -> None: assert [ Token( "TEXT", "abc ", Pos(0, 0, LocalPath("<test>")), Pos(0, 4, LocalPath("<test>")), ), Token( "LTMPL2", "$[[", Pos(0, 4, LocalPath("<test>")), Pos(0, 7, LocalPath("<test>")), ), Token( "NAME", "job", Pos(0, 8, LocalPath("<test>")), Pos(0, 11, LocalPath("<test>")), ), Token("DOT", ".", Pos(0, 11, LocalPath("<test>")), Pos(0, 12, LocalPath("<test>"))), Token( "NAME", "job_id", Pos(0, 12, LocalPath("<test>")), Pos(0, 18, LocalPath("<test>")), ), Token("DOT", ".", Pos(0, 18, LocalPath("<test>")), Pos(0, 19, LocalPath("<test>"))), Token( "NAME", "name", Pos(0, 19, LocalPath("<test>")), Pos(0, 23, LocalPath("<test>")), ), Token( "RTMPL2", "]]", Pos(0, 24, LocalPath("<test>")), Pos(0, 26, LocalPath("<test>")), ), Token( "TEXT", "jik", Pos(0, 26, LocalPath("<test>")), Pos(0, 29, LocalPath("<test>")), ), ] == list( tokenize("abc $[[ job.job_id.name ]]jik", Pos(0, 0, LocalPath("<test>"))))
def workspace(self) -> LocalPath:
    """Return the workspace directory recorded in the bake meta."""
    return LocalPath(self._meta.workspace)