def test_resolver_default_service_ports(self):
    # A service run that declares ports should surface them in the resolved
    # globals together with the service base_url route.
    compiled_operation = V1CompiledOperation.read(
        {
            "version": 1.1,
            "kind": kinds.COMPILED_OPERATION,
            "plugins": {
                "auth": False,
                "shm": False,
                "collectLogs": False,
                "collectArtifacts": True,
                "collectResources": True,
            },
            "run": {
                "kind": V1RunKind.SERVICE,
                "ports": [1212, 1234],
                # Command references the ports context to ensure it is resolvable.
                "container": {"image": "test", "command": "{{ ports[0] }}"},
            },
        }
    )
    spec = resolve_contexts(
        namespace="test",
        owner_name="user",
        project_name="project",
        project_uuid="uuid",
        run_uuid="uuid",
        run_name="run",
        run_path="test",
        compiled_operation=compiled_operation,
        artifacts_store=None,
        connection_by_names={},
        iteration=12,
        created_at=None,
        compiled_at=None,
    )
    assert spec == {
        "globals": {
            "owner_name": "user",
            "project_name": "project",
            "project_unique_name": "user.project",
            "project_uuid": "uuid",
            "run_info": "user.project.runs.uuid",
            "name": "run",
            "uuid": "uuid",
            "context_path": "/plx-context",
            "artifacts_path": "/plx-context/artifacts",
            "run_artifacts_path": "/plx-context/artifacts/test",
            "run_outputs_path": "/plx-context/artifacts/test/outputs",
            "namespace": "test",
            "iteration": 12,
            "ports": [1212, 1234],
            "base_url": "/services/v1/test/user/project/runs/uuid",
            "created_at": None,
            "compiled_at": None,
            "cloning_kind": None,
            "original_uuid": None,
        },
        "init": {},
        "connections": {},
    }
def resolve(run: BaseRun, compiled_at: datetime = None, resolver_cls=None):
    """Compile the run's content and resolve it through the platform resolver.

    Known compilation/validation failures are normalized into
    PolyaxonCompilerError, chaining the original exception as the cause.
    """
    if resolver_cls is None:
        resolver_cls = CorePlatformResolver
    try:
        project = run.project
        # Cloned runs carry a reference to the original run's uuid.
        original_uuid = run.original.uuid.hex if run.original_id else None
        return resolver.resolve(
            run=run,
            compiled_operation=V1CompiledOperation.read(run.content),
            owner_name=project.owner.name,
            project_name=project.name,
            project_uuid=project.uuid.hex,
            run_uuid=run.uuid.hex,
            run_name=run.name,
            run_path=run.subpath,
            resolver_cls=resolver_cls,
            params=None,
            compiled_at=compiled_at,
            created_at=run.created_at,
            cloning_kind=run.cloning_kind,
            original_uuid=original_uuid,
        )
    except (
        AccessNotAuthorized,
        AccessNotFound,
        MarshmallowValidationError,
        PolyaxonSchemaError,
        ValidationError,
    ) as e:
        raise PolyaxonCompilerError("Compilation Error: %s" % e) from e
def test_sequential_pipeline(self):
    """A linear chain of ops keeps its dependencies and sorts into singleton layers."""
    run_config = V1CompiledOperation.read(
        [
            os.path.abspath(
                "tests/fixtures/pipelines/simple_sequential_pipeline.yml"
            ),
            {"kind": "compiled_operation"},
        ]
    )
    run_config = CompiledOperationSpecification.apply_context(run_config)
    assert run_config.run is not None
    operations = run_config.run.operations
    assert len(operations) == 4
    # (name, expected dependencies); None means dependencies are not checked.
    expected = [
        ("job1", None),
        ("job2", ["job1"]),
        ("experiment1", ["job2"]),
        ("experiment2", ["experiment1"]),
    ]
    for op, (name, deps) in zip(operations, expected):
        assert op.name == name
        if deps is not None:
            assert op.dependencies == deps
    dag = run_config.run
    assert dag.sort_topologically(dag.dag) == [
        ["job1"],
        ["job2"],
        ["experiment1"],
        ["experiment2"],
    ]
    assert run_config.schedule is None
def test_parallel_pipeline(self):
    """Independent ops have no dependencies and collapse into one topological layer."""
    run_config = V1CompiledOperation.read(
        [
            os.path.abspath(
                "tests/fixtures/pipelines/simple_parallel_pipeline.yml"
            ),
            {"kind": "compiled_operation"},
        ]
    )
    run_config = CompiledOperationSpecification.apply_context(run_config)
    operations = run_config.run.operations
    assert len(operations) == 4
    expected_names = ["job1", "job2", "experiment1", "experiment2"]
    for op, name in zip(operations, expected_names):
        assert op.name == name
        assert op.dependencies is None
    dag = run_config.run
    # No edges: everything belongs to the first (and only) layer.
    assert set(dag.sort_topologically(dag.dag)[0]) == set(expected_names)
    assert run_config.run.concurrency == 2
    assert run_config.schedule is None
def test_specification_with_quotes(self):
    # Quoted args in the polyaxonfile must survive context application, and
    # re-applying contexts after a dict round-trip must be idempotent.
    run_config = CompiledOperationSpecification.read(
        [
            os.path.abspath("tests/fixtures/plain/polyaxonfile_with_quotes.yaml"),
            {"kind": "compiled_operation"},
        ]
    )
    run_config = CompiledOperationSpecification.apply_operation_contexts(run_config)
    expected_run = {
        "kind": V1RunKind.JOB,
        "container": {
            "image": "continuumio/miniconda3",
            "command": ["python"],
            "args": ["-c \"print('Tweet tweet')\""],
            "name": "polyaxon-main",
        },
    }
    assert run_config.run.to_dict() == expected_run
    # Round-trip through a plain dict and apply contexts again: stable result.
    run_config = V1CompiledOperation.read(run_config.to_dict())
    run_config = CompiledOperationSpecification.apply_operation_contexts(run_config)
    assert run_config.run.to_dict() == expected_run
def test_dag_pipeline(self):
    """A fan-out/fan-in DAG sorts into three layers with the middle layer unordered."""
    run_config = V1CompiledOperation.read(
        [
            os.path.abspath("tests/fixtures/pipelines/simple_dag_pipeline.yml"),
            {"kind": "compiled_operation"},
        ]
    )
    run_config = CompiledOperationSpecification.apply_context(run_config)
    operations = run_config.run.operations
    assert len(operations) == 5
    # (name, expected dependencies); None means dependencies are not checked.
    expected = [
        ("job1", None),
        ("experiment1", ["job1"]),
        ("experiment2", ["job1"]),
        ("experiment3", ["job1"]),
        ("job2", ["experiment1", "experiment2", "experiment3"]),
    ]
    for op, (name, deps) in zip(operations, expected):
        assert op.name == name
        if deps is not None:
            assert op.dependencies == deps
    dag = run_config.run
    layers = dag.sort_topologically(dag.dag)
    assert layers[0] == ["job1"]
    # Middle layer ordering is unspecified; compare as a set.
    assert set(layers[1]) == {"experiment1", "experiment2", "experiment3"}
    assert layers[2] == ["job2"]
    assert run_config.run.concurrency == 3
    assert run_config.schedule is None
def test_matrix_file_passes_int_float_types(self):
    # Grid-search choice values must keep their numeric types (int vs float)
    # after reading and context application.
    run_config = V1CompiledOperation.read(
        [
            os.path.abspath(
                "tests/fixtures/pipelines/matrix_file_with_int_float_types.yml"
            ),
            {"kind": "compiled_operation"},
        ]
    )
    run_config = CompiledOperationSpecification.apply_context(run_config)
    assert run_config.version == 1.05
    assert run_config.is_dag_run is True
    assert run_config.has_pipeline is True
    assert run_config.schedule is None
    assert run_config.run.concurrency == 4
    assert isinstance(run_config.run, V1Dag)
    assert run_config.run.early_stopping is None
    assert run_config.run.kind == V1Dag.IDENTIFIER
    assert len(run_config.run.operations) == 2
    assert len(run_config.run.components) == 1
    template_grid = run_config.run.operations[1].parallel
    assert isinstance(template_grid, V1GridSearch)
    assert isinstance(template_grid.params["param1"], V1HpChoice)
    assert isinstance(template_grid.params["param2"], V1HpChoice)
    # param1 stays int, param2 stays float.
    assert template_grid.params["param1"].to_dict() == {
        "kind": "choice",
        "value": [1, 2],
    }
    assert template_grid.params["param2"].to_dict() == {
        "kind": "choice",
        "value": [3.3, 4.4],
    }
    assert template_grid.concurrency == 2
    assert template_grid.early_stopping is None
def test_get_from_spec(self):
    """PluginsContextsSpec.from_config maps camelCase plugin flags to snake_case attrs."""
    compiled_operation = V1CompiledOperation.read(
        {
            "version": 1.1,
            "kind": kinds.COMPILED_OPERATION,
            "plugins": {
                "auth": False,
                "shm": False,
                "collectLogs": False,
                "collectArtifacts": False,
                "syncStatuses": False,
                "externalHost": True,
            },
            "run": {"kind": V1RunKind.JOB, "container": {"image": "test"}},
        }
    )
    spec = PluginsContextsSpec.from_config(compiled_operation.plugins)
    expected_flags = {
        "auth": False,
        "docker": False,
        "shm": False,
        "collect_artifacts": False,
        "collect_logs": False,
        "sync_statuses": False,
        "external_host": True,
    }
    for attr, value in expected_flags.items():
        assert getattr(spec, attr) is value
def test_matrix_file_passes(self):
    # Hyperband matrix params of every supported distribution kind must be
    # parsed into their typed schemas with values intact.
    run_config = V1CompiledOperation.read(
        [
            os.path.abspath("tests/fixtures/pipelines/matrix_file.yml"),
            {"kind": "compiled_operation"},
        ]
    )
    run_config = CompiledOperationSpecification.apply_operation_contexts(run_config)
    assert run_config.version == 1.1
    assert run_config.is_dag_run is True
    assert run_config.has_pipeline is True
    assert run_config.schedule is None
    assert run_config.run.concurrency == 4
    assert isinstance(run_config.run, V1Dag)
    assert run_config.run.early_stopping is None
    assert run_config.run.kind == V1Dag.IDENTIFIER
    assert len(run_config.run.operations) == 2
    assert len(run_config.run.components) == 1
    template_hyperband = run_config.run.operations[1].matrix
    assert isinstance(template_hyperband.params["lr"], V1HpLinSpace)
    assert isinstance(template_hyperband.params["loss"], V1HpChoice)
    assert template_hyperband.params["lr"].to_dict() == {
        "kind": "linspace",
        "value": {"start": 0.01, "stop": 0.1, "num": 5},
    }
    assert template_hyperband.params["loss"].to_dict() == {
        "kind": "choice",
        "value": ["MeanSquaredError", "AbsoluteDifference"],
    }
    assert template_hyperband.params["normal_rate"].to_dict() == {
        "kind": "normal",
        "value": {"loc": 0, "scale": 0.9},
    }
    assert template_hyperband.params["dropout"].to_dict() == {
        "kind": "qloguniform",
        "value": {"high": 0.8, "low": 0, "q": 0.1},
    }
    assert template_hyperband.params["activation"].to_dict() == {
        "kind": "pchoice",
        "value": [["relu", 0.1], ["sigmoid", 0.8]],
    }
    assert template_hyperband.params["model"].to_dict() == {
        "kind": "choice",
        "value": ["CDNA", "DNA", "STP"],
    }
    assert template_hyperband.concurrency == 2
    assert isinstance(template_hyperband, V1Hyperband)
def test_resolver_default_contexts(self):
    # With no store/connections/iteration, the resolver should emit only the
    # default globals rooted at the container context root.
    context_root = container_contexts.CONTEXT_ROOT
    compiled_operation = V1CompiledOperation.read(
        {
            "version": 1.1,
            "kind": kinds.COMPILED_OPERATION,
            "plugins": {
                "auth": False,
                "shm": False,
                "collectLogs": False,
                "collectArtifacts": False,
                "collectResources": False,
            },
            "run": {"kind": V1RunKind.JOB, "container": {"image": "test"}},
        }
    )
    spec = resolve_contexts(
        namespace="test",
        owner_name="user",
        project_name="project",
        project_uuid="uuid",
        run_uuid="uuid",
        run_name="run",
        run_path="test",
        compiled_operation=compiled_operation,
        artifacts_store=None,
        connection_by_names={},
        iteration=None,
        created_at=None,
        compiled_at=None,
    )
    assert spec == {
        "globals": {
            "owner_name": "user",
            "project_unique_name": "user.project",
            "project_name": "project",
            "project_uuid": "uuid",
            "run_info": "user.project.runs.uuid",
            "context_path": context_root,
            "artifacts_path": "{}/artifacts".format(context_root),
            "name": "run",
            "uuid": "uuid",
            "namespace": "test",
            "iteration": None,
            "created_at": None,
            "compiled_at": None,
            "schedule_at": None,
            "started_at": None,
            "finished_at": None,
            "duration": None,
            "cloning_kind": None,
            "original_uuid": None,
        },
        "init": {},
        "connections": {},
    }
def test_resolver_init_and_connections_contexts(self):
    # When the artifacts store is a volume-claim connection used both as an
    # init and a run connection, artifacts_path should point inside the claim
    # mount and the connection schema should appear in init and connections.
    store = V1ConnectionType(
        name="test_claim",
        kind=V1ConnectionKind.VOLUME_CLAIM,
        schema=V1ClaimConnection(
            mount_path="/claim/path", volume_claim="claim", read_only=True
        ),
    )
    compiled_operation = V1CompiledOperation.read(
        {
            "version": 1.05,
            "kind": kinds.COMPILED_OPERATION,
            "plugins": {
                "auth": False,
                "shm": False,
                "collectLogs": False,
                "collectArtifacts": False,
                "collectResources": False,
            },
            "run": {
                "kind": V1RunKind.JOB,
                "container": {"image": "test"},
                "connections": [store.name],
                "init": [{"connection": store.name}],
            },
        }
    )
    spec = resolve_contexts(
        namespace="test",
        owner_name="user",
        project_name="project",
        project_uuid="uuid",
        run_uuid="uuid",
        run_name="run",
        run_path="test",
        compiled_operation=compiled_operation,
        artifacts_store=store,
        connection_by_names={store.name: store},
        iteration=12,
    )
    assert spec == {
        "globals": {
            "owner_name": "user",
            "project_unique_name": "user.project",
            "project_name": "project",
            "project_uuid": "uuid",
            "name": "run",
            "uuid": "uuid",
            # run_path is appended to the claim's mount path.
            "artifacts_path": "/claim/path/test",
            "namespace": "test",
            "iteration": 12,
            "run_info": "user.project.runs.uuid",
        },
        "init": {"test_claim": store.schema.to_dict()},
        "connections": {"test_claim": store.schema.to_dict()},
    }
def test_pipeline_with_no_ops_raises(self):
    """Compiling a pipeline that declares no operations must fail."""
    fixture = os.path.abspath("tests/fixtures/pipelines/pipeline_with_no_ops.yml")
    run_config = V1CompiledOperation.read([fixture, {"kind": "compiled_operation"}])
    with self.assertRaises(PolyaxonSchemaError):
        CompiledOperationSpecification.apply_context(run_config)
def test_run_simple_file_passes(self):
    # Full lifecycle: read -> validate params -> apply params -> apply
    # contexts. Applying context before params must fail while a required
    # input (num_masks) is still unresolved.
    run_config = V1CompiledOperation.read(
        [
            reader.read(os.path.abspath("tests/fixtures/typing/run_cmd_simple_file.yml")),
            {"kind": "compiled_operation"},
        ]
    )
    assert run_config.inputs[0].value == "MeanSquaredError"
    assert run_config.inputs[1].value is None
    validated_params = run_config.validate_params()
    # Validation alone does not mutate the declared inputs.
    assert run_config.inputs[0].value == "MeanSquaredError"
    assert run_config.inputs[1].value is None
    assert {
        "loss": V1Param(value="MeanSquaredError"),
        "num_masks": V1Param(value=None),
    } == {p.name: p.param for p in validated_params}
    with self.assertRaises(ValidationError):
        CompiledOperationSpecification.apply_context(run_config)
    validated_params = run_config.validate_params(params={"num_masks": {"value": 100}})
    assert {
        "loss": V1Param(value="MeanSquaredError"),
        "num_masks": V1Param(value=100),
    } == {p.name: p.param for p in validated_params}
    # Args still hold the raw templates until contexts are applied.
    assert run_config.run.container.args == [
        "video_prediction_train",
        "--num_masks={{num_masks}}",
        "--loss={{loss}}",
    ]
    with self.assertRaises(ValidationError):
        # Applying context before applying params
        CompiledOperationSpecification.apply_context(run_config)
    run_config.apply_params(params={"num_masks": {"value": 100}})
    run_config = CompiledOperationSpecification.apply_context(run_config)
    run_config = CompiledOperationSpecification.apply_run_contexts(run_config)
    assert run_config.version == 1.05
    assert run_config.tags == ["foo", "bar"]
    container = run_config.run.container
    assert isinstance(container, k8s_schemas.V1Container)
    assert container.image == "my_image"
    assert container.command == ["/bin/sh", "-c"]
    # Templates are now fully resolved.
    assert container.args == [
        "video_prediction_train",
        "--num_masks=100",
        "--loss=MeanSquaredError",
    ]
def test_apply_params_extends_connections_and_init(self):
    # Params carrying a `connection` should propagate that connection onto
    # the matching inputs and resolve connection templates in the run spec.
    content = {
        "version": 1.1,
        "kind": "compiled_operation",
        "inputs": [
            {"name": "docker_image", "type": types.IMAGE},
            {"name": "git_repo", "type": types.GIT},
        ],
        "run": {
            "kind": V1RunKind.JOB,
            "connections": ["{{ params.docker_image.connection }}"],
            "container": {
                "name": "polyaxon-main",
                "image": "{{ docker_image }}",
                "command": "train",
            },
        },
    }
    run_config = V1CompiledOperation.read(content)
    # no params
    with self.assertRaises(ValidationError):
        CompiledOperationSpecification.apply_operation_contexts(run_config)
    params = {
        "docker_image": {
            "value": "destination:tag",
            "connection": "docker-registry",
        },
        "git_repo": {
            "value": V1GitType(revision="foo"),
            "connection": "repo-connection",
        },
    }
    assert run_config.inputs[0].value is None
    assert run_config.inputs[1].value is None
    validated_params = run_config.validate_params(params=params)
    run_config.apply_params(params=params)
    assert params == {p.name: p.param.to_dict() for p in validated_params}
    # apply_params copied each param's connection onto its input.
    assert run_config.inputs[0].connection == "docker-registry"
    assert run_config.inputs[1].connection == "repo-connection"
    run_config = CompiledOperationSpecification.apply_operation_contexts(run_config)
    run_config = CompiledOperationSpecification.apply_params(run_config)
    run_config = CompiledOperationSpecification.apply_runtime_contexts(run_config)
    assert run_config.run.connections == ["docker-registry"]
    assert run_config.run.container.image == "destination:tag"
def test_validation_for_required_inputs_outputs_raises(self):
    """Required inputs validate eagerly; required outputs have delayed validation."""

    def read_fixture(name):
        # Fixture files live under tests/fixtures/typing/.
        path = os.path.abspath("tests/fixtures/typing/%s" % name)
        return V1CompiledOperation.read([path, {"kind": "compiled_operation"}])

    # Inputs don't have delayed validation by default: missing values raise.
    run_config = read_fixture("required_inputs.yml")
    with self.assertRaises(ValidationError):
        run_config.validate_params(is_template=False, check_runs=True)

    # Outputs have delayed validation by default: no exception expected.
    run_config = read_fixture("required_outputs.yml")
    run_config.validate_params(is_template=False, check_runs=True)
def test_cyclic_pipeline_raises(self):
    """A DAG with a cycle parses fine but must fail at context application."""
    fixture = os.path.abspath("tests/fixtures/pipelines/cyclic_pipeline.yml")
    run_config = V1CompiledOperation.read([fixture, {"kind": "compiled_operation"}])
    assert run_config.is_dag_run is True
    assert run_config.has_pipeline is True
    with self.assertRaises(PolyaxonSchemaError):
        CompiledOperationSpecification.apply_context(run_config)
def test_no_params_for_required_inputs_outputs_raises(self):
    """apply_context mirrors validate_params: eager for inputs, delayed for outputs."""

    def read_fixture(name):
        # Fixture files live under tests/fixtures/typing/.
        path = os.path.abspath("tests/fixtures/typing/%s" % name)
        return V1CompiledOperation.read([path, {"kind": "compiled_operation"}])

    # Inputs don't have delayed validation by default: applying context raises.
    run_config = read_fixture("required_inputs.yml")
    with self.assertRaises(ValidationError):
        CompiledOperationSpecification.apply_context(run_config)

    # Outputs have delayed validation by default: applying context succeeds.
    run_config = read_fixture("required_outputs.yml")
    CompiledOperationSpecification.apply_context(run_config)
def compile_operation(cls, config: V1Operation, override: Dict = None) -> V1CompiledOperation:
    """Compile an operation (plus optional preset override) into a V1CompiledOperation.

    Steps, in order:
      1. Patch the operation with the preset override, if given.
      2. Patch the component's run with the operation's run_patch.
      3. Collect context-only params into contexts IO entries.
      4. Merge version + component + kind into a compiled operation, then
         patch it with the operation-level fields.
    """
    if override:
        preset = OperationSpecification.read(override, is_preset=True)
        # The preset's own patch_strategy decides how it merges into config.
        config = config.patch(preset, preset.patch_strategy)
    # Patch run
    component = config.component  # type: V1Component
    if config.run_patch:
        component.run = component.run.patch(
            validate_run_patch(config.run_patch, component.run.kind),
            strategy=config.patch_strategy,
        )
    # Gather contexts io: params flagged context_only become contexts IO.
    config_params = config.params or {}
    contexts = [
        V1IO(name=p) for p in config_params if config_params[p].context_only
    ]
    # Operation-level fields that must win over (or extend) the component's.
    patch_compiled = V1CompiledOperation(
        name=config.name,
        description=config.description,
        contexts=contexts,
        tags=config.tags,
        presets=config.presets,
        queue=config.queue,
        cache=config.cache,
        hooks=config.hooks,
        actions=config.actions,
        events=config.events,
        plugins=config.plugins,
        termination=config.termination,
        matrix=config.matrix,
        schedule=config.schedule,
        dependencies=config.dependencies,
        trigger=config.trigger,
        conditions=config.conditions,
        skip_on_upstream_skip=config.skip_on_upstream_skip,
    )
    values = [
        {cls.VERSION: config.version},
        component.to_dict(),
        {cls.KIND: kinds.COMPILED_OPERATION},
    ]
    compiled = V1CompiledOperation.read(values)  # type: V1CompiledOperation
    return compiled.patch(patch_compiled, strategy=config.patch_strategy)
def test_pipeline_ops_not_corresponding_to_components(self):
    """Ops referencing components that don't exist must fail compilation."""
    fixture = os.path.abspath(
        "tests/fixtures/pipelines/pipeline_ops_not_corresponding_to_components.yml"
    )
    run_config = V1CompiledOperation.read(
        [reader.read(fixture), {"kind": "compiled_operation"}]
    )
    with self.assertRaises(PolyaxonSchemaError):
        CompiledOperationSpecification.apply_context(run_config)
def test_apply_context_raises_with_required_inputs(self):
    # Required inputs without params: component reads fine, but applying
    # operation contexts or non-template validation must raise.
    content = {
        "version": 1.1,
        "kind": "component",
        "inputs": [
            {"name": "lr", "type": types.FLOAT},
            {"name": "num_steps", "type": types.INT},
        ],
        "run": {
            "kind": V1RunKind.JOB,
            "container": {
                "name": "polyaxon-main",
                "image": "test/test:latest",
                "command": "train",
            },
        },
    }
    component_config = V1Component.read(content)
    assert component_config.to_dict() == content
    content = {
        "version": 1.1,
        "kind": "compiled_operation",
        "inputs": [
            {"name": "lr", "type": types.FLOAT},
            {"name": "num_steps", "type": types.INT},
        ],
        "run": {
            "kind": V1RunKind.JOB,
            "container": {
                "name": "polyaxon-main",
                "image": "test/test:latest",
                "command": "train",
            },
        },
    }
    run_config = V1CompiledOperation.read(content)
    # Raise because required inputs are not met
    with self.assertRaises(ValidationError):
        CompiledOperationSpecification.apply_operation_contexts(run_config)
    # Validation for template should pass
    validated_params = run_config.validate_params()
    assert {"lr": None, "num_steps": None} == {
        p.name: p.param.value for p in validated_params
    }
    # Validation for non template should raise
    with self.assertRaises(ValidationError):
        run_config.validate_params(is_template=False)
def test_spec_without_io_and_params_raises(self):
    # A spec with no inputs/outputs is valid, but declaring params without
    # matching IO must be rejected at read time.
    content = {
        "version": 1.1,
        "kind": "component",
        "run": {
            "kind": V1RunKind.JOB,
            "container": {
                "name": "polyaxon-main",
                "image": "test/test:latest",
                "command": "train",
            },
        },
    }
    config = V1Component.read(content)
    assert config.to_dict() == content
    content = {
        "version": 1.1,
        "kind": "compiled_operation",
        "run": {
            "kind": V1RunKind.JOB,
            "container": {
                "name": "polyaxon-main",
                "image": "test/test:latest",
                "command": "train",
            },
        },
    }
    config = V1CompiledOperation.read(content)
    config = CompiledOperationSpecification.apply_operation_contexts(config)
    # Applying contexts with no IO is a no-op.
    assert config.to_dict() == content
    # Add params
    content["params"] = {"lr": 0.1}
    with self.assertRaises(ValidationError):
        V1CompiledOperation.read(content)
def test_resolver_default_contexts(self):
    # NOTE(review): another test method with this exact name appears earlier
    # in this source; if both live in the same TestCase the later definition
    # silently shadows the earlier one — confirm they belong to different classes.
    # Minimal resolver invocation: expect only the default globals.
    compiled_operation = V1CompiledOperation.read(
        {
            "version": 1.1,
            "kind": kinds.COMPILED_OPERATION,
            "plugins": {
                "auth": False,
                "shm": False,
                "collectLogs": False,
                "collectArtifacts": False,
                "collectResources": False,
            },
            "run": {"kind": V1RunKind.JOB, "container": {"image": "test"}},
        }
    )
    spec = resolve_contexts(
        namespace="test",
        owner_name="user",
        project_name="project",
        project_uuid="uuid",
        run_uuid="uuid",
        run_name="run",
        run_path="test",
        compiled_operation=compiled_operation,
        artifacts_store=None,
        connection_by_names={},
        iteration=None,
    )
    assert spec == {
        "globals": {
            "owner_name": "user",
            "project_unique_name": "user.project",
            "project_name": "project",
            "project_uuid": "uuid",
            "run_info": "user.project.runs.uuid",
            "context_path": "/plx-context",
            "artifacts_path": "/plx-context/artifacts",
            "name": "run",
            "uuid": "uuid",
            "namespace": "test",
            "iteration": None,
        },
        "init": {},
        "connections": {},
    }
def test_matrix_early_stopping_file_passes(self):
    # Early-stopping policies are parsed at two levels: failure early stopping
    # on the DAG and metric early stopping on the random-search matrix.
    run_config = V1CompiledOperation.read(
        [
            os.path.abspath("tests/fixtures/pipelines/matrix_file_early_stopping.yml"),
            {"kind": "compiled_operation"},
        ]
    )
    run_config = CompiledOperationSpecification.apply_operation_contexts(run_config)
    assert run_config.run is not None
    assert run_config.is_dag_run is True
    assert run_config.has_pipeline is True
    assert run_config.schedule is None
    assert run_config.run.concurrency == 4
    assert isinstance(run_config.run, V1Dag)
    assert run_config.run.early_stopping[0].kind == "failure_early_stopping"
    assert isinstance(run_config.run.early_stopping[0], V1FailureEarlyStopping)
    assert len(run_config.run.early_stopping) == 1
    assert run_config.run.kind == V1Dag.IDENTIFIER
    assert len(run_config.run.operations) == 2
    assert len(run_config.run.components) == 1
    template_random = run_config.run.operations[1].matrix
    assert isinstance(template_random, V1RandomSearch)
    assert isinstance(template_random.params["lr"], V1HpLinSpace)
    assert isinstance(template_random.params["loss"], V1HpChoice)
    assert template_random.params["lr"].to_dict() == {
        "kind": "linspace",
        "value": {"start": 0.01, "stop": 0.1, "num": 5},
    }
    assert template_random.params["loss"].to_dict() == {
        "kind": "choice",
        "value": ["MeanSquaredError", "AbsoluteDifference"],
    }
    assert template_random.concurrency == 2
    assert template_random.num_runs == 300
    assert template_random.early_stopping[0].kind == "metric_early_stopping"
    assert len(template_random.early_stopping) == 1
    assert isinstance(template_random.early_stopping[0], V1MetricEarlyStopping)
def setUp(self):
    super().setUp()
    # Shared fixture: a minimal job operation with every plugin disabled.
    operation_spec = {
        "version": 1.1,
        "kind": kinds.COMPILED_OPERATION,
        "plugins": {
            "auth": False,
            "shm": False,
            "collectLogs": False,
            "collectArtifacts": False,
            "collectResources": False,
        },
        "run": {"kind": V1RunKind.JOB, "container": {"image": "test"}},
    }
    self.compiled_operation = V1CompiledOperation.read(operation_spec)
def setUp(self):
    super().setUp()
    # Shared fixture: an operation with image inputs/outputs bound to
    # different connections, plus explicit run connections.
    # NOTE(review): all three inputs are named "param1" — this looks like a
    # copy/paste slip (the values/connections differ); confirm whether unique
    # names were intended before relying on lookup-by-name in tests.
    self.compiled_operation = V1CompiledOperation.read(
        {
            "version": 1.1,
            "kind": kinds.COMPILED_OPERATION,
            "inputs": [
                {
                    "name": "param1",
                    "type": types.STR,
                    "value": "test",
                    "isOptional": "true",
                },
                {
                    "name": "param1",
                    "type": types.IMAGE,
                    "isOptional": "true",
                    "value": "repo1",
                    "connection": "connection1",
                },
                {
                    "name": "param1",
                    "type": types.IMAGE,
                    "isOptional": "true",
                    "value": "repo2",
                    "connection": "connection2",
                },
            ],
            "outputs": [
                {
                    "name": "repo2",
                    "type": types.IMAGE,
                    "isOptional": "true",
                    "value": "repo3",
                    "connection": "connection1",
                }
            ],
            "run": {
                "kind": V1RunKind.JOB,
                "connections": {"test_s3", "connection1", "connection2"},
                "container": {"image": "test"},
            },
        }
    )
def test_refs_pipeline(self):
    # Ops referencing external components (by path and by url) should be
    # resolved during apply_context; the config reader is mocked so both
    # refs return the same stub component.
    run_config = V1CompiledOperation.read(
        [
            os.path.abspath("tests/fixtures/pipelines/ref_pipeline.yml"),
            {"kind": "compiled_operation"},
        ]
    )
    with patch("polyaxon.config_reader.spec.ConfigSpec.read") as config_read:
        config_read.return_value = V1Component(
            kind="component",
            version="1.05",
            inputs=[V1IO(name="str-input", iotype="str")],
            run=V1Job(container=V1Container(name="test")),
        ).to_dict()
        compiled_op = CompiledOperationSpecification.apply_context(run_config)
    assert compiled_op.run is not None
    assert len(compiled_op.run.operations) == 2
    assert compiled_op.run.operations[0].name == "ref-path-op"
    assert compiled_op.run.operations[1].name == "ref-url-op"
def resolve(run: BaseRun, resolver_cls=None):
    # Compile the run's stored content and resolve it through the platform
    # resolver, normalizing known failures into PolyaxonCompilerError.
    resolver_cls = resolver_cls or CorePlatformResolver
    try:
        project = run.project
        return resolver.resolve(
            run=run,
            compiled_operation=V1CompiledOperation.read(run.content),
            owner_name=project.owner.name,
            project_name=project.name,
            project_uuid=project.uuid.hex,
            run_uuid=run.uuid.hex,
            run_name=run.name,
            run_path=run.subpath,
            resolver_cls=resolver_cls,
            params=None,
        )
    except (
        AccessNotAuthorized,
        AccessNotFound,
        ValidationError,
        PolyaxonSchemaError,
    ) as e:
        # NOTE(review): a sibling resolve() in this source also catches
        # MarshmallowValidationError and forwards compiled_at/created_at/
        # cloning metadata — confirm whether this variant should match it.
        raise PolyaxonCompilerError("Compilation Error: %s" % e) from e
def test_resolve_connections_with_invalid_config(self):
    # Connections requested by the run spec should be resolved against the
    # agent config; the artifacts store connection is always included.
    fpath = tempfile.mkdtemp()
    AgentManager.CONFIG_PATH = fpath
    secret1 = V1K8sResourceType(
        name="secret1",
        schema=V1K8sResourceSchema(name="secret1"),
        is_requested=True,
    )
    secret2 = V1K8sResourceType(
        name="secret2",
        schema=V1K8sResourceSchema(name="secret2"),
        is_requested=True,
    )
    connection1 = V1ConnectionType(
        name="test_s3",
        kind=V1ConnectionKind.S3,
        schema=V1BucketConnection(bucket="s3//:foo"),
        secret=secret1.schema,
    )
    connection2 = V1ConnectionType(
        name="test_gcs",
        kind=V1ConnectionKind.GCS,
        schema=V1BucketConnection(bucket="gcs//:foo"),
        secret=secret1.schema,
    )
    connection3 = V1ConnectionType(
        name="test_wasb",
        kind=V1ConnectionKind.WASB,
        schema=V1BucketConnection(bucket="wasbs//:foo"),
        secret=secret2.schema,
    )
    settings.AGENT_CONFIG = AgentConfig(
        namespace="foo",
        artifacts_store=connection1,
        connections=[connection2, connection3],
    )
    # Case 1: run spec requests no connections -> only the artifacts store
    # is resolved (secrets are still gathered from the agent config).
    resolver = BaseResolver(
        run=None,
        compiled_operation=self.compiled_operation,
        owner_name="user",
        project_name="p1",
        project_uuid=None,
        run_name="j1",
        run_uuid=None,
        run_path="test",
        params=None,
    )
    resolver.resolve_connections()
    assert resolver.namespace == "foo"
    assert resolver.connection_by_names == {connection1.name: connection1}
    assert resolver.artifacts_store == connection1
    assert [s.schema for s in resolver.secrets] == [secret1.schema, secret2.schema]
    assert resolver.polyaxon_sidecar == get_default_sidecar_container()
    assert resolver.polyaxon_init == get_default_init_container()
    # Add run spec to resolve connections
    # Case 2: run spec requests one extra connection.
    compiled_operation = V1CompiledOperation.read(
        {
            "version": 1.1,
            "kind": kinds.COMPILED_OPERATION,
            "plugins": {
                "auth": False,
                "shm": False,
                "collectLogs": False,
                "collectArtifacts": False,
                "collectResources": False,
            },
            "run": {
                "kind": V1RunKind.JOB,
                "container": {"image": "test"},
                "connections": {connection3.name},
            },
        }
    )
    resolver = BaseResolver(
        run=None,
        compiled_operation=compiled_operation,
        owner_name="user",
        project_name="p1",
        project_uuid=None,
        run_name="j1",
        run_uuid=None,
        run_path="test",
        params=None,
    )
    resolver.resolve_connections()
    assert resolver.namespace == "foo"
    assert resolver.connection_by_names == {
        connection1.name: connection1,
        connection3.name: connection3,
    }
    assert [s.schema for s in resolver.secrets] == [secret1.schema, secret2.schema]
    assert resolver.artifacts_store == connection1
    assert resolver.polyaxon_sidecar == get_default_sidecar_container()
    assert resolver.polyaxon_init == get_default_init_container()
    # Add run spec to resolve connections
    # Case 3: run spec requests all connections.
    compiled_operation = V1CompiledOperation.read(
        {
            "version": 1.1,
            "kind": kinds.COMPILED_OPERATION,
            "plugins": {
                "auth": False,
                "shm": False,
                "collectLogs": False,
                "collectArtifacts": False,
                "collectResources": False,
            },
            "run": {
                "kind": V1RunKind.JOB,
                "container": {"image": "test"},
                "connections": {
                    connection1.name,
                    connection2.name,
                    connection3.name,
                },
            },
        }
    )
    resolver = BaseResolver(
        run=None,
        compiled_operation=compiled_operation,
        owner_name="user",
        project_name="p1",
        project_uuid=None,
        run_name="j1",
        run_uuid=None,
        run_path="test",
        params=None,
    )
    resolver.resolve_connections()
    assert resolver.namespace == "foo"
    assert resolver.connection_by_names == {
        connection3.name: connection3,
        connection2.name: connection2,
        connection1.name: connection1,
    }
    assert [s.schema for s in resolver.secrets] == [secret1.schema, secret2.schema]
    assert resolver.artifacts_store == connection1
    assert resolver.polyaxon_sidecar == get_default_sidecar_container()
    assert resolver.polyaxon_init == get_default_init_container()
def test_build_run_pipeline(self):
    # Two-stage build->run pipeline: verify parsed components, then compile
    # the "run" op standalone and check param/context resolution end to end.
    run_config = V1CompiledOperation.read(
        [
            os.path.abspath("tests/fixtures/pipelines/build_run_pipeline.yml"),
            {"kind": "compiled_operation"},
        ]
    )
    run_config = CompiledOperationSpecification.apply_context(run_config)
    assert len(run_config.run.operations) == 2
    assert run_config.run.operations[0].name == "build"
    assert run_config.run.operations[1].name == "run"
    assert run_config.is_dag_run is True
    assert run_config.has_pipeline is True
    assert run_config.schedule is None
    assert len(run_config.run.components) == 2
    assert run_config.run.components[0].name == "experiment-template"
    assert run_config.run.components[0].termination.to_dict() == {"maxRetries": 2}
    assert run_config.run.components[0].run.to_dict() == {
        "kind": V1RunKind.JOB,
        "environment": {
            "nodeSelector": {"polyaxon": "experiments"},
            "serviceAccountName": "service",
            "imagePullSecrets": ["secret1", "secret2"],
        },
        "container": {
            # Image/args remain templates until params are applied.
            "image": "{{ image }}",
            "command": ["python3", "main.py"],
            "args": "--lr={{ lr }}",
            "name": "polyaxon-main",
            "resources": {"requests": {"cpu": 1}},
        },
    }
    assert run_config.run.components[1].name == "build-template"
    assert run_config.run.components[1].run.container.image == "base"
    assert run_config.run.operations[0].name == "build"
    # Create a an op spec
    run_config.run.set_op_component("run")
    assert run_config.run.operations[1].has_component_reference is True
    job_config = run_config.run.get_op_spec_by_index(1)
    # The "run" op takes its image from the upstream "build" op's outputs.
    assert {p: job_config.params[p].to_dict() for p in job_config.params} == {
        "image": {"value": "outputs.docker-image", "ref": "ops.build"},
        "lr": {"value": 0.001},
    }
    run_config = OperationSpecification.compile_operation(job_config)
    run_config.apply_params({"image": {"value": "foo"}, "lr": {"value": 0.001}})
    run_config = CompiledOperationSpecification.apply_context(run_config)
    run_config = CompiledOperationSpecification.apply_run_contexts(run_config)
    assert run_config.termination.to_dict() == {"maxRetries": 2}
    assert run_config.run.to_dict() == {
        "kind": V1RunKind.JOB,
        "environment": {
            "nodeSelector": {"polyaxon": "experiments"},
            "serviceAccountName": "service",
            "imagePullSecrets": ["secret1", "secret2"],
        },
        "container": {
            # Templates are resolved with the applied params.
            "image": "foo",
            "command": ["python3", "main.py"],
            "args": "--lr=0.001",
            "name": "polyaxon-main",
            "resources": {"requests": {"cpu": 1}},
        },
    }
def test_specification_with_context_requirement(self):
    # Operation contexts leave globals/connections templates untouched
    # (and are idempotent); runtime contexts then resolve them fully.
    context_root = container_contexts.CONTEXT_ROOT
    contexts = {
        "globals": {
            "owner_name": "user",
            "project_name": "project",
            "project_unique_name": "user.project",
            "project_uuid": "uuid",
            "run_info": "user.project.runs.uuid",
            "name": "run",
            "uuid": "uuid",
            "context_path": "/plx-context",
            "artifacts_path": "{}/artifacts".format(context_root),
            "run_artifacts_path": "{}/artifacts/test".format(context_root),
            "run_outputs_path": "{}/artifacts/test/outputs".format(context_root),
            "namespace": "test",
            "iteration": 12,
            "ports": [1212, 1234],
            "base_url": "/services/v1/test/user/project/runs/uuid",
            "created_at": None,
            "compiled_at": None,
            "cloning_kind": None,
            "original_uuid": None,
        },
        "init": {},
        "connections": {"foo": {"key": "connection-value"}},
    }
    run_config = CompiledOperationSpecification.read(
        [
            os.path.abspath(
                "tests/fixtures/plain/polyaxonfile_with_contexts_requirements.yaml"
            ),
            {"kind": "compiled_operation"},
        ]
    )
    run_config = CompiledOperationSpecification.apply_operation_contexts(run_config)
    expected_run = {
        "kind": V1RunKind.JOB,
        "init": [
            {
                "artifacts": {"files": ["{{globals.run_outputs_path}}/foo"]},
                "connection": "{{connections['foo']['key']}}",
            }
        ],
        "container": {
            "image": "continuumio/miniconda3",
            "command": ["python"],
            "workingDir": "{{ globals.artifacts_path }}/repo",
            "args": ["-c \"print('Tweet tweet')\""],
            "name": "polyaxon-main",
        },
    }
    assert run_config.run.to_dict() == expected_run
    # Round-trip and re-apply: operation contexts are idempotent.
    run_config = V1CompiledOperation.read(run_config.to_dict())
    run_config = CompiledOperationSpecification.apply_operation_contexts(run_config)
    assert run_config.run.to_dict() == expected_run
    # Runtime contexts resolve globals and connection lookups concretely.
    expected_run = {
        "kind": V1RunKind.JOB,
        "init": [
            {
                "artifacts": {
                    "files": [
                        "{}/artifacts/test/outputs/foo".format(
                            container_contexts.CONTEXT_ROOT
                        )
                    ],
                },
                "connection": "connection-value",
            }
        ],
        "container": {
            "image": "continuumio/miniconda3",
            "command": ["python"],
            "workingDir": "{}/artifacts/repo".format(container_contexts.CONTEXT_ROOT),
            "args": ["-c \"print('Tweet tweet')\""],
            "name": "polyaxon-main",
        },
    }
    run_config = CompiledOperationSpecification.apply_runtime_contexts(
        run_config, contexts=contexts
    )
    assert run_config.run.to_dict() == expected_run