def get_op_specification(self, params=None, profile=None, queue=None, nocache=None):
    """Build an op specification from this file's spec plus run-time overrides.

    Args:
        params: optional mapping of param overrides; must be a Mapping.
        profile: optional run profile name to attach.
        queue: optional queue name to attach.
        nocache: optional flag; recorded even when explicitly False.

    Returns:
        The merged op specification.

    Raises:
        PolyaxonfileError: if `params` is not a valid mapping.
    """
    op_data = {"version": self.specification.version, "kind": kinds.OP}
    if params:
        if not isinstance(params, Mapping):
            raise PolyaxonfileError(
                "Params: `{}` must be a valid mapping".format(params)
            )
        op_data["params"] = params
    for key, value in (("profile", profile), ("queue", queue)):
        if value:
            op_data[key] = value
    # `nocache is not None` so an explicit False is still recorded.
    if nocache is not None:
        op_data["nocache"] = nocache

    config_dict = self.specification.config.to_dict()
    if self.specification.is_op:
        result = get_specification(data=[config_dict, op_data])
    else:
        op_data["component"] = config_dict
        result = get_specification(data=[op_data])

    # Sanity check if params were passed
    run_spec = get_specification(result.generate_run_data())
    run_spec.validate_params(params=params, is_template=False)
    if run_spec.has_dag_run:
        run_spec.apply_context()
    return result
def __init__(self, filepaths):
    """Load one or more polyaxonfiles into a single merged specification.

    Args:
        filepaths: a path or list of paths to existing polyaxonfiles.

    Raises:
        PolyaxonfileError: if any of the paths is not an existing file.
    """
    paths = to_list(filepaths)
    # Validate every path before reading anything; fail on the first bad one.
    invalid = [path for path in paths if not os.path.isfile(path)]
    if invalid:
        raise PolyaxonfileError("`{}` must be a valid file".format(invalid[0]))
    self._filenames = [os.path.basename(path) for path in paths]
    self.specification = get_specification(data=reader.read(paths))
def create(
    self,
    name=None,
    tags=None,
    description=None,
    content=None,
    base_outputs_path=None,
):
    """Create a run (remotely when a client is configured) and begin tracking it.

    Args:
        name: optional display name for the run.
        tags: optional list of tags.
        description: optional run description.
        content: optional specification content; when absent the run is
            flagged as not managed.
        base_outputs_path: optional base path used to configure the outputs
            store as `<base>/<owner>/<project>/<run_uuid>`.

    Returns:
        self, so the call can be chained.

    Raises:
        PolyaxonClientException: if the content cannot be parsed into a
            specification, the API call fails, or the API returns no run.
    """
    run = polyaxon_sdk.V1Run()
    if self.track_env:
        run.run_env = get_run_env()
    if name:
        run.name = name
    if tags:
        run.tags = tags
    if description:
        run.description = description
    if content:
        # Validate the content by parsing it into a specification first;
        # any parsing error is surfaced as a client exception.
        try:
            specification = get_specification(data=[content])
        except Exception as e:
            raise PolyaxonClientException(e)
        run.content = specification.config_dump
    else:
        # No content means this run is not managed by the platform.
        run.is_managed = False
    if self.client:
        # Create the run server-side; `run` is replaced by the API's response.
        try:
            run = self.client.runs_v1.create_run(owner=self.owner, project=self.project, body=run)
        except (ApiException, HTTPError) as e:
            raise PolyaxonClientException(e)
        if not run:
            raise PolyaxonClientException("Could not create a run.")
    if not settings.CLIENT_CONFIG.is_managed and self.track_logs:
        setup_logging(send_logs=self.send_logs)
    self._run = run
    self._run_uuid = run.uuid
    self.status = "created"
    # Setup the outputs store
    if self.outputs_store is None and base_outputs_path:
        outputs_path = "{}/{}/{}/{}".format(base_outputs_path, self.owner, self.project, self.run_uuid)
        self.set_outputs_store(outputs_path=outputs_path)
    if self.track_code:
        self.log_code_ref()
    # Unmanaged clients start tracking immediately; managed runs register
    # a wait handler instead.
    if not settings.CLIENT_CONFIG.is_managed:
        self._start()
    else:
        self._register_wait()
    return self
def test_op_specification_with_override_info(self):
    """Op-level info (name/description/tags) overrides the component's and
    survives into the generated run data; an environment override surfaces
    on the generated spec."""
    op_values = {
        "version": 0.6,
        "kind": "op",
        "name": "foo",
        "description": "a description",
        "tags": ["value"],
        "component_ref": {"name": "foo"},
        "params": {"param1": "foo", "param2": "bar"},
        "trigger": "all_succeeded",
        "component": {
            "name": "build-template",
            "tags": ["kaniko"],
            "init": {"repos": [{"name": "foo", "branch": "dev"}]},
            "container": {"image": "test"},
        },
    }

    op_spec = OpSpecification.read(values=op_values)
    assert op_spec.name == "foo"
    assert op_spec.description == "a description"
    assert op_spec.tags == ["value"]

    generated = get_specification(op_spec.generate_run_data())
    # The op info wins over the component's own name/tags.
    assert generated.config.name == "foo"
    assert generated.config.description == "a description"
    assert generated.tags == ["value"]
    assert generated.init.to_light_dict() == {
        "repos": [{"name": "foo", "branch": "dev"}]
    }
    assert generated.environment is None

    env = {
        "environment": {
            "resources": {
                "requests": {"gpu": 1, "tpu": 1},
                "limits": {"gpu": 1, "tpu": 1},
            }
        }
    }
    generated = get_specification(op_spec.generate_run_data(env))
    assert generated.environment.to_light_dict() == env["environment"]
def test_build_run_pipeline(self):
    """End-to-end check of the build->run pipeline fixture.

    Loads the fixture, applies the context, and verifies the workflow's
    ops and components, then generates and resolves an op spec for the
    "run" step.
    """
    plx_file = PolyaxonFile(
        os.path.abspath("tests/fixtures/pipelines/build_run_pipeline.yml"))
    spec = plx_file.specification
    spec = spec.apply_context()
    # The pipeline declares exactly two ops: build then run.
    assert len(spec.workflow_strategy.ops) == 2
    assert spec.workflow_strategy.ops[0].name == "build"
    assert spec.workflow_strategy.ops[1].name == "run"
    assert spec.config.workflow is not None
    assert spec.has_dag is True
    assert spec.has_pipeline is True
    assert spec.config.schedule is None
    # Two inline components back the two ops; templates are still
    # unresolved ({{ ... }} placeholders) at this point.
    assert len(spec.workflow_strategy.components) == 2
    assert spec.workflow_strategy.components[
        0].name == "experiment-template"
    assert spec.workflow_strategy.components[0].container.to_dict() == {
        "image": "{{ image }}",
        "command": ["python3", "main.py"],
        "args": "--lr={{ lr }}",
    }
    assert spec.workflow_strategy.components[1].name == "build-template"
    assert spec.workflow_strategy.components[1].container.to_light_dict(
    ) == {
        "image": "base"
    }
    assert spec.workflow_strategy.components[1].init.build.to_light_dict(
    ) == {
        "image": "base",
        "env": "{{ env_vars }}",
        "name": POLYAXON_DOCKERFILE_NAME,
        "workdir": POLYAXON_DOCKER_WORKDIR,
        "shell": POLYAXON_DOCKER_SHELL,
    }
    # Create an op spec for the "run" op and check its params.
    spec.workflow_strategy.set_op_component("run")
    assert spec.workflow_strategy.ops[1].component is not None
    job_spec = OpSpecification(spec.workflow_strategy.ops[1].to_dict())
    assert job_spec.config.params == {
        "image": "{{ ops.build.outputs.docker-image }}",
        "lr": 0.001,
    }
    op_spec = get_specification(job_spec.generate_run_data())
    assert op_spec.is_component is True
    # Applying params and contexts materializes the container templates.
    op_spec.apply_params({"image": "foo", "lr": 0.001})
    op_spec = op_spec.apply_context()
    op_spec = op_spec.apply_container_contexts()
    assert op_spec.config.container.to_dict() == {
        "image": "foo",
        "command": ["python3", "main.py"],
        "args": "--lr=0.001",
    }
def generate(polyaxonfile, build_context, destination, params):
    """Generate a dockerfile given the polyaxonfile."""
    # The two sources are mutually exclusive: bail out when both are given.
    if polyaxonfile and build_context:
        Printer.print_error(
            "Only a polyaxonfile or a build context option is required.")
        sys.exit(1)

    if build_context:
        # Direct build-context path: parse the provided context config.
        try:
            build_context = BuildContextConfig.from_dict(
                rhea.read(build_context))
        except (RheaError, ValidationError) as e:
            Printer.print_error("received a non valid build context.")
            Printer.print_error("Error message: {}.".format(e))
            sys.exit(1)
    else:
        # Polyaxonfile path: resolve the spec locally to get its build context.
        spec = check_polyaxonfile(polyaxonfile, params=params, log=False)
        try:
            resolved = get_specification(spec.generate_run_data())
            resolved.apply_params(params=spec.config.params)
            resolved.apply_context()
        except PolyaxonSchemaError:
            Printer.print_error(
                "Could not run this polyaxonfile locally, "
                "a context is required to resolve it dependencies.")
            sys.exit(1)
        build_context = resolved.build_context

    dockerfile_generator = DockerFileGenerator(
        build_context=build_context, destination=destination or ".")
    dockerfile_generator.create()
    Printer.print_success("Dockerfile was generated: `{}`".format(
        dockerfile_generator.dockerfile_path))
def run(
    ctx,
    project,
    polyaxonfile,
    name,
    tags,
    description,
    upload,
    log,
    local,
    conda_env,
    params,
    profile,
    nocache,
):
    """Run polyaxonfile specification.

    Examples:

    \b
    ```bash
    $ polyaxon run -f file -f file_override ...
    ```

    Upload before running

    \b
    ```bash
    $ polyaxon run -f file -u
    ```

    Run and set description and tags for this run

    \b
    ```bash
    $ polyaxon run -f file -u --description="Description of the current run" --tags="foo, bar, moo"
    ```

    Run and set a unique name for this run

    \b
    ```bash
    polyaxon run --name=foo
    ```

    Run for a specific project

    \b
    ```bash
    $ polyaxon run -p project1 -f file.yaml
    ```

    Run with updated params

    \b
    ```bash
    $ polyaxon run -p project1 -f file.yaml -P param1=234.2 -P param2=relu
    ```
    """
    # Validate the polyaxonfile and fold in CLI overrides (params/profile/nocache).
    specification = check_polyaxonfile(polyaxonfile,
                                       params=params,
                                       profile=profile,
                                       nocache=nocache,
                                       log=False)
    owner, project_name = get_project_or_local(project)
    tags = validate_tags(tags)
    if local:
        # Local execution requires the spec to be fully resolvable here.
        try:
            run_spec = get_specification(specification.generate_run_data())
            run_spec.apply_context()
        except PolyaxonSchemaError:
            Printer.print_error(
                "Could not run this polyaxonfile locally, "
                "a context is required to resolve it dependencies.")
            sys.exit(1)
        # Dispatch to conda or docker depending on the --conda_env flag.
        if conda_env:
            conda_run(
                ctx=ctx,
                name=name,
                owner=owner,
                project_name=project_name,
                description=description,
                tags=tags,
                specification=run_spec,
                log=log,
                conda_env=conda_env,
            )
        else:
            docker_run(
                ctx=ctx,
                name=name,
                owner=owner,
                project_name=project_name,
                description=description,
                tags=tags,
                specification=run_spec,
                log=log,
            )
    else:
        # Platform execution; upload only when both --upload and a project
        # were provided.
        platform_run(
            ctx=ctx,
            name=name,
            owner=owner,
            project_name=project_name,
            description=description,
            tags=tags,
            specification=specification,
            upload=upload,
            log=log,
            can_upload=all([upload, project]),
        )