def submit(
        self,  # type: ignore
        save_as_name: Union[str, os.PathLike, None] = None,
        file: Union[str, os.PathLike, None] = None,
        job_resource: Optional[BaseJob] = None,
        **kwargs: Any) -> Optional[JobBaseResource]:
    # Build the job resource from the YAML file when one is given;
    # otherwise the caller must supply job_resource directly.
    if file:
        yaml_job = load_yaml(file)
        job_resource = self._create_job_resource(cfg=yaml_job, file=file, **kwargs)
    rest_job_resource = job_resource.translate_to_rest_object()
    result = self._operation.create_or_update(
        id=rest_job_resource.name,  # type: ignore
        subscription_id=self._workspace_scope.subscription_id,
        resource_group_name=self._workspace_scope.resource_group_name,
        workspace_name=self._workspace_name,
        body=rest_job_resource,
        api_version=API_VERSION_2020_09_01_PREVIEW,
        **self._kwargs)  # TODO: set default api_version to this
    if save_as_name is not None:
        # Serialize the created job back to YAML for the caller.
        yaml_serialized = self._dump(result)
        with open(save_as_name, 'w') as f:
            yaml.dump(yaml_serialized, f, default_flow_style=False)
    return result
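# Hedged usage sketch (not from the source): assumes `job_operations` is an
# already-constructed instance of the class defining submit() above, and that
# "job.yml" is a local job definition. Passing `file` builds the job resource
# from YAML; `save_as_name` writes the service response back out as YAML.
created = job_operations.submit(file="job.yml", save_as_name="job.created.yml")
if created is not None:
    print(created)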
def load_from_file(self, data, **kwargs):
    if isinstance(data, str) and data.startswith(YAML_FILE_PREFIX):
        self._previous_base_path = Path(self.context[BASE_PATH_CONTEXT_KEY])
        # Use the referenced path directly if it is absolute;
        # otherwise resolve it against the current base path.
        path = Path(data[len(YAML_FILE_PREFIX):])
        if not path.is_absolute():
            path = self._previous_base_path / path
        path = path.resolve()
        # Push the new base path so nested file references resolve
        # relative to the referenced file's directory.
        self.context[BASE_PATH_CONTEXT_KEY] = path.parent
        data = load_yaml(path)
        return data
    return data
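# Illustration only (not from the source): a standalone sketch of the path
# resolution that load_from_file performs for prefixed file references.
# The prefix value "file:" and the sample paths are assumptions for the sketch.
from pathlib import Path

YAML_FILE_PREFIX_EXAMPLE = "file:"            # assumed prefix value
base_path = Path("/workspace/configs")        # assumed current base path
reference = "file:env/conda.yml"              # assumed relative reference

candidate = Path(reference[len(YAML_FILE_PREFIX_EXAMPLE):])
if not candidate.is_absolute():
    candidate = base_path / candidate         # relative refs resolve against the base path
candidate = candidate.resolve()
print(candidate)                              # e.g. /workspace/configs/env/conda.yml
new_base = candidate.parent                   # nested refs would resolve against this directory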
def _load(self, file: Union[str, os.PathLike, None], **kwargs: Any) -> InternalModel:
    cfg = load_yaml(file)
    context = {
        BASE_PATH_CONTEXT_KEY: Path("./") if file is None else Path(file).parent,
        WORKSPACE_CONTEXT_KEY: self._workspace_scope,
        PARAMS_OVERRIDE_KEY: kwargs.get(PARAMS_OVERRIDE_KEY, None),
    }
    try:
        return ModelSchema(context=context).load(cfg, unknown=RAISE)  # type: ignore
    except ValidationError as e:
        raise Exception(f"Error while parsing yaml file: {file}\n\n{str(e)}") from e
def _load_endpoint(self, file: Union[str, os.PathLike], endpoint_type: str) -> InternalEndpoint:
    config = load_yaml(file)
    if not endpoint_type:
        # Fall back to the "type" field in the YAML when no explicit type is given.
        endpoint_type = config["type"]
    self._throw_if_no_endpoint_type(endpoint_type)
    context = {
        BASE_PATH_CONTEXT_KEY: Path(file).parent,
        WORKSPACE_CONTEXT_KEY: self._workspace_scope,
    }
    try:
        if endpoint_type.lower() == ONLINE_ENDPOINT_TYPE:
            return OnlineEndpointSchema(context=context).load(config, unknown=RAISE)
        return BatchEndpointSchema(context=context).load(config, unknown=RAISE)
    except ValidationError as e:
        raise Exception(f"Error while parsing yaml file: {file}\n\n{str(e)}") from e
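# Illustration only (not from the source): the schema loads above pass
# unknown=RAISE, so unexpected YAML keys surface as a marshmallow
# ValidationError, which is then re-raised with the file name. This
# standalone sketch uses a hypothetical TinySchema to show that behaviour.
from marshmallow import Schema, fields, ValidationError, RAISE

class TinySchema(Schema):
    name = fields.Str(required=True)

try:
    TinySchema().load({"name": "ok", "unexpected": 1}, unknown=RAISE)
except ValidationError as e:
    print(e.messages)    # {'unexpected': ['Unknown field.']}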
def translate_to_rest_object(self) -> EnvironmentSpecificationVersionResource:
    # TODO: Revisit this to have validation done by schema as a standard practice
    self.validate()
    properties = {}
    base_path = self._base_path
    if self.path is not None:
        base_path = Path(base_path, self.path)
    if self.conda_file is not None:
        # Inline the conda environment definition as serialized YAML.
        conda = load_yaml(Path(base_path, self.conda_file))
        properties.update({"conda_file": yaml.dump(conda)})
    if self.docker is not None:
        if self.docker.image is not None:
            docker_image = DockerImage(docker_image_uri=self.docker.image)
            properties.update({"docker": docker_image})
        if self.docker.build is not None:
            # Note: a Dockerfile build overwrites a prebuilt image if both are set.
            docker_build = DockerBuild(
                dockerfile=load_file(Path(base_path, self.docker.build.get("dockerfile"))))
            properties.update({"docker": docker_build})
    environment_specification_version = EnvironmentSpecificationVersionResource(
        properties=EnvironmentSpecificationVersion(**properties))
    return environment_specification_version
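# Illustration only (not from the source): mirrors how the conda_file branch above
# inlines a conda environment as serialized YAML inside the REST properties dict.
# The environment name and contents are assumptions for the sketch.
import yaml

conda_yaml_text = """
name: sample-env
dependencies:
  - python=3.8
  - pip
"""
conda = yaml.safe_load(conda_yaml_text)            # stands in for load_yaml(...)
properties = {"conda_file": yaml.dump(conda)}      # re-serialized, as in the method above
print(properties["conda_file"])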