def __init__(self, skil, training_config, distributed_config, job_id=None, create=True):
    """Wraps a SKIL training job.

    With ``create=True`` a new job is registered on the server from the given
    configs; with ``create=False`` the existing job identified by ``job_id``
    is looked up instead.
    """
    super(TrainingJob, self).__init__()
    self.skil = skil
    self.training_config = training_config
    self.tm = distributed_config.to_json()
    if create:
        training_create_job_request = skil_client.CreateJobRequest(
            compute_resource_id=self.training_config.compute_id,
            storage_resource_id=self.training_config.storage_id,
            job_args=self._training_job_args(),
            output_file_name=self.training_config.output_path)
        response = self.skil.api.create_job("TRAINING", training_create_job_request)
    else:
        # Attach to an existing job instead of creating a new one.
        response = self.skil.api.get_job_by_id(job_id)
        assert response.job_id == job_id
    self.job_id = response.job_id
    self.run_id = response.run_id
    self.status = response.status
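# Hedged usage sketch for the constructor above: `TrainingJob` and its keyword
# arguments are taken from the code shown here, while `skil_server`,
# `training_config` and `distributed_config` are placeholder objects standing
# in for whatever connection and config objects your setup provides (not
# confirmed skil-python API).
#
#   # Register a brand-new training job on the server:
#   job = TrainingJob(skil_server, training_config, distributed_config)
#
#   # Later, re-attach to that job without creating a new one:
#   same_job = TrainingJob(skil_server, training_config, distributed_config,
#                          job_id=job.job_id, create=False)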
def __init__(self, skil, inference_config):
    self.skil = skil
    self.inference_config = inference_config
    inference_create_job_request = skil_client.CreateJobRequest(
        compute_resource_id=self.inference_config.compute_id,
        storage_resource_id=self.inference_config.storage_id,
        job_args=self._inference_job_args(),
        output_file_name=self.inference_config.output_path)
    self.skil.api.create_job("INFERENCE", inference_create_job_request)
def __init__(self, skil, training_config, distributed_config=None):
    self.skil = skil
    self.training_config = training_config
    self.tm = distributed_config
    training_create_job_request = skil_client.CreateJobRequest(
        compute_resource_id=self.training_config.compute_id,
        storage_resource_id=self.training_config.storage_id,
        job_args=self._training_job_args(),
        output_file_name=self.training_config.output_path)
    # TODO: why do we need to specify the training type here if the request already knows it?
    self.skil.api.create_job("TRAINING", training_create_job_request)
def __init__(self, skil, inference_config, job_id=None, create=True):
    """Wraps a SKIL inference job.

    With ``create=True`` a new job is registered on the server; with
    ``create=False`` the existing job identified by ``job_id`` is looked up
    instead.
    """
    super(InferenceJob, self).__init__()
    self.skil = skil
    self.inference_config = inference_config
    if create:
        inference_create_job_request = skil_client.CreateJobRequest(
            compute_resource_id=self.inference_config.compute_id,
            storage_resource_id=self.inference_config.storage_id,
            job_args=self._inference_job_args(),
            output_file_name=self.inference_config.output_path)
        response = self.skil.api.create_job("INFERENCE", inference_create_job_request)
    else:
        # Attach to an existing job instead of creating a new one.
        response = self.skil.api.get_job_by_id(job_id)
        assert response.job_id == job_id
    self.job_id = response.job_id
    self.run_id = response.run_id
    self.status = response.status
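# The same create/attach pattern applies here, sketched under the same
# placeholder assumptions as the training example above:
#
#   # Register a new inference job:
#   job = InferenceJob(skil_server, inference_config)
#
#   # Re-attach to it by id instead of creating another one:
#   same_job = InferenceJob(skil_server, inference_config,
#                           job_id=job.job_id, create=False)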