def start_batch_workflow_run(self, overwrite_input_params=None, overwrite_operational_options=None):
    """Start a batch workflow run.

    :param overwrite_input_params: Dictionary with parameters to be
        overwritten or added to the current workflow run.
    :type overwrite_input_params: dict
    :param overwrite_operational_options: Dictionary with operational
        options to be overwritten or added to the current workflow run.
    :type overwrite_operational_options: dict
    :raises ApiException: If the Kubernetes job creation fails.
    """
    run_name = self._workflow_run_name_generator('batch')
    batch_job = self._create_job_spec(
        run_name,
        overwrite_input_parameters=overwrite_input_params,
        overwrite_operational_options=overwrite_operational_options)
    try:
        current_k8s_batchv1_api_client.create_namespaced_job(
            body=batch_job,
            namespace=KubernetesWorkflowRunManager.default_namespace)
    except ApiException as e:
        logging.error(
            'Workflow engine/job controller pod '
            'creation failed {}'.format(e),
            exc_info=True)
        raise e
def start_batch_workflow_run(self, overwrite_input_params=None, overwrite_operational_options=None):
    """Start a batch workflow run.

    :param overwrite_input_params: Dictionary with parameters to be
        overwritten or added to the current workflow run.
    :type overwrite_input_params: dict
    :param overwrite_operational_options: Dictionary with operational
        options to be overwritten or added to the current workflow run.
    :type overwrite_operational_options: dict
    :raises ApiException: If the Kubernetes job creation fails.
    """
    run_name = self._workflow_run_name_generator("batch")
    batch_job = self._create_job_spec(
        run_name,
        overwrite_input_parameters=overwrite_input_params,
        overwrite_operational_options=overwrite_operational_options,
    )
    try:
        current_k8s_batchv1_api_client.create_namespaced_job(
            body=batch_job, namespace=REANA_RUNTIME_KUBERNETES_NAMESPACE
        )
    except ApiException as e:
        error_message = "Workflow engine/job controller pod " "creation failed {}".format(
            e
        )
        logging.error(error_message, exc_info=True)
        raise e
def start_batch_workflow_run(self):
    """Start a batch workflow run."""
    run_name = self._workflow_run_name_generator('batch')
    # Submit the generated job spec to the Kubernetes batch API.
    current_k8s_batchv1_api_client.create_namespaced_job(
        body=self._create_job_spec(run_name),
        namespace=KubernetesWorkflowRunManager.default_namespace)
def _submit(self):
    """Submit job and return its backend id."""
    try:
        current_k8s_batchv1_api_client.create_namespaced_job(
            body=self.job, namespace=REANA_RUNTIME_KUBERNETES_NAMESPACE
        )
        # The job name we put in the manifest is the backend identifier.
        return self.job["metadata"]["name"]
    except ApiException as e:
        logging.error("Error while connecting to Kubernetes" " API: {}".format(e))
    except Exception as e:
        logging.error(traceback.format_exc())
        logging.debug("Unexpected error: {}".format(e))
def start_batch_workflow_run(self):
    """Start a batch workflow run.

    :raises REANAWorkflowControllerError: If the Kubernetes job
        creation fails.
    """
    run_name = self._workflow_run_name_generator('batch')
    batch_job = self._create_job_spec(run_name)
    try:
        current_k8s_batchv1_api_client.create_namespaced_job(
            body=batch_job,
            namespace=KubernetesWorkflowRunManager.default_namespace)
    except ApiException as e:
        logging.error(
            'Workflow engine/job controller pod '
            'creation failed {}'.format(e),
            exc_info=True)
        # Wrap the backend error in the project-level exception type.
        raise REANAWorkflowControllerError(e)
def _submit(self):
    """Submit the job to Kubernetes and return its backend id.

    :returns: Name of the created Kubernetes job (the backend id), or
        ``None`` if the submission failed (the error is logged).
    """
    try:
        # The API response object is not needed: the job name set in the
        # manifest is the backend identifier (dropped the unused
        # ``api_response`` local).
        current_k8s_batchv1_api_client.create_namespaced_job(
            namespace=K8S_DEFAULT_NAMESPACE,
            body=self.job)
        return self.job['metadata']['name']
    except ApiException as e:
        logging.error("Error while connecting to Kubernetes"
                      " API: {}".format(e))
    except Exception as e:
        logging.error(traceback.format_exc())
        logging.debug("Unexpected error: {}".format(e))
def execute(self):
    """Execute a job in Kubernetes.

    Builds a Kubernetes ``batch/v1`` Job manifest from the instance
    attributes (image, command, environment variables, shared volume,
    CVMFS mounts) and submits it through the batch API client.

    :returns: The generated backend job id (a UUID string), or ``None``
        if the submission failed (the error is logged).
    """
    backend_job_id = str(uuid.uuid4())
    # Skeleton of the Job manifest; env/volume entries are filled below.
    job = {
        'kind': 'Job',
        'apiVersion': 'batch/v1',
        'metadata': {
            'name': backend_job_id,
            'namespace': K8S_DEFAULT_NAMESPACE
        },
        'spec': {
            'backoffLimit': MAX_JOB_RESTARTS,
            'autoSelector': True,
            'template': {
                'metadata': {
                    'name': backend_job_id
                },
                'spec': {
                    'containers': [
                        {
                            'image': self.docker_img,
                            'command': self.cmd,
                            'name': backend_job_id,
                            'env': [],
                            'volumeMounts': []
                        },
                    ],
                    'volumes': [],
                    'restartPolicy': 'Never'
                }
            }
        }
    }
    if self.env_vars:
        for var, value in self.env_vars.items():
            job['spec']['template']['spec']['containers'][0]['env'].append(
                {
                    'name': var,
                    'value': value
                })
    if self.shared_file_system:
        self.add_shared_volume(job)
    if self.cvmfs_mounts != 'false':
        # Map repository name -> requested mount path, keeping only the
        # mounts that correspond to a known CVMFS repository.
        cvmfs_map = {}
        for cvmfs_mount_path in ast.literal_eval(self.cvmfs_mounts):
            if cvmfs_mount_path in CVMFS_REPOSITORIES:
                cvmfs_map[
                    CVMFS_REPOSITORIES[cvmfs_mount_path]] = \
                    cvmfs_mount_path
        for repository, mount_path in cvmfs_map.items():
            volume = get_k8s_cvmfs_volume(repository)
            (job['spec']['template']['spec']['containers'][0]
                ['volumeMounts'].append({
                    'name': volume['name'],
                    'mountPath': '/cvmfs/{}'.format(mount_path)
                }))
            job['spec']['template']['spec']['volumes'].append(volume)
    try:
        # The API response object is unused; the generated UUID is the
        # backend id (dropped the unused ``api_response`` local).
        current_k8s_batchv1_api_client.create_namespaced_job(
            namespace=K8S_DEFAULT_NAMESPACE, body=job)
        return backend_job_id
    except ApiException as e:
        # Log at error level so failed submissions are visible under the
        # default logging configuration (previously logged at debug only,
        # unlike the sibling ``_submit`` implementations).
        logging.error("Error while connecting to Kubernetes"
                      " API: {}".format(e))
    except Exception as e:
        logging.error(traceback.format_exc())
        logging.debug("Unexpected error: {}".format(e))
def k8s_instantiate_job(job_id, workflow_workspace, docker_img, cmd,
                        cvmfs_mounts, env_vars, shared_file_system,
                        job_type, namespace='default'):
    """Create Kubernetes job.

    :param job_id: Job uuid.
    :param workflow_workspace: Absolute path to the job's workflow
        workspace.
    :param docker_img: Docker image to run the job.
    :param cmd: Command provided to the docker container.
    :param cvmfs_mounts: List of CVMFS volumes to mount in job pod.
    :param env_vars: Dictionary representing environment variables as
        {'var_name': 'var_value'}.
    :param shared_file_system: Boolean which represents whether the job
        should have a shared file system mounted.
    :param job_type: Type of the job.
        NOTE(review): not used inside this function body — confirm
        whether callers rely on it before removing.
    :param namespace: Job's namespace.
    :returns: A :class:`kubernetes.client.models.v1_job.V1Job`
        corresponding to the created job, None if the creation could not
        take place (the error is logged).
    """
    # Kubernetes ``batch/v1`` Job manifest skeleton; container command,
    # env vars and volumes are filled in below.
    job = {
        'kind': 'Job',
        'apiVersion': 'batch/v1',
        'metadata': {
            'name': job_id,
            'namespace': namespace
        },
        'spec': {
            'backoffLimit': app.config['MAX_JOB_RESTARTS'],
            'autoSelector': True,
            'template': {
                'metadata': {
                    'name': job_id
                },
                'spec': {
                    'containers': [
                        {
                            'name': job_id,
                            'image': docker_img,
                            'env': [],
                            'volumeMounts': []
                        },
                    ],
                    'volumes': [],
                    'restartPolicy': 'Never'
                }
            }
        }
    }
    if cmd:
        import shlex
        # Split the shell-style command string into the exec-style list
        # expected by the container spec.
        (job['spec']['template']['spec']['containers'][0]['command']
         ) = shlex.split(cmd)
    if env_vars:
        for var, value in env_vars.items():
            job['spec']['template']['spec']['containers'][0]['env'].append({
                'name': var,
                'value': value
            })
    if shared_file_system:
        add_shared_volume(job, workflow_workspace)
    if cvmfs_mounts != 'false':
        # Map repository name -> requested mount path, keeping only the
        # mounts that correspond to a known CVMFS repository.
        cvmfs_map = {}
        for cvmfs_mount_path in ast.literal_eval(cvmfs_mounts):
            if cvmfs_mount_path in CVMFS_REPOSITORIES:
                cvmfs_map[
                    CVMFS_REPOSITORIES[cvmfs_mount_path]] = cvmfs_mount_path
        for repository, mount_path in cvmfs_map.items():
            volume = get_k8s_cvmfs_volume(repository)
            (job['spec']['template']['spec']['containers'][0]
                ['volumeMounts'].append({
                    'name': volume['name'],
                    'mountPath': '/cvmfs/{}'.format(mount_path)
                }))
            job['spec']['template']['spec']['volumes'].append(volume)
    # TODO: add better handling of submission failures (currently the
    # ApiException is only logged at debug level and None is returned).
    try:
        api_response = \
            current_k8s_batchv1_api_client.create_namespaced_job(
                namespace=namespace, body=job)
        return api_response
    except client.rest.ApiException as e:
        logging.debug("Error while connecting to Kubernetes API: {}".format(e))
    except Exception as e:
        logging.error(traceback.format_exc())
        logging.debug("Unexpected error: {}".format(e))