def test_format_cmd():
    """Test format_cmd."""
    test_cmd = "ls -l"
    test_cmd_fail = 12
    assert isinstance(format_cmd(test_cmd), list)
    with raises(ValueError):
        format_cmd(test_cmd_fail)
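# For reference, a minimal sketch of the behaviour that test_format_cmd()
# exercises; the real format_cmd comes from the REANA code base and may
# differ in details (error message, accepted types).
def format_cmd_sketch(cmd):
    """Return the command as a list suitable for container ``args``."""
    if isinstance(cmd, str):
        return [cmd]
    if isinstance(cmd, list):
        return cmd
    raise ValueError(
        "Command should be a string or a list, got {}".format(type(cmd))
    )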
def _create_job_spec(self, name, command=None, image=None,
                     env_vars=None):
    """Instantiate a Kubernetes job.

    :param name: Name of the job.
    :param image: Docker image to use to run the job.
    :param command: List of commands to run on the given job.
    :param env_vars: List of environment variables (dictionaries) to
        inject into the workflow engine container.
    """
    image = image or self._workflow_engine_image()
    command = command or self._workflow_engine_command()
    workflow_engine_env_vars = env_vars or self._workflow_engine_env_vars()
    job_controller_env_vars = []
    owner_id = str(self.workflow.owner_id)
    command = format_cmd(command)
    workspace_mount, _ = get_shared_volume(
        self.workflow.get_workspace(), SHARED_VOLUME_PATH
    )
    db_mount, _ = get_shared_volume(
        'db', SHARED_VOLUME_PATH
    )

    workflow_metadata = client.V1ObjectMeta(name=name)
    job = client.V1Job()
    job.api_version = 'batch/v1'
    job.kind = 'Job'
    job.metadata = workflow_metadata
    spec = client.V1JobSpec(
        template=client.V1PodTemplateSpec())
    spec.template.metadata = workflow_metadata
    workflow_engine_container = client.V1Container(
        name=current_app.config['WORKFLOW_ENGINE_NAME'],
        image=image,
        image_pull_policy='IfNotPresent',
        env=[],
        volume_mounts=[],
        command=['/bin/bash', '-c'],
        args=command)
    job_controller_address = [
        {
            'name': 'JOB_CONTROLLER_SERVICE_PORT_HTTP',
            'value': str(current_app.config['JOB_CONTROLLER_CONTAINER_PORT'])
        },
        {
            'name': 'JOB_CONTROLLER_SERVICE_HOST',
            'value': 'localhost'
        }
    ]
    workflow_engine_env_vars.extend(job_controller_address)
    workflow_engine_container.env.extend(workflow_engine_env_vars)
    workflow_engine_container.security_context = \
        client.V1SecurityContext(
            run_as_group=WORKFLOW_RUNTIME_USER_GID,
            run_as_user=WORKFLOW_RUNTIME_USER_UID
        )
    workflow_engine_container.volume_mounts = [workspace_mount]
    secrets_store = REANAUserSecretsStore(owner_id)
    job_controller_env_secrets = secrets_store.\
        get_env_secrets_as_k8s_spec()
    user = \
        secrets_store.get_secret_value('HTCONDORCERN_USERNAME') or \
        WORKFLOW_RUNTIME_USER_NAME
    job_controller_container = client.V1Container(
        name=current_app.config['JOB_CONTROLLER_NAME'],
        image=current_app.config['JOB_CONTROLLER_IMAGE'],
        image_pull_policy='IfNotPresent',
        env=[],
        volume_mounts=[],
        command=['/bin/bash', '-c'],
        args=self._create_job_controller_startup_cmd(user),
        ports=[])
    if os.getenv('FLASK_ENV') == 'development':
        job_controller_env_vars.extend(
            current_app.config['DEBUG_ENV_VARS'])
    job_controller_env_vars.extend([
        {
            'name': 'REANA_USER_ID',
            'value': owner_id
        },
        {
            'name': 'CERN_USER',
            'value': user
        },
        {
            'name': 'USER',  # Required by HTCondor
            'value': user
        }
    ])
    job_controller_container.env.extend(job_controller_env_vars)
    job_controller_container.env.extend(job_controller_env_secrets)
    job_controller_container.env.extend([
        {
            'name': 'REANA_SQLALCHEMY_DATABASE_URI',
            'value': SQLALCHEMY_DATABASE_URI
        },
        {
            'name': 'REANA_STORAGE_BACKEND',
            'value': REANA_STORAGE_BACKEND
        }
    ])
    secrets_volume_mount = \
        secrets_store.get_secrets_volume_mount_as_k8s_spec()
    job_controller_container.volume_mounts = [workspace_mount, db_mount]
    job_controller_container.volume_mounts.append(secrets_volume_mount)
    job_controller_container.ports = [{
        "containerPort":
            current_app.config['JOB_CONTROLLER_CONTAINER_PORT']
    }]
    containers = [workflow_engine_container, job_controller_container]
    spec.template.spec = client.V1PodSpec(
        containers=containers)
    spec.template.spec.volumes = [
        KubernetesWorkflowRunManager.k8s_shared_volume[
            REANA_STORAGE_BACKEND],
        secrets_store.get_file_secrets_volume_as_k8s_specs(),
    ]
    job.spec = spec
    job.spec.template.spec.restart_policy = 'Never'
    job.spec.ttl_seconds_after_finished = TTL_SECONDS_AFTER_FINISHED
    job.spec.backoff_limit = 0
    return job
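# Hypothetical caller sketch (not part of the module above): submitting the
# V1Job returned by _create_job_spec() with the official Kubernetes Python
# client. The namespace default and the in-cluster configuration are
# assumptions.
from kubernetes import client, config


def submit_batch_job(job, namespace="default"):
    """Create the job in the cluster and return the server-side object."""
    config.load_incluster_config()  # assumes we run inside the cluster
    return client.BatchV1Api().create_namespaced_job(
        namespace=namespace, body=job
    )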
def _create_job_spec(
    self,
    name,
    command=None,
    image=None,
    env_vars=None,
    overwrite_input_parameters=None,
    overwrite_operational_options=None,
):
    """Instantiate a Kubernetes job.

    :param name: Name of the job.
    :param image: Docker image to use to run the job.
    :param command: List of commands to run on the given job.
    :param env_vars: List of environment variables (dictionaries) to
        inject into the workflow engine container.
    :param overwrite_input_parameters: Dictionary with parameters to be
        overwritten or added to the current workflow run.
    :type overwrite_input_parameters: dict
    :param overwrite_operational_options: Dictionary with operational
        options to be overwritten or added to the current workflow run.
    :type overwrite_operational_options: dict
    """
    image = image or self._workflow_engine_image()
    command = command or self._workflow_engine_command(
        overwrite_input_parameters=overwrite_input_parameters,
        overwrite_operational_options=overwrite_operational_options,
    )
    workflow_engine_env_vars = env_vars or self._workflow_engine_env_vars()
    job_controller_env_vars = []
    owner_id = str(self.workflow.owner_id)
    command = format_cmd(command)
    workspace_mount, workspace_volume = get_workspace_volume(
        self.workflow.workspace_path)
    db_mount, shared_volume = get_shared_volume("db")

    workflow_metadata = client.V1ObjectMeta(
        name=name,
        labels={
            "reana_workflow_mode": "batch",
            "reana-run-batch-workflow-uuid": str(self.workflow.id_),
        },
        namespace=REANA_RUNTIME_KUBERNETES_NAMESPACE,
    )

    secrets_store = REANAUserSecretsStore(owner_id)

    kerberos = None
    if self.requires_kerberos():
        kerberos = get_kerberos_k8s_config(
            secrets_store,
            kubernetes_uid=WORKFLOW_RUNTIME_USER_UID,
        )

    job = client.V1Job()
    job.api_version = "batch/v1"
    job.kind = "Job"
    job.metadata = workflow_metadata
    spec = client.V1JobSpec(template=client.V1PodTemplateSpec())
    spec.template.metadata = workflow_metadata

    workflow_engine_container = client.V1Container(
        name=current_app.config["WORKFLOW_ENGINE_NAME"],
        image=image,
        image_pull_policy="IfNotPresent",
        env=[],
        volume_mounts=[],
        command=["/bin/bash", "-c"],
        args=command,
    )
    workflow_engine_env_vars.extend([
        {
            "name": "REANA_JOB_CONTROLLER_SERVICE_PORT_HTTP",
            "value": str(current_app.config["JOB_CONTROLLER_CONTAINER_PORT"]),
        },
        {
            "name": "REANA_JOB_CONTROLLER_SERVICE_HOST",
            "value": "localhost",
        },
        {
            "name": "REANA_COMPONENT_PREFIX",
            "value": REANA_COMPONENT_PREFIX,
        },
        {
            "name": "REANA_COMPONENT_NAMING_SCHEME",
            "value": REANA_COMPONENT_NAMING_SCHEME,
        },
        {
            "name": "REANA_INFRASTRUCTURE_KUBERNETES_NAMESPACE",
            "value": REANA_INFRASTRUCTURE_KUBERNETES_NAMESPACE,
        },
        {
            "name": "REANA_RUNTIME_KUBERNETES_NAMESPACE",
            "value": REANA_RUNTIME_KUBERNETES_NAMESPACE,
        },
        {
            "name": "REANA_JOB_CONTROLLER_CONNECTION_CHECK_SLEEP",
            "value": str(REANA_JOB_CONTROLLER_CONNECTION_CHECK_SLEEP),
        },
    ])
    workflow_engine_container.env.extend(workflow_engine_env_vars)
    workflow_engine_container.security_context = client.V1SecurityContext(
        run_as_group=WORKFLOW_RUNTIME_USER_GID,
        run_as_user=WORKFLOW_RUNTIME_USER_UID,
    )
    workflow_engine_container.volume_mounts = [workspace_mount]
    if kerberos:
        workflow_engine_container.volume_mounts += kerberos.volume_mounts
        workflow_engine_container.env += kerberos.env

    job_controller_env_secrets = secrets_store.get_env_secrets_as_k8s_spec()
    user = secrets_store.get_secret_value(
        "CERN_USER") or WORKFLOW_RUNTIME_USER_NAME
    job_controller_container = client.V1Container(
        name=current_app.config["JOB_CONTROLLER_NAME"],
        image=current_app.config["JOB_CONTROLLER_IMAGE"],
        image_pull_policy="IfNotPresent",
        env=[],
        volume_mounts=[],
        command=["/bin/bash", "-c"],
        args=self._create_job_controller_startup_cmd(user),
        ports=[],
    )
    job_controller_env_vars.extend([
        {
            "name": "REANA_USER_ID",
            "value": owner_id,
        },
        {
            "name": "CERN_USER",
            "value": user,
        },
        {
            "name": "USER",  # Required by HTCondor
            "value": user,
        },
        {
            "name": "K8S_CERN_EOS_AVAILABLE",
            "value": K8S_CERN_EOS_AVAILABLE,
        },
        {
            "name": "IMAGE_PULL_SECRETS",
            "value": ",".join(IMAGE_PULL_SECRETS),
        },
        {
            "name": "REANA_SQLALCHEMY_DATABASE_URI",
            "value": SQLALCHEMY_DATABASE_URI,
        },
        {
            "name": "REANA_STORAGE_BACKEND",
            "value": REANA_STORAGE_BACKEND,
        },
        {
            "name": "REANA_COMPONENT_PREFIX",
            "value": REANA_COMPONENT_PREFIX,
        },
        {
            "name": "REANA_COMPONENT_NAMING_SCHEME",
            "value": REANA_COMPONENT_NAMING_SCHEME,
        },
        {
            "name": "REANA_INFRASTRUCTURE_KUBERNETES_NAMESPACE",
            "value": REANA_INFRASTRUCTURE_KUBERNETES_NAMESPACE,
        },
        {
            "name": "REANA_RUNTIME_KUBERNETES_NAMESPACE",
            "value": REANA_RUNTIME_KUBERNETES_NAMESPACE,
        },
        {
            "name": "REANA_JOB_HOSTPATH_MOUNTS",
            "value": json.dumps(REANA_JOB_HOSTPATH_MOUNTS),
        },
        {
            "name": "REANA_RUNTIME_KUBERNETES_KEEP_ALIVE_JOBS_WITH_STATUSES",
            "value": ",".join(
                REANA_RUNTIME_KUBERNETES_KEEP_ALIVE_JOBS_WITH_STATUSES),
        },
        {
            "name": "REANA_KUBERNETES_JOBS_MEMORY_LIMIT",
            "value": REANA_KUBERNETES_JOBS_MEMORY_LIMIT,
        },
        {
            "name": "REANA_KUBERNETES_JOBS_MAX_USER_MEMORY_LIMIT",
            "value": REANA_KUBERNETES_JOBS_MAX_USER_MEMORY_LIMIT,
        },
        {
            "name": "REANA_KUBERNETES_JOBS_TIMEOUT_LIMIT",
            "value": REANA_KUBERNETES_JOBS_TIMEOUT_LIMIT,
        },
        {
            "name": "REANA_KUBERNETES_JOBS_MAX_USER_TIMEOUT_LIMIT",
            "value": REANA_KUBERNETES_JOBS_MAX_USER_TIMEOUT_LIMIT,
        },
        {
            "name": "WORKSPACE_PATHS",
            "value": json.dumps(WORKSPACE_PATHS),
        },
    ])
    job_controller_container.env.extend(job_controller_env_vars)
    job_controller_container.env.extend(job_controller_env_secrets)
    if REANA_RUNTIME_JOBS_KUBERNETES_NODE_LABEL:
        job_controller_container.env.append(
            {
                "name": "REANA_RUNTIME_JOBS_KUBERNETES_NODE_LABEL",
                "value": os.getenv("REANA_RUNTIME_JOBS_KUBERNETES_NODE_LABEL"),
            },
        )
    secrets_volume_mount = secrets_store.get_secrets_volume_mount_as_k8s_spec()
    job_controller_container.volume_mounts = [workspace_mount, db_mount]
    job_controller_container.volume_mounts.append(secrets_volume_mount)
    job_controller_container.ports = [{
        "containerPort": current_app.config["JOB_CONTROLLER_CONTAINER_PORT"]
    }]
    containers = [workflow_engine_container, job_controller_container]
    spec.template.spec = client.V1PodSpec(
        containers=containers,
        node_selector=REANA_RUNTIME_BATCH_KUBERNETES_NODE_LABEL,
        init_containers=[],
    )
    spec.template.spec.service_account_name = (
        REANA_RUNTIME_KUBERNETES_SERVICEACCOUNT_NAME
    )
    volumes = [
        workspace_volume,
        shared_volume,
        secrets_store.get_file_secrets_volume_as_k8s_specs(),
    ]
    if kerberos:
        volumes += kerberos.volumes
        spec.template.spec.init_containers.append(kerberos.init_container)
    # filter out volumes with the same name
    spec.template.spec.volumes = list({v["name"]: v for v in volumes}.values())

    if os.getenv("FLASK_ENV") == "development":
        code_volume_name = "reana-code"
        code_mount_path = "/code"
        k8s_code_volume = client.V1Volume(name=code_volume_name)
        k8s_code_volume.host_path = client.V1HostPathVolumeSource(
            code_mount_path)
        spec.template.spec.volumes.append(k8s_code_volume)
        for container in spec.template.spec.containers:
            container.env.extend(current_app.config["DEBUG_ENV_VARS"])
            sub_path = f"reana-{container.name}"
            if container.name == "workflow-engine":
                sub_path += f"-{self.workflow.type_}"
            container.volume_mounts.append({
                "name": code_volume_name,
                "mountPath": code_mount_path,
                "subPath": sub_path,
            })
    job.spec = spec
    job.spec.template.spec.restart_policy = "Never"
    job.spec.backoff_limit = 0
    return job
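# Stand-alone illustration of the name-based de-duplication applied to the
# pod volumes above: later entries with the same "name" overwrite earlier
# ones, so exactly one volume per name survives. The volume dicts here are
# made-up examples, not REANA definitions.
def _dedup_volumes_demo():
    volumes = [
        {"name": "reana-shared-volume",
         "persistentVolumeClaim": {"claimName": "a"}},
        {"name": "reana-shared-volume",
         "persistentVolumeClaim": {"claimName": "a"}},
        {"name": "krb5-conf", "configMap": {"name": "krb5-conf"}},
    ]
    deduplicated = list({v["name"]: v for v in volumes}.values())
    assert [v["name"] for v in deduplicated] == [
        "reana-shared-volume", "krb5-conf"]
    return deduplicated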
def _create_job_spec(self, name, command=None, image=None, env_vars=None,
                     overwrite_input_parameters=None,
                     overwrite_operational_options=None):
    """Instantiate a Kubernetes job.

    :param name: Name of the job.
    :param image: Docker image to use to run the job.
    :param command: List of commands to run on the given job.
    :param env_vars: List of environment variables (dictionaries) to
        inject into the workflow engine container.
    :param overwrite_input_parameters: Dictionary with parameters to be
        overwritten or added to the current workflow run.
    :type overwrite_input_parameters: dict
    :param overwrite_operational_options: Dictionary with operational
        options to be overwritten or added to the current workflow run.
    :type overwrite_operational_options: dict
    """
    image = image or self._workflow_engine_image()
    command = command or self._workflow_engine_command(
        overwrite_input_parameters=overwrite_input_parameters,
        overwrite_operational_options=overwrite_operational_options)
    workflow_engine_env_vars = env_vars or self._workflow_engine_env_vars()
    job_controller_env_vars = []
    owner_id = str(self.workflow.owner_id)
    command = format_cmd(command)
    workspace_mount, workspace_volume = \
        get_shared_volume(self.workflow.workspace_path)
    db_mount, _ = get_shared_volume('db')

    workflow_metadata = client.V1ObjectMeta(
        name=name,
        labels={'reana_workflow_mode': 'batch'})
    job = client.V1Job()
    job.api_version = 'batch/v1'
    job.kind = 'Job'
    job.metadata = workflow_metadata
    spec = client.V1JobSpec(template=client.V1PodTemplateSpec())
    spec.template.metadata = workflow_metadata
    workflow_engine_container = client.V1Container(
        name=current_app.config['WORKFLOW_ENGINE_NAME'],
        image=image,
        image_pull_policy='IfNotPresent',
        env=[],
        volume_mounts=[],
        command=['/bin/bash', '-c'],
        args=command)
    job_controller_address = [{
        'name': 'REANA_JOB_CONTROLLER_SERVICE_PORT_HTTP',
        'value': str(current_app.config['JOB_CONTROLLER_CONTAINER_PORT'])
    }, {
        'name': 'REANA_JOB_CONTROLLER_SERVICE_HOST',
        'value': 'localhost'
    }]
    workflow_engine_env_vars.extend(job_controller_address)
    workflow_engine_container.env.extend(workflow_engine_env_vars)
    workflow_engine_container.security_context = \
        client.V1SecurityContext(
            run_as_group=WORKFLOW_RUNTIME_USER_GID,
            run_as_user=WORKFLOW_RUNTIME_USER_UID
        )
    workflow_engine_container.volume_mounts = [workspace_mount]
    secrets_store = REANAUserSecretsStore(owner_id)
    job_controller_env_secrets = secrets_store.\
        get_env_secrets_as_k8s_spec()
    user = \
        secrets_store.get_secret_value('CERN_USER') or \
        WORKFLOW_RUNTIME_USER_NAME
    job_controller_container = client.V1Container(
        name=current_app.config['JOB_CONTROLLER_NAME'],
        image=current_app.config['JOB_CONTROLLER_IMAGE'],
        image_pull_policy='IfNotPresent',
        env=[],
        volume_mounts=[],
        command=['/bin/bash', '-c'],
        args=self._create_job_controller_startup_cmd(user),
        ports=[])
    job_controller_env_vars.extend([
        {
            'name': 'REANA_USER_ID',
            'value': owner_id
        },
        {
            'name': 'CERN_USER',
            'value': user
        },
        {
            'name': 'USER',  # Required by HTCondor
            'value': user
        },
        {
            'name': 'K8S_CERN_EOS_AVAILABLE',
            'value': K8S_CERN_EOS_AVAILABLE
        },
        {
            'name': 'IMAGE_PULL_SECRETS',
            'value': ','.join(IMAGE_PULL_SECRETS)
        }
    ])
    job_controller_container.env.extend(job_controller_env_vars)
    job_controller_container.env.extend(job_controller_env_secrets)
    job_controller_container.env.extend([{
        'name': 'REANA_SQLALCHEMY_DATABASE_URI',
        'value': SQLALCHEMY_DATABASE_URI
    }, {
        'name': 'REANA_STORAGE_BACKEND',
        'value': REANA_STORAGE_BACKEND
    }])
    secrets_volume_mount = \
        secrets_store.get_secrets_volume_mount_as_k8s_spec()
    job_controller_container.volume_mounts = [workspace_mount, db_mount]
    job_controller_container.volume_mounts.append(secrets_volume_mount)
    job_controller_container.ports = [{
        "containerPort": current_app.config['JOB_CONTROLLER_CONTAINER_PORT']
    }]
    containers = [workflow_engine_container, job_controller_container]
    spec.template.spec = client.V1PodSpec(containers=containers)
    spec.template.spec.service_account_name = \
        K8S_REANA_SERVICE_ACCOUNT_NAME
    spec.template.spec.volumes = [
        workspace_volume,
        secrets_store.get_file_secrets_volume_as_k8s_specs(),
    ]

    if os.getenv('FLASK_ENV') == 'development':
        code_volume_name = 'reana-code'
        code_mount_path = '/code'
        k8s_code_volume = client.V1Volume(name=code_volume_name)
        k8s_code_volume.host_path = client.V1HostPathVolumeSource(
            code_mount_path)
        spec.template.spec.volumes.append(k8s_code_volume)
        for container in spec.template.spec.containers:
            container.env.extend(current_app.config['DEBUG_ENV_VARS'])
            sub_path = f'reana-{container.name}'
            if container.name == 'workflow-engine':
                sub_path += f'-{self.workflow.type_}'
            container.volume_mounts.append({
                'name': code_volume_name,
                'mountPath': code_mount_path,
                'subPath': sub_path
            })
    job.spec = spec
    job.spec.template.spec.restart_policy = 'Never'
    job.spec.ttl_seconds_after_finished = TTL_SECONDS_AFTER_FINISHED
    job.spec.backoff_limit = 0
    return job
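# Small debugging aid (an assumption, not part of the module above): render
# the job built by _create_job_spec() as a plain YAML manifest so it can be
# inspected or applied with kubectl.
import yaml
from kubernetes import client


def job_to_yaml(job):
    """Serialize a V1Job into a Kubernetes YAML manifest string."""
    return yaml.safe_dump(client.ApiClient().sanitize_for_serialization(job))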