def _collateral(
    self,
    project_uuid: str,
    task_id: str,
    pipeline: Pipeline,
    run_config: Dict[str, Any],
    env_variables: Dict[str, Any],
    **kwargs,
):
    """Launch a pipeline run as a Celery background task.

    Locks the environment images the pipeline needs (so they are not
    deleted if an environment rebuild makes them outdated), augments
    ``run_config`` with the image mappings and user environment
    variables, and dispatches the ``run_pipeline`` task to Celery.

    Args:
        project_uuid: UUID of the project the pipeline belongs to.
        task_id: Id assigned to the Celery task for this run.
        pipeline: Pipeline to execute.
        run_config: Run configuration; mutated in place with
            ``env_uuid_docker_id_mappings`` and ``user_env_variables``.
        env_variables: User-defined environment variables for the run.
        **kwargs: Ignored; accepted for call-site compatibility.

    Raises:
        errors.ImageNotFound: If the pipeline references environments
            that do not exist in the project.
    """
    # Get docker ids of images to use and make it so that the images
    # will not be deleted in case they become outdated by an
    # environment rebuild.
    try:
        env_uuid_docker_id_mappings = lock_environment_images_for_run(
            task_id,
            project_uuid,
            pipeline.get_environments(),
        )
    except errors.ImageNotFound as e:
        msg = (
            "Pipeline references environments that do not exist in the"
            f" project, the following environments do not exist: [{e}].\n\n"
            "Please make sure all pipeline steps are assigned an"
            " environment that exists in the project."
        )
        # Chain the original exception so the underlying cause is kept
        # in tracebacks instead of being silently replaced.
        raise errors.ImageNotFound(msg) from e

    # Create Celery object with the Flask context and construct the
    # kwargs for the job.
    celery = make_celery(current_app)
    run_config["env_uuid_docker_id_mappings"] = env_uuid_docker_id_mappings
    run_config["user_env_variables"] = env_variables
    celery_job_kwargs = {
        "pipeline_definition": pipeline.to_dict(),
        "project_uuid": project_uuid,
        "run_config": run_config,
    }

    # Start the run as a background task on Celery. Due to circular
    # imports we send the task by name instead of importing the
    # function directly.
    res = celery.send_task(
        "app.core.tasks.run_pipeline",
        kwargs=celery_job_kwargs,
        task_id=task_id,
    )

    # NOTE: this only matters when a result backend is configured. The
    # task does not return anything, so we forget its result to let the
    # Celery backend release the resources (for storing and
    # transmitting results) associated with the task.
    res.forget()
def _collateral(
    self,
    project_uuid: str,
    task_id: str,
    pipeline: Pipeline,
    run_config: Dict[str, Any],
    env_variables: Dict[str, Any],
    env_uuid_to_image: Dict[str, str],
    **kwargs,
):
    """Dispatch an interactive pipeline run to Celery.

    Augments ``run_config`` with the environment-image mapping, the
    user environment variables, and the derived session identifiers,
    then sends the ``run_pipeline`` task to Celery by name.

    Args:
        project_uuid: UUID of the project the pipeline belongs to.
        task_id: Id assigned to the Celery task for this run.
        pipeline: Pipeline to execute.
        run_config: Run configuration; mutated in place with image
            mappings, env variables, and session uuid/type.
        env_variables: User-defined environment variables for the run.
        env_uuid_to_image: Maps environment UUIDs to the images to use.
        **kwargs: Ignored; accepted for call-site compatibility.
    """
    # The interactive session id is derived from the project and
    # pipeline uuids (first 18 chars of each).
    session_uuid = project_uuid[:18] + pipeline.properties["uuid"][:18]

    # Hand the worker everything it needs through the run config.
    run_config["env_uuid_to_image"] = env_uuid_to_image
    run_config["user_env_variables"] = env_variables
    run_config["session_uuid"] = session_uuid
    run_config["session_type"] = "interactive"

    # Celery application bound to the Flask app context.
    celery_app = make_celery(current_app)

    # The task is referenced by name rather than imported directly to
    # avoid circular imports.
    async_result = celery_app.send_task(
        "app.core.tasks.run_pipeline",
        kwargs={
            "pipeline_definition": pipeline.to_dict(),
            "run_config": run_config,
            "session_uuid": session_uuid,
        },
        task_id=task_id,
    )

    # The task returns nothing; forgetting the result lets a configured
    # Celery result backend release the resources (for storing and
    # transmitting results) tied to this task.
    async_result.forget()