def execute(self, context):
    """
    Aggregates the report files produced by every finished task in the
    DAG into a single workflow report. Moves the collected results into
    "outputs_folder" and removes "tmp_folder". The returned report is
    pushed to XCom by Airflow.
    """
    setup_cwl_logger(context["ti"])
    post_status(context)
    dag = context["dag"]
    _, workflow_report = relocate_outputs(
        workflow=dag.workflow,
        job_data=collect_reports(context),
        cwl_args=dag.default_args["cwl"]
    )
    return workflow_report
def execute(self, context):
    """
    Loads the job Object from the context. Sets "tmp_folder" and
    "outputs_folder" if they have not been set before in the job.
    If "tmp_folder" and/or "outputs_folder" were read from the job
    and are relative, resolves them against the "tmp_folder" and/or
    "outputs_folder" from "cwl_args". Dumps the job data as a json
    file into "tmp_folder" and returns its location (pushed to XCom).
    """
    setup_cwl_logger(context["ti"])
    post_status(context)

    # for easy access
    dag_id = context["dag"].dag_id
    workflow = context["dag"].workflow
    run_id = context["run_id"].replace(":", "_").replace("+", "_")  # to make it dumpable by json
    cwl_args = context["dag"].default_args["cwl"]

    # Loads job from dag_run configuration. Sets defaults from "workflow". Fails on missing input files
    job_data = load_job(
        workflow=workflow,
        job=context["dag_run"].conf["job"],
        cwl_args=cwl_args
    )

    # BUGFIX: mkdtemp must only run when the job did NOT provide "tmp_folder".
    # Passing mkdtemp(...) as dict.get's default evaluated it eagerly, creating
    # (and leaking) an empty scratch directory on every run even when
    # "tmp_folder" was already set in the job.
    if "tmp_folder" in job_data:
        tmp_folder = job_data["tmp_folder"]
    else:
        tmp_folder = mkdtemp(
            dir=cwl_args["tmp_folder"],
            prefix=dag_id + "_" + run_id + "_"
        )
    job_data["tmp_folder"] = get_dir(
        get_absolute_path(tmp_folder, cwl_args["tmp_folder"])
    )

    job_data["outputs_folder"] = get_dir(
        get_absolute_path(
            job_data.get(
                "outputs_folder",
                os.path.join(cwl_args["outputs_folder"], dag_id, run_id)
            ),
            cwl_args["outputs_folder"]
        )
    )

    _, _, _, step_report = get_temp_folders(task_id=self.task_id, job_data=job_data)
    dump_json(job_data, step_report)
    return step_report
def execute(self, context):
    """
    Creates a job from the collected reports of all finished tasks in
    the DAG, then executes a workflow constructed from this workflow
    step. Returns the report file location (pushed to XCom).
    """
    # CONSISTENCY FIX: every sibling execute() routes CWL logging through
    # the task instance before posting status; this one was missing it,
    # leaving step logs unrouted.
    setup_cwl_logger(context["ti"])
    post_status(context)
    self.job_data = collect_reports(context)  # we need it also in "on_kill"
    _, step_report = execute_workflow_step(
        workflow=context["dag"].workflow,
        task_id=self.task_id,
        job_data=self.job_data,
        cwl_args=context["dag"].default_args["cwl"]
    )
    return step_report
def execute(self, context):
    """
    Builds a job from the reports of all finished tasks in the DAG and
    executes the workflow constructed from this single step. Returns the
    step report location (pushed to XCom). When the step is skipped, the
    empty report is pushed to XCom first and AirflowSkipException is
    raised so Airflow marks the task as skipped.
    """
    setup_cwl_logger(context["ti"])
    post_status(context)

    # keep a reference so "on_kill" can access it too
    self.job_data = collect_reports(context)

    _, step_report, skipped = execute_workflow_step(
        workflow=context["dag"].workflow,
        task_id=self.task_id,
        job_data=self.job_data,
        cwl_args=context["dag"].default_args["cwl"]
    )

    if not skipped:
        return step_report

    # save the empty report before raising, so it is still available downstream
    self.xcom_push(context, XCOM_RETURN_KEY, step_report)
    raise AirflowSkipException("Skip workflow step execution")  # mark task as skipped for Airflow