def execute_plan_iterator(
    execution_plan, pipeline_run, instance, retries=None, environment_dict=None,
):
    check.inst_param(execution_plan, 'execution_plan', ExecutionPlan)
    check.inst_param(pipeline_run, 'pipeline_run', PipelineRun)
    check.inst_param(instance, 'instance', DagsterInstance)
    retries = check.opt_inst_param(retries, 'retries', Retries, Retries.disabled_mode())
    environment_dict = check.opt_dict_param(environment_dict, 'environment_dict')

    return iter(
        _ExecuteRunWithPlanIterable(
            execution_plan=execution_plan,
            environment_dict=environment_dict,
            pipeline_run=pipeline_run,
            instance=instance,
            retries=retries,
            iterator=inner_plan_execution_iterator,
            raise_on_error=False,
        )
    )
def execute_plan_iterator(
    execution_plan: ExecutionPlan,
    pipeline_run: PipelineRun,
    instance: DagsterInstance,
    retries: Optional[Retries] = None,
    run_config: Optional[dict] = None,
) -> Iterator[DagsterEvent]:
    check.inst_param(execution_plan, "execution_plan", ExecutionPlan)
    check.inst_param(pipeline_run, "pipeline_run", PipelineRun)
    check.inst_param(instance, "instance", DagsterInstance)
    retries = check.opt_inst_param(retries, "retries", Retries, Retries.disabled_mode())
    run_config = check.opt_dict_param(run_config, "run_config")

    return iter(
        _ExecuteRunWithPlanIterable(
            execution_plan=execution_plan,
            iterator=inner_plan_execution_iterator,
            execution_context_manager=PlanExecutionContextManager(
                retries=retries,
                execution_plan=execution_plan,
                run_config=run_config,
                pipeline_run=pipeline_run,
                instance=instance,
                raise_on_error=False,
            ),
        )
    )
def execute_plan_iterator(
    execution_plan, pipeline_run, instance, retries=None, run_config=None,
):
    check.inst_param(execution_plan, 'execution_plan', ExecutionPlan)
    check.inst_param(pipeline_run, 'pipeline_run', PipelineRun)
    check.inst_param(instance, 'instance', DagsterInstance)
    retries = check.opt_inst_param(retries, 'retries', Retries, Retries.disabled_mode())
    run_config = check.opt_dict_param(run_config, 'run_config')

    return iter(
        _ExecuteRunWithPlanIterable(
            execution_plan=execution_plan,
            iterator=inner_plan_execution_iterator,
            execution_context_manager=PlanExecutionContextManager(
                retries=retries,
                execution_plan=execution_plan,
                run_config=run_config,
                pipeline_run=pipeline_run,
                instance=instance,
                raise_on_error=False,
            ),
        )
    )
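# A minimal usage sketch for the run_config variants above, assuming the legacy
# (solid/pipeline era) Dagster API. The solid, pipeline, and run created below
# are hypothetical illustrations for this sketch, not part of the source.
from dagster import DagsterInstance, create_execution_plan, pipeline, solid


@solid
def say_hello(_):
    return 'hello'


@pipeline
def hello_pipeline():
    say_hello()


def run_plan_example():
    instance = DagsterInstance.ephemeral()
    execution_plan = create_execution_plan(hello_pipeline)
    pipeline_run = instance.create_run_for_pipeline(
        pipeline_def=hello_pipeline, execution_plan=execution_plan
    )
    # Consume the DagsterEvent stream produced by executing the plan.
    for event in execute_plan_iterator(
        execution_plan, pipeline_run, instance, run_config={}
    ):
        print(event.event_type_value)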
def execute_plan_iterator(
    execution_plan, pipeline_run, instance, retries=None, environment_dict=None,
):
    check.inst_param(execution_plan, 'execution_plan', ExecutionPlan)
    check.inst_param(pipeline_run, 'pipeline_run', PipelineRun)
    check.inst_param(instance, 'instance', DagsterInstance)
    retries = check.opt_inst_param(retries, 'retries', Retries, Retries.disabled_mode())
    environment_dict = check.opt_dict_param(environment_dict, 'environment_dict')

    # Set up the pipeline context, yielding any events emitted during initialization.
    initialization_manager = pipeline_initialization_manager(
        execution_plan.pipeline_def, environment_dict, pipeline_run, instance, execution_plan,
    )
    for event in initialization_manager.generate_setup_events():
        yield event

    pipeline_context = initialization_manager.get_object()
    generator_closed = False
    try:
        if pipeline_context:
            for event in inner_plan_execution_iterator(
                pipeline_context, execution_plan=execution_plan, retries=retries
            ):
                yield event
    except GeneratorExit:
        # Shouldn't happen, but avoid runtime-exception in case this generator gets GC-ed
        # (see https://amir.rachum.com/blog/2017/03/03/generator-cleanup/).
        generator_closed = True
        raise
    finally:
        # Always run teardown, but only re-yield its events if the generator is still open.
        for event in initialization_manager.generate_teardown_events():
            if not generator_closed:
                yield event
def build_flyte_sdk_workflow(self):
    # Step keys from the execution plan's dependency mapping; iterated below to
    # build one Flyte node per Dagster step.
    ordered_step_dict = self.execution_plan.execution_deps()

    # Run bookkeeping happens against an ephemeral, in-memory Dagster instance.
    instance = DagsterInstance.ephemeral()
    pipeline_run = instance.create_run(
        pipeline_name=self.execution_plan.pipeline_def.display_name,
        run_id=self.execution_plan.pipeline_def.display_name,
        run_config=self.run_config,
        mode=None,
        solids_to_execute=None,
        step_keys_to_execute=None,
        status=None,
        tags=None,
        root_run_id=None,
        parent_run_id=None,
        pipeline_snapshot=self.execution_plan.pipeline_def.get_pipeline_snapshot(),
        execution_plan_snapshot=snapshot_from_execution_plan(
            self.execution_plan,
            self.execution_plan.pipeline_def.get_pipeline_snapshot_id(),
        ),
        parent_pipeline_snapshot=self.execution_plan.pipeline_def.get_parent_pipeline_snapshot(),
    )

    # Build a plan execution context (retries disabled) shared by the Flyte nodes.
    initialization_manager = PlanExecutionContextManager(
        Retries.disabled_mode(),
        self.execution_plan,
        self.run_config,
        instance.get_run_by_id(self.execution_plan.pipeline_def.display_name),
        instance,
    )
    list(initialization_manager.prepare_context())
    pipeline_context = initialization_manager.get_context()

    # Create one Flyte SDK node per execution step, forwarding any per-solid
    # resource requests and limits configured in compute_dict.
    for step_key in ordered_step_dict:
        solid_name = self.execution_plan.get_step_by_key(step_key).solid_name
        self.sdk_node_dict[solid_name] = self.get_sdk_node(
            pipeline_context,
            instance,
            pipeline_run,
            step_key,
            storage_request=self.compute_dict[solid_name].get("storage_request", None),
            cpu_request=self.compute_dict[solid_name].get("cpu_request", None),
            memory_request=self.compute_dict[solid_name].get("memory_request", None),
            storage_limit=self.compute_dict[solid_name].get("storage_limit", None),
            cpu_limit=self.compute_dict[solid_name].get("cpu_limit", None),
            memory_limit=self.compute_dict[solid_name].get("memory_limit", None),
        )
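# Hypothetical illustration (not from the source): the per-solid shape of the
# compute_dict that build_flyte_sdk_workflow reads resource settings from.
# The keys match the lookups above; the values are placeholder examples only.
example_compute_dict = {
    "my_solid": {
        "cpu_request": "500m",
        "cpu_limit": "1",
        "memory_request": "256Mi",
        "memory_limit": "512Mi",
        "storage_request": "1Gi",
        "storage_limit": "2Gi",
    },
}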
def retries(self):
    return Retries.disabled_mode()