def test_execution_crash():
    # A pipeline whose worker process dies unexpectedly must be recorded as
    # FAILURE, and the final log entry must carry the crash message.
    run_id = 'run-1'
    target = RepositoryTargetInfo(
        repository_yaml=None,
        python_file=__file__,
        fn_name='define_crashy_pipeline',
        module_name=None,
    )
    container = RepositoryContainer(target)
    pipeline = define_crashy_pipeline()
    env_config = {
        'solids': {
            'sum_solid': {'inputs': {'num': {'csv': {'path': script_relative_path('num.csv')}}}}
        }
    }
    run = InMemoryPipelineRun(
        run_id,
        ExecutionSelector('pandas_hello_world'),
        env_config,
        create_execution_plan(pipeline, env_config),
    )
    manager = MultiprocessingExecutionManager()
    manager.execute_pipeline(container, pipeline, run)
    manager.join()

    assert run.status == PipelineRunStatus.FAILURE

    expected_message = (
        'Exception: Pipeline execution process for {run_id} unexpectedly exited\n'
    ).format(run_id=run_id)
    final_entry = run.all_logs()[-1]
    assert final_entry.message == expected_message
def test_running():
    # Happy path: a passing pipeline run through the multiprocessing manager
    # ends in SUCCESS and emits exactly one process-start / process-started
    # event pair.
    run_id = 'run-1'
    target = RepositoryTargetInfo(
        repository_yaml=None,
        python_file=__file__,
        fn_name='define_passing_pipeline',
        module_name=None,
    )
    container = RepositoryContainer(target)
    pipeline = define_passing_pipeline()
    env_config = {
        'solids': {
            'sum_solid': {'inputs': {'num': {'csv': {'path': script_relative_path('num.csv')}}}}
        }
    }
    run = InMemoryPipelineRun(
        run_id,
        ExecutionSelector('pandas_hello_world'),
        env_config,
        create_execution_plan(pipeline, env_config),
    )
    manager = MultiprocessingExecutionManager()
    manager.execute_pipeline(container, pipeline, run)
    manager.join()

    assert run.status == PipelineRunStatus.SUCCESS

    events = run.all_logs()
    assert events
    assert len(get_events_of_type(events, EventType.PIPELINE_PROCESS_START)) == 1
    assert len(get_events_of_type(events, EventType.PIPELINE_PROCESS_STARTED)) == 1
def test_failing():
    # A pipeline that raises during execution must finish with FAILURE status
    # while still producing log output.
    run_id = 'run-1'
    target = RepositoryTargetInfo(
        repository_yaml=None,
        python_file=__file__,
        fn_name='define_failing_pipeline',
        module_name=None,
    )
    container = RepositoryContainer(target)
    pipeline = define_failing_pipeline()
    env_config = {
        'solids': {
            'sum_solid': {'inputs': {'num': {'csv': {'path': script_relative_path('num.csv')}}}}
        }
    }
    run = InMemoryPipelineRun(
        run_id,
        ExecutionSelector('pandas_hello_world'),
        env_config,
        create_execution_plan(pipeline, env_config),
    )
    manager = MultiprocessingExecutionManager()
    manager.execute_pipeline(container, pipeline, run)
    manager.join()

    assert run.status == PipelineRunStatus.FAILURE
    assert run.all_logs()
def define_context():
    """Build a DagsterGraphQLContext backed by in-memory run storage and a
    synchronous execution manager."""
    repo_container = RepositoryContainer(repository=define_repository())
    run_storage = PipelineRunStorage()
    return DagsterGraphQLContext(
        repo_container,
        run_storage,
        execution_manager=SynchronousExecutionManager(),
    )