def test_partial_reexecute(run_config):
    """Re-execute subsets of a completed dynamic-pipeline run.

    Covers two step selections against the same parent run: one rooted at an
    unresolved (pre-mapping) step key, one rooted at a single resolved
    dynamic step key.
    """
    with instance_for_test() as instance:
        full_run = execute_pipeline(
            reconstructable(dynamic_pipeline),
            instance=instance,
            run_config=run_config,
        )
        assert full_run.success

        # Selection rooted at the unresolved step key.
        from_unresolved = reexecute_pipeline(
            reconstructable(dynamic_pipeline),
            parent_run_id=full_run.run_id,
            instance=instance,
            step_selection=["sum_numbers*"],
            run_config=run_config,
        )
        assert from_unresolved.success

        # Selection rooted at one resolved dynamic step key.
        from_resolved = reexecute_pipeline(
            reconstructable(dynamic_pipeline),
            parent_run_id=full_run.run_id,
            instance=instance,
            step_selection=["multiply_by_two[1]*"],
            run_config=run_config,
        )
        assert from_resolved.success
def test_intermediate_storage_reexecution():
    """Full and partial re-execution with filesystem intermediate storage.

    Builds a tiny two-solid pipeline inline, runs it once, then re-executes
    it fully and partially (only the downstream solid) from the parent run,
    reading the upstream intermediate from the parent run's storage.

    Fix: use the `instance_for_test()` context manager — consistent with the
    other tests in this file — instead of a bare `DagsterInstance.ephemeral()`,
    so the test instance is scoped and cleaned up deterministically.
    """

    @solid
    def return_one(_):
        return 1

    @solid
    def plus_one(_, one):
        return one + 1

    @pipeline
    def foo():
        plus_one(return_one())

    run_config = {"intermediate_storage": {"filesystem": {}}}

    with instance_for_test() as instance:
        result = execute_pipeline(foo, run_config=run_config, instance=instance)
        assert result.success

        # Full re-execution from the parent run.
        reexecution_result = reexecute_pipeline(
            foo,
            run_config=run_config,
            parent_run_id=result.run_id,
            instance=instance,
        )
        assert reexecution_result.success

        # Partial re-execution: only the downstream solid; its input is
        # loaded from the parent run's stored intermediates.
        partial_reexecution_result = reexecute_pipeline(
            foo,
            run_config=run_config,
            step_selection=["plus_one"],
            parent_run_id=result.run_id,
            instance=instance,
        )
        assert partial_reexecution_result.success
def test_bad_step_selection():
    """Selecting a dynamic-output step together with a step it resolves
    must be rejected when re-executing from a prior run."""
    with instance_for_test() as instance:
        parent_result = execute_pipeline(dynamic_pipeline, instance=instance)
        assert parent_result.success

        # this exact error could be improved, but it should fail if you try to select
        # both the dynamic outputting step key and something resolved by it in the previous run
        with pytest.raises(DagsterExecutionStepNotFoundError):
            reexecute_pipeline(
                dynamic_pipeline,
                parent_run_id=parent_result.run_id,
                instance=instance,
                step_selection=["emit", "multiply_by_two[1]"],
            )
def test_map_reexecute_after_fail(run_config):
    """A run that fails (emit configured to fail) can be re-executed to
    success with the non-failing config."""
    with instance_for_test() as instance:
        # Overlay the failure flag on top of the provided run config.
        failing_config = merge_dicts(
            run_config,
            {"solids": {"emit": {"config": {"fail": True}}}},
        )
        failed_result = execute_pipeline(
            reconstructable(dynamic_pipeline),
            instance=instance,
            run_config=failing_config,
            raise_on_error=False,
        )
        assert not failed_result.success

        # Re-execute from the failed parent run, this time without the
        # failure flag.
        retry_result = reexecute_pipeline(
            reconstructable(dynamic_pipeline),
            parent_run_id=failed_result.run_id,
            instance=instance,
            run_config=run_config,
        )
        assert retry_result.success
def test_map_multi_reexecute_after_fail():
    """Fail-then-reexecute flow under the multiprocess executor."""
    with instance_for_test() as instance:
        failed_result = execute_pipeline(
            reconstructable(dynamic_pipeline),
            instance=instance,
            run_config={
                "execution": {"multiprocess": {}},
                # Force the dynamic-output solid to fail on the first run.
                "solids": {"emit": {"config": {"fail": True}}},
            },
            raise_on_error=False,
        )
        assert not failed_result.success

        # Retry from the failed parent, keeping the multiprocess executor
        # but dropping the failure flag.
        retry_result = reexecute_pipeline(
            reconstructable(dynamic_pipeline),
            parent_run_id=failed_result.run_id,
            run_config={"execution": {"multiprocess": {}}},
            instance=instance,
        )
        assert retry_result.success
def test_full_reexecute():
    """A successful run can be fully re-executed from its parent run id."""
    with instance_for_test() as instance:
        first = execute_pipeline(dynamic_pipeline, instance=instance)
        assert first.success

        second = reexecute_pipeline(
            dynamic_pipeline, parent_run_id=first.run_id, instance=instance
        )
        assert second.success
    # NOTE(review): this `return model` ends a function whose `def` line is
    # above the visible portion of this file (presumably the solid feeding
    # `train_model` below) — confirm against the full file.
    return model


# Pipeline with two modes: "test" binds the custom-path filesystem asset
# store, "local" binds the plain local asset store.
@pipeline(mode_defs=[
    ModeDefinition(
        "test", resource_defs={"fs_asset_store": custom_path_fs_asset_store}),
    ModeDefinition("local", resource_defs={"fs_asset_store": local_asset_store}),
])
def custom_path_pipeline():
    train_model(parse_df(call_api()))


# Repository exposing the pipeline above.
@repository
def builtin_custom_path_repo():
    return [custom_path_pipeline]


if __name__ == "__main__":
    # Script entry point: run the pipeline once in "local" mode, then
    # partially re-execute from the parse_df compute step onward against
    # the same ephemeral instance.
    instance = DagsterInstance.ephemeral()
    result = execute_pipeline(custom_path_pipeline, mode="local", instance=instance)
    reexecute_pipeline(
        custom_path_pipeline,
        result.run_id,
        mode="local",
        instance=instance,
        step_selection=["parse_df.compute*"],
    )