def test_pandas_dask():
    """Run the pandas example pipeline on the Dask executor and expect success.

    Feeds the `pandas_solid` a local CSV fixture and forces filesystem
    intermediate storage (required so Dask workers can exchange values).
    """
    solid_inputs = {
        'solids': {
            'pandas_solid': {
                'inputs': {
                    'df': {'csv': {'path': file_relative_path(__file__, 'ex.csv')}}
                }
            }
        }
    }
    handle = ExecutionTargetHandle.for_pipeline_python_file(__file__, pandas_pipeline.name)
    pipeline_result = execute_pipeline(
        handle.build_pipeline_definition(),
        # Merge storage config with the solid input config.
        environment_dict=dict({'storage': {'filesystem': {}}}, **solid_inputs),
        run_config=RunConfig(executor_config=DaskConfig(timeout=30)),
    )
    assert pipeline_result.success
def test_composite_execute():
    """Execute the composite-solid pipeline via the Dask helper and expect success."""
    target = ExecutionTargetHandle.for_pipeline_python_file(__file__, 'dask_composite_pipeline')
    pipeline_result = execute_on_dask(
        target,
        # Filesystem storage lets Dask workers share intermediate values.
        env_config={'storage': {'filesystem': {}}},
        dask_config=DaskConfig(timeout=30),
    )
    assert pipeline_result.success
def test_execute_on_dask():
    """Run the engine test pipeline on Dask and check the solid's output value."""
    target = ExecutionTargetHandle.for_pipeline_python_file(__file__, 'dask_engine_pipeline')
    pipeline_result = execute_on_dask(
        target,
        # Filesystem storage lets Dask workers share intermediate values.
        env_config={'storage': {'filesystem': {}}},
        dask_config=DaskConfig(timeout=30),
    )
    assert pipeline_result.result_for_solid('simple').output_value() == 1
def test_execute_on_dask():
    """Run the test pipeline (built from a pipeline fn) on Dask and check its result."""
    target = ExecutionTargetHandle.for_pipeline_fn(define_dask_test_pipeline)
    pipeline_result = execute_on_dask(
        target,
        # Filesystem storage lets Dask workers share intermediate values.
        env_config={'storage': {'filesystem': {}}},
        dask_config=DaskConfig(timeout=30),
    )
    assert pipeline_result.result_for_solid('simple').result_value() == 1
def test_composite_execute():
    """Execute the composite-solid pipeline with the Dask executor and expect success."""
    handle = ExecutionTargetHandle.for_pipeline_python_file(__file__, 'dask_composite_pipeline')
    pipeline_result = execute_pipeline(
        handle.build_pipeline_definition(),
        # Filesystem storage lets Dask workers share intermediate values.
        environment_dict={'storage': {'filesystem': {}}},
        run_config=RunConfig(executor_config=DaskConfig(timeout=30)),
    )
    assert pipeline_result.success
def test_execute_on_dask():
    """Run the engine test pipeline with the Dask executor and check the solid output."""
    handle = ExecutionTargetHandle.for_pipeline_python_file(__file__, 'dask_engine_pipeline')
    pipeline_result = execute_pipeline(
        handle.build_pipeline_definition(),
        # Filesystem storage lets Dask workers share intermediate values.
        environment_dict={'storage': {'filesystem': {}}},
        run_config=RunConfig(executor_config=DaskConfig(timeout=30)),
    )
    assert pipeline_result.result_for_solid('simple').output_value() == 1
def test_execute_on_dask():
    """Run the test pipeline on Dask and check the transformed value.

    NOTE: flaky on py27, believed due to
    https://github.com/dask/distributed/issues/2446 — retried a couple of
    times rather than fixed outright.
    """
    target = ExecutionTargetHandle.for_pipeline_fn(define_dask_test_pipeline)
    pipeline_result = execute_on_dask(
        target,
        # Filesystem storage lets Dask workers share intermediate values.
        env_config={'storage': {'filesystem': {}}},
        run_config=RunConfig(storage_mode=RunStorageMode.FILESYSTEM),
        dask_config=DaskConfig(timeout=30),
    )
    assert pipeline_result.result_for_solid('simple').transformed_value() == 1
def test_dask_cluster():
    """Run the hammer example against a live Dask cluster using S3 storage.

    The scheduler address comes from the DASK_ADDRESS environment variable;
    S3 storage is required so remote workers can exchange intermediates.
    """
    target = ExecutionTargetHandle.for_pipeline_module(
        'dagster_examples.toys.hammer', 'define_hammer_pipeline'
    )
    scheduler = '%s:8786' % os.getenv('DASK_ADDRESS')
    pipeline_result = execute_on_dask(
        target,
        env_config={'storage': {'s3': {'s3_bucket': 'dagster-airflow-scratch'}}},
        run_config=RunConfig(storage_mode=RunStorageMode.S3),
        dask_config=DaskConfig(address=scheduler),
    )
    assert pipeline_result.success
    assert pipeline_result.result_for_solid('total').transformed_value() == 4
def test_dask_cluster(dask_address):
    """Run the hammer example against a live Dask cluster using S3 storage.

    `dask_address` is a fixture providing the scheduler host; S3 storage is
    required so remote workers can exchange intermediates.
    """
    handle = ExecutionTargetHandle.for_pipeline_module(
        'dagster_examples.toys.hammer', 'hammer_pipeline'
    )
    pipeline_result = execute_pipeline(
        handle.build_pipeline_definition(),
        environment_dict={
            'storage': {'s3': {'config': {'s3_bucket': 'dagster-airflow-scratch'}}}
        },
        run_config=RunConfig(executor_config=DaskConfig(address='%s:8786' % dask_address)),
    )
    assert pipeline_result.success
    assert pipeline_result.result_for_solid('total').output_value() == 4