def define_repository():
    """Build the eager-constructed repository holding the dagster test pipelines."""
    test_pipelines = [
        define_empty_pipeline(),
        define_single_mode_pipeline(),
        define_multi_mode_pipeline(),
        define_multi_mode_with_resources_pipeline(),
    ]
    return RepositoryDefinition.eager_construction(
        name='dagster_test_repository',
        pipelines=test_pipelines,
    )
def define_repository():
    """Build the eager-constructed repository of pyspark pagerank step pipelines.

    Pipelines are constructed eagerly via their ``define_*`` factories.
    """
    pagerank_pipelines = [
        define_pipeline(),
        define_pyspark_pagerank_step_five(),
        define_pyspark_pagerank_step_four(),
        define_pyspark_pagerank_step_one(),
        define_pyspark_pagerank_step_three(),
        define_pyspark_pagerank_step_two(),
    ]
    return RepositoryDefinition.eager_construction(
        name='pyspark_pagerank_repo_step_one',
        pipelines=pagerank_pipelines,
    )
def test_eager():
    """Eagerly-constructed repositories resolve pipelines by name."""
    call_counts = defaultdict(int)
    repository = RepositoryDefinition.eager_construction(
        name='some_repo',
        pipelines=[
            create_single_node_pipeline(pipeline_name, call_counts)
            for pipeline_name in ('foo', 'bar')
        ],
    )
    for expected_name in ('foo', 'bar'):
        assert repository.get_pipeline(expected_name).name == expected_name
def define_repository():
    """Build the repository of pyspark pagerank pipelines (base plus steps one–five).

    NOTE(review): the repository name says "step_one" although every step
    pipeline is registered — presumably copied forward from an earlier
    tutorial step; confirm before renaming.
    """
    all_steps = [
        pyspark_pagerank,
        pyspark_pagerank_step_one,
        pyspark_pagerank_step_two,
        pyspark_pagerank_step_three,
        pyspark_pagerank_step_four,
        pyspark_pagerank_step_five,
    ]
    return RepositoryDefinition.eager_construction(
        name='pyspark_pagerank_repo_step_one',
        pipelines=all_steps,
    )
# NOTE(review): "repositry" is a typo, but renaming would change the pytest
# test ID, so the name is kept as-is.
def test_repositry_has_solid_def():
    """A repository exposes solid defs nested inside composite solids."""

    @composite_solid(outputs=[OutputDefinition()])
    def inner():
        return add_one(return_one())

    @composite_solid
    def outer():
        add_one(inner())

    @pipeline
    def a_pipeline():
        outer()

    repository = RepositoryDefinition.eager_construction(
        name='has_solid_def_test',
        pipelines=[a_pipeline],
    )
    # 'inner' is nested two composites deep; lookup should still find it.
    assert repository.solid_def_named('inner')