def test_no_outputs_no_inputs_config_schema():
    """An empty environment is valid for a solid-less pipeline, while any
    solid-level config is rejected as an undefined field."""
    # Empty config dict (the default) must evaluate successfully.
    assert create_environment_config(no_input_no_output_pipeline())

    bad_env = {'solids': {'return_one': {}}}
    with pytest.raises(PipelineConfigEvaluationError) as exc_context:
        create_environment_config(no_input_no_output_pipeline(), bad_env)

    # Exactly one evaluation error, pointing at the unexpected solid entry.
    assert len(exc_context.value.errors) == 1
    assert 'Error 1: Undefined field "return_one" at path root:solids' in exc_context.value.message
def test_basic_json_misnamed_output_config_schema():
    """Config that names a nonexistent output is rejected with an
    'undefined field' error that includes the full config path."""
    env_dict = {
        'solids': {
            'return_named_one': {'outputs': [{'wrong_name': {'json': {'path': 'foo'}}}]}
        }
    }
    with pytest.raises(PipelineConfigEvaluationError) as exc_context:
        create_environment_config(single_int_named_output_pipeline(), env_dict)

    message = exc_context.value.message
    assert len(exc_context.value.errors) == 1
    # Both the offending field and its location must appear in the message.
    assert 'Error 1: Undefined field "wrong_name"' in message
    assert 'at path root:solids:return_named_one:outputs[0]' in message
def create_test_pipeline_execution_context(
    loggers=None, resources=None, tags=None, run_config_loggers=None
):
    """Assemble a pipeline execution context for legacy-context tests.

    Builds a trivial solid-less pipeline ('test_legacy_context') with a single
    mode carrying the given logger definitions, evaluates an environment that
    enables every one of those loggers, and wires the resulting pieces into a
    full execution context backed by in-memory storage.

    Args:
        loggers (Optional[dict[str, LoggerDefinition]]): Logger definitions to
            attach to the test mode; all of them are enabled in the environment.
        resources: Passed through to the execution context unchanged.
        tags: Optional tags for the generated RunConfig.
        run_config_loggers (Optional[list[logging.Logger]]): Plain stdlib
            loggers to hand to the RunConfig directly.
    """
    # Validate/normalize optional params up front.
    loggers = check.opt_dict_param(loggers, 'loggers', key_type=str, value_type=LoggerDefinition)
    run_config_loggers = check.opt_list_param(
        run_config_loggers, 'run_config_loggers', of_type=logging.Logger
    )

    mode_def = ModeDefinition(loggers=loggers)
    pipeline_def = PipelineDefinition(
        name='test_legacy_context', solids=[], mode_definitions=[mode_def]
    )

    run_config = RunConfig(str(uuid.uuid4()), tags=tags, loggers=run_config_loggers)
    # Enable every provided logger definition with empty (default) config.
    environment_config = create_environment_config(
        pipeline_def, {'loggers': {name: {} for name in loggers}}
    )
    logger_instances = _create_loggers(environment_config, run_config, pipeline_def, mode_def)
    log_manager = DagsterLogManager(run_config.run_id, {}, logger_instances)

    return construct_pipeline_execution_context(
        run_config=run_config,
        pipeline_def=pipeline_def,
        resources=resources,
        environment_config=environment_config,
        run_storage=InMemoryRunStorage(),
        intermediates_manager=InMemoryIntermediatesManager(),
        log_manager=log_manager,
    )
def test_basic_json_default_output_config_schema():
    """JSON materialization config for the default 'result' output evaluates
    and is preserved verbatim on the solid's environment entry."""
    env = create_environment_config(
        single_int_output_pipeline(),
        {'solids': {'return_one': {'outputs': [{'result': {'json': {'path': 'foo'}}}]}}},
    )

    solid_env = env.solids['return_one']
    assert solid_env
    # The evaluated outputs list matches the input config exactly.
    assert solid_env.outputs == [{'result': {'json': {'path': 'foo'}}}]
def test_compile():
    """Steps of nested composite solids coalesce under fully-qualified
    dotted keys in the execution plan."""
    run_config = RunConfig()
    # NOTE(review): run_config=None is passed explicitly here even though a
    # local run_config exists; preserved as-is to keep behavior identical.
    environment_config = create_environment_config(
        composition,
        {'solids': {'add_four': {'inputs': {'num': {'value': 1}}}}},
        run_config=None,
    )
    plan = ExecutionPlan.build(
        composition, environment_config, composition.get_mode_definition(run_config.mode)
    )

    coalesced = coalesce_execution_steps(plan)
    expected_keys = {
        'add_four.adder_1.adder_1',
        'add_four.adder_1.adder_2',
        'add_four.adder_2.adder_1',
        'add_four.adder_2.adder_2',
        'div_four.div_1',
        'div_four.div_2',
    }
    assert set(coalesced.keys()) == expected_keys
def test_no_outputs_one_input_config_schema():
    """A solid with one input and no outputs accepts input config but rejects
    any 'outputs' section as an undefined field."""
    valid_env = {
        'solids': {'take_input_return_nothing': {'inputs': {'dummy': {'value': 'value'}}}}
    }
    assert create_environment_config(one_input_no_output_pipeline(), valid_env)

    invalid_env = {
        'solids': {
            'take_input_return_nothing': {
                'inputs': {'dummy': {'value': 'value'}},
                'outputs': {},
            }
        }
    }
    with pytest.raises(PipelineConfigEvaluationError) as exc_context:
        create_environment_config(one_input_no_output_pipeline(), invalid_env)

    assert len(exc_context.value.errors) == 1
    expected = 'Error 1: Undefined field "outputs" at path root:solids:take_input_return_nothing'
    assert expected in exc_context.value.message