def test_zero_nodes():
    pipeline_definitions = _read_pipeline_resource(
        'resources/sample_pipelines/pipeline_invalid.json')
    pipeline_definitions['pipelines'][0]['nodes'] = []

    with pytest.raises(ValueError):
        PipelineParser().parse(pipeline_definitions)


def test_multiple_pipeline_definition():
    pipeline_definitions = _read_pipeline_resource(
        'resources/sample_pipelines/'
        'pipeline_multiple_pipeline_definitions.json')

    with pytest.raises(ValueError):
        PipelineParser().parse(pipeline_definitions)


def test_pipeline_with_dependencies():
    pipeline_definitions = _read_pipeline_resource(
        'resources/sample_pipelines/'
        'pipeline_3_node_sample_with_dependencies.json')
    pipeline = PipelineParser().parse(pipeline_definitions)

    assert len(pipeline.operations['acc4527d-7cc8-4c16-b520-5aa0f50a2e34'].parent_operations) == 2


def test_multinode_pipeline():
    pipeline_definitions = _read_pipeline_resource(
        'resources/sample_pipelines/pipeline_3_node_sample.json')
    pipeline = PipelineParser().parse(pipeline_definitions)

    assert len(pipeline.operations) == 3


def test_supernode_pipeline():
    pipeline_definitions = _read_pipeline_resource(
        'resources/sample_pipelines/pipeline_with_supernode.json')
    pipeline = PipelineParser().parse(pipeline_definitions)

    assert len(pipeline.operations) == 4

    # Confirm the structure of the pipeline (4 nodes total):
    # Two execution nodes feed their outputs to a supernode containing one
    # execution node.  The supernode's execution node then sends its output to
    # an external execution node.  The supernode's execution node should have
    # two parent operations pointing at the first two nodes, and the final
    # node should have one parent pointing at the execution node WITHIN the
    # supernode.
    external_input_node_ids = ["db9f3f5b-b2e3-4824-aadd-c1c6bf652534",
                               "f6584209-6f22-434f-9820-41327b6c749d"]
    supernode_execution_node_id = "079c0e12-eb5f-4fcc-983b-09e011869fee"
    external_node_id = "7628306d-2cc2-405c-94a1-fe42c95567a1"

    # Validate the operations list.
    for node_id in pipeline.operations.keys():
        if node_id in external_input_node_ids:
            # These are input nodes; ensure their parent_operations are empty.
            assert len(pipeline.operations[node_id].parent_operations) == 0
            continue
        if node_id == supernode_execution_node_id:
            # Node within the supernode; should have two parent operations
            # matching external_input_node_ids.
            assert len(pipeline.operations[node_id].parent_operations) == 2
            assert set(pipeline.operations[node_id].parent_operations) == set(external_input_node_ids)
            continue
        if node_id == external_node_id:
            # Final external node; should have the supernode's embedded node
            # as its parent operation.
            assert len(pipeline.operations[node_id].parent_operations) == 1
            assert pipeline.operations[node_id].parent_operations[0] == supernode_execution_node_id
            continue
        assert False, "Invalid node_id encountered in pipeline operations!"


def test_missing_pipeline_name_should_default_to_untitled():
    pipeline_definitions = _read_pipeline_resource(
        'resources/sample_pipelines/pipeline_valid.json')
    pipeline_definitions['pipelines'][0]['app_data'].pop('name')
    pipeline = PipelineParser().parse(pipeline_definitions)

    assert pipeline.name == 'untitled'


def test_missing_primary_id():
    pipeline_definitions = _read_pipeline_resource(
        'resources/sample_pipelines/pipeline_invalid.json')
    # Replace the pipeline id with a non-matching guid so the primary pipeline is not found.
    pipeline_definitions['pipelines'][0]['id'] = "deadbeef-dead-beef-dead-beefdeadbeef"

    with pytest.raises(ValueError):
        PipelineParser().parse(pipeline_definitions)


def test_missing_pipelines():
    pipeline_definitions = _read_pipeline_resource(
        'resources/sample_pipelines/pipeline_invalid.json')
    pipeline_definitions.pop('pipelines')

    with pytest.raises(ValueError):
        PipelineParser().parse(pipeline_definitions)


def test_invalid_node_type():
    pipeline_definitions = _read_pipeline_resource(
        'resources/sample_pipelines/pipeline_valid.json')
    pipeline_definitions['pipelines'][0]['nodes'][0]['type'] = 'foo'

    with pytest.raises(ValueError) as e:
        PipelineParser().parse(pipeline_definitions)
    assert "Node type 'foo' is invalid!" in str(e.value)


def test_missing_operation_type():
    pipeline_definitions = _read_pipeline_resource(
        'resources/sample_pipelines/pipeline_valid.json')
    pipeline_definitions['pipelines'][0]['nodes'][0].pop('type')

    with pytest.raises(ValueError) as e:
        PipelineParser().parse(pipeline_definitions)
    assert "Node type 'None' is invalid!" in str(e.value)


def test_missing_operation_id():
    pipeline_definitions = _read_pipeline_resource(
        'resources/sample_pipelines/pipeline_valid.json')
    pipeline_definitions['pipelines'][0]['nodes'][0].pop('id')

    with pytest.raises(ValueError) as e:
        PipelineParser().parse(pipeline_definitions)
    assert "Missing field 'operation id'" in str(e.value)


def test_missing_pipeline_runtime_configuration():
    pipeline_definitions = _read_pipeline_resource(
        'resources/sample_pipelines/pipeline_valid.json')
    pipeline_definitions['pipelines'][0]['app_data'].pop('runtime-config')

    with pytest.raises(ValueError) as e:
        PipelineParser().parse(pipeline_definitions)
    assert "Invalid pipeline: Missing runtime configuration" in str(e.value)


def test_pipeline_global_attributes():
    pipeline_definitions = _read_pipeline_resource(
        'resources/sample_pipelines/pipeline_valid.json')
    pipeline = PipelineParser().parse(pipeline_definitions)

    assert pipeline.name == '{{name}}'
    assert pipeline.runtime == '{{runtime}}'
    assert pipeline.runtime_config == '{{runtime-config}}'


def test_missing_operation_image():
    pipeline_definitions = _read_pipeline_resource(
        'resources/sample_pipelines/pipeline_valid.json')
    pipeline_definitions['pipelines'][0]['nodes'][0]['app_data'].pop('runtime_image')

    with pytest.raises(ValueError) as e:
        PipelineParser().parse(pipeline_definitions)
    assert "Missing field 'operation runtime image'" in str(e.value)


def test_pipeline_operations_and_handle_artifact_file_details():
    pipeline_definitions = _read_pipeline_resource(
        'resources/sample_pipelines/pipeline_3_node_sample.json')
    pipeline = PipelineParser().parse(pipeline_definitions)

    assert len(pipeline.operations) == 3
    for op in pipeline.operations.values():
        # Operation names should not carry a file extension.
        assert '.' not in op.name


def test_valid_pipeline(valid_operation):
    pipeline_definitions = _read_pipeline_resource(
        'resources/sample_pipelines/pipeline_valid.json')
    pipeline = PipelineParser().parse(pipeline_definitions)

    assert pipeline.name == '{{name}}'
    assert pipeline.runtime == '{{runtime}}'
    assert pipeline.runtime_config == '{{runtime-config}}'
    assert len(pipeline.operations) == 1
    assert pipeline.operations['{{uuid}}'] == valid_operation


def test_pipeline_get_envs():
    # Ensure pipeline operation env lists are properly converted to dictionaries.
    pipeline_definitions = _read_pipeline_resource(
        'resources/sample_pipelines/pipeline_dependency_complex.json')
    pipeline = PipelineParser().parse(pipeline_definitions)

    for op in pipeline.operations.values():
        op_envs = op.env_vars_as_dict()
        assert op_envs['OP_NAME'] == op.name


def test_pipeline_with_dirty_list_values(valid_operation):
    pipeline_definitions = _read_pipeline_resource(
        'resources/sample_pipelines/pipeline_with_invalid_list_values.json')
    pipeline = PipelineParser().parse(pipeline_definitions)

    assert pipeline.name == '{{name}}'
    assert pipeline.runtime == '{{runtime}}'
    assert pipeline.runtime_config == '{{runtime-config}}'
    assert len(pipeline.operations) == 1
    assert pipeline.operations['{{uuid}}'] == valid_operation


def test_pipeline_get_envs():
    # Ensure pipeline operation env lists are properly converted to dictionaries.
    pipeline_definitions = _read_pipeline_resource(
        'resources/sample_pipelines/pipeline_dependency_complex.json')
    pipeline = PipelineParser().parse(pipeline_definitions)

    local_processor = LocalPipelineProcessor()
    for op in pipeline.operations.values():
        op_envs = local_processor._get_envs(op)
        assert op_envs['OP_NAME'] == op.name


def test_pipeline_execution_order_in_simple_pipeline():
    expected_operation_names = ['f', 'a', 'c', 'g']
    pipeline_definitions = _read_pipeline_resource(
        'resources/sample_pipelines/pipeline_dependency_simple.json')
    pipeline = PipelineParser().parse(pipeline_definitions)

    current_ordered_operation_names = _get_operation_names(pipeline.operations.values())
    assert current_ordered_operation_names != expected_operation_names

    operations = LocalPipelineProcessor._sort_operations(
        operations_by_id=pipeline.operations)

    ordered_operation_names = _get_operation_names(operations)
    assert ordered_operation_names == expected_operation_names


def test_pipeline_execution_order_in_complex_pipeline():
    expected_operation_names = ['a', 'b', 'c', 'd', 'e', 'f', 'x', 'y', 'g', 'h']
    pipeline_definitions = _read_pipeline_resource(
        'resources/sample_pipelines/pipeline_dependency_complex.json')
    pipeline = PipelineParser().parse(pipeline_definitions)

    current_ordered_operation_names = _get_operation_names(pipeline.operations.values())
    assert current_ordered_operation_names != expected_operation_names

    operations = LocalPipelineProcessor._sort_operations(
        operations_by_id=pipeline.operations)

    ordered_operation_names = _get_operation_names(operations)
    assert ordered_operation_names == expected_operation_names


def test_scrub_list_function():
    env_variables_input = ['FOO=Bar', 'BAR=Foo', None, '']
    env_variables_output = ['FOO=Bar', 'BAR=Foo']

    assert PipelineParser()._scrub_list(env_variables_input) == env_variables_output
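

# The tests above rely on two module-level helpers that are not shown in this
# excerpt: _read_pipeline_resource and _get_operation_names.  The sketch below
# is only an illustration of what they might look like; the resource-resolution
# path (relative to the test module) is an assumption, and the real definitions
# live elsewhere in the test suite.
import json
import os


def _read_pipeline_resource(pipeline_filename):
    # Load a pipeline JSON fixture relative to this test module (assumed layout).
    root = os.path.dirname(__file__)
    with open(os.path.join(root, pipeline_filename)) as f:
        return json.load(f)


def _get_operation_names(operations):
    # Return operation names in iteration order, used to compare the
    # pre-sort and post-sort ordering of pipeline operations.
    return [operation.name for operation in operations]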