import tempfile
from collections import OrderedDict

import pytest

import openff.evaluator.properties
from openff.evaluator import unit
from openff.evaluator.backends.dask import DaskLocalCluster
from openff.evaluator.datasets import PropertyPhase
from openff.evaluator.layers import registered_calculation_schemas
from openff.evaluator.layers.workflow import WorkflowCalculationSchema
from openff.evaluator.protocols.groups import ConditionalGroup
from openff.evaluator.substances import Substance
from openff.evaluator.thermodynamics import ThermodynamicState
from openff.evaluator.utils import graph
from openff.evaluator.workflow import Workflow, WorkflowResult, WorkflowSchema
from openff.evaluator.workflow.utils import ProtocolPath

# `DummyInputOutputProtocol`, `create_dummy_property`, `create_dummy_metadata`
# and the parametrized fixtures (`calculation_layer`, `property_type`,
# `workflow_merge_function`, `calculation_backend`, `compute_resources`,
# `exception`) are assumed to be provided by the surrounding test module /
# conftest; their exact import paths are not shown here.


def test_workflow_schema_merging(
    calculation_layer, property_type, workflow_merge_function
):
    """Tests that two of the exact the same calculations get merged into one
    by the `WorkflowGraph`."""

    schema = registered_calculation_schemas[calculation_layer][property_type]

    if callable(schema):
        schema = schema()

    if not isinstance(schema, WorkflowCalculationSchema):
        pytest.skip("Not a `WorkflowCalculationSchema`.")

    property_class = getattr(openff.evaluator.properties, property_type)

    dummy_property = create_dummy_property(property_class)

    global_metadata = create_dummy_metadata(dummy_property, calculation_layer)

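    # Build two workflows from the same schema and the same global metadata.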
    workflow_a = Workflow(global_metadata, "workflow_a")
    workflow_a.schema = schema.workflow_schema

    workflow_b = Workflow(global_metadata, "workflow_b")
    workflow_b.schema = schema.workflow_schema

    workflow_graph = workflow_merge_function(workflow_a, workflow_b)

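    # Rebuild each workflow's (transitively reduced) dependants graph so that
    # their protocol orderings can be compared.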
    workflow_graph_a = workflow_a.to_graph()
    workflow_graph_b = workflow_b.to_graph()

    dependants_graph_a = workflow_graph_a._protocol_graph._build_dependants_graph(
        workflow_graph_a.protocols, False, apply_reduction=True
    )
    dependants_graph_b = workflow_graph_b._protocol_graph._build_dependants_graph(
        workflow_graph_b.protocols, False, apply_reduction=True
    )

    ordered_dict_a = OrderedDict(sorted(dependants_graph_a.items()))
    ordered_dict_a = {key: sorted(value) for key, value in ordered_dict_a.items()}
    ordered_dict_b = OrderedDict(sorted(dependants_graph_b.items()))
    ordered_dict_b = {key: sorted(value) for key, value in ordered_dict_b.items()}

    merge_order_a = graph.topological_sort(ordered_dict_a)
    merge_order_b = graph.topological_sort(ordered_dict_b)

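    # Merging two identical workflows should leave the number of protocols
    # unchanged, and every protocol should keep an identical schema.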
    assert len(workflow_graph.protocols) == len(workflow_a.protocols)

    for protocol_id in workflow_a.protocols:
        assert protocol_id in workflow_graph.protocols

    for protocol_id_A, protocol_id_B in zip(merge_order_a, merge_order_b):

        assert protocol_id_A == protocol_id_B

        assert (
            workflow_a.protocols[protocol_id_A].schema.json()
            == workflow_b.protocols[protocol_id_B].schema.json()
        )
def test_nested_input():
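    """Tests that a protocol input may reference a nested attribute of another
    protocol's output (here the `temperature` of a dictionary entry)."""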

    dict_protocol = DummyInputOutputProtocol("dict_protocol")
    dict_protocol.input_value = {"a": ThermodynamicState(1.0 * unit.kelvin)}

    quantity_protocol = DummyInputOutputProtocol("quantity_protocol")
    quantity_protocol.input_value = ProtocolPath("output_value[a].temperature",
                                                 dict_protocol.id)

    schema = WorkflowSchema()
    schema.protocol_schemas = [dict_protocol.schema, quantity_protocol.schema]
    schema.validate()

    workflow = Workflow({})
    workflow.schema = schema

    workflow_graph = workflow.to_graph()

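    # Execute the graph on a local dask cluster and check that exactly one
    # result is returned.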
    with tempfile.TemporaryDirectory() as temporary_directory:

        with DaskLocalCluster() as calculation_backend:

            results_futures = workflow_graph.execute(temporary_directory,
                                                     calculation_backend)

            assert len(results_futures) == 1
            result = results_futures[0].result()

    assert isinstance(result, WorkflowResult)
def test_simple_workflow_graph(calculation_backend, compute_resources,
                               exception):
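    """Tests executing a simple two protocol workflow graph, both with and
    without a calculation backend, including the case where the execution is
    expected to raise an `AssertionError`."""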

    expected_value = (1 * unit.kelvin).plus_minus(0.1 * unit.kelvin)

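    # Chain two dummy protocols so that protocol_b takes its input from
    # protocol_a's output.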
    protocol_a = DummyInputOutputProtocol("protocol_a")
    protocol_a.input_value = expected_value
    protocol_b = DummyInputOutputProtocol("protocol_b")
    protocol_b.input_value = ProtocolPath("output_value", protocol_a.id)

    schema = WorkflowSchema()
    schema.protocol_schemas = [protocol_a.schema, protocol_b.schema]
    schema.final_value_source = ProtocolPath("output_value", protocol_b.id)
    schema.validate()

    workflow = Workflow({})
    workflow.schema = schema

    workflow_graph = workflow.to_graph()

    with tempfile.TemporaryDirectory() as directory:

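        # When a backend was requested, execute the graph on a fresh local
        # dask cluster; otherwise execute it synchronously below.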
        if calculation_backend is not None:

            with DaskLocalCluster() as calculation_backend:

                if exception:

                    with pytest.raises(AssertionError):

                        workflow_graph.execute(directory, calculation_backend,
                                               compute_resources)

                    return

                else:

                    results_futures = workflow_graph.execute(
                        directory, calculation_backend, compute_resources)

                assert len(results_futures) == 1
                result = results_futures[0].result()

        else:

            # Without a backend the graph executes synchronously, so check for
            # the expected exception before attempting a normal execution.
            if exception:

                with pytest.raises(AssertionError):

                    workflow_graph.execute(directory, calculation_backend,
                                           compute_resources)

                return

            result = workflow_graph.execute(directory, calculation_backend,
                                            compute_resources)[0]

        assert isinstance(result, WorkflowResult)
        assert result.value.value == expected_value.value
def test_workflow_with_groups():
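    """Tests executing a workflow whose protocols are nested inside a
    `ConditionalGroup`."""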

    expected_value = (1 * unit.kelvin).plus_minus(0.1 * unit.kelvin)

    protocol_a = DummyInputOutputProtocol("protocol_a")
    protocol_a.input_value = expected_value
    protocol_b = DummyInputOutputProtocol("protocol_b")
    protocol_b.input_value = ProtocolPath("output_value", protocol_a.id)

    conditional_group = ConditionalGroup("conditional_group")
    conditional_group.add_protocols(protocol_a, protocol_b)

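    # The group loops until protocol_b's output value drops below 2 K; the
    # output is already 1 K, so the condition is satisfied on the first pass.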
    condition = ConditionalGroup.Condition()
    condition.right_hand_value = 2 * unit.kelvin
    condition.type = ConditionalGroup.Condition.Type.LessThan
    condition.left_hand_value = ProtocolPath("output_value.value",
                                             conditional_group.id,
                                             protocol_b.id)
    conditional_group.add_condition(condition)

    schema = WorkflowSchema()
    schema.protocol_schemas = [conditional_group.schema]
    schema.final_value_source = ProtocolPath("output_value",
                                             conditional_group.id,
                                             protocol_b.id)
    schema.validate()

    workflow = Workflow({})
    workflow.schema = schema

    workflow_graph = workflow.to_graph()

    with tempfile.TemporaryDirectory() as directory:

        with DaskLocalCluster() as calculation_backend:

            results_futures = workflow_graph.execute(directory,
                                                     calculation_backend)
            assert len(results_futures) == 1

            result = results_futures[0].result()

        assert isinstance(result, WorkflowResult)
        assert result.value.value == expected_value.value
def test_density_dielectric_merging(workflow_merge_function):
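    """Tests that the density and dielectric constant simulation workflows
    merge correctly, with only the trajectory / statistics extraction
    protocols remaining distinct."""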

    substance = Substance.from_components("C")

    density = openff.evaluator.properties.Density(
        thermodynamic_state=ThermodynamicState(
            temperature=298 * unit.kelvin, pressure=1 * unit.atmosphere
        ),
        phase=PropertyPhase.Liquid,
        substance=substance,
        value=10 * unit.gram / unit.mole,
        uncertainty=1 * unit.gram / unit.mole,
    )

    dielectric = openff.evaluator.properties.DielectricConstant(
        thermodynamic_state=ThermodynamicState(
            temperature=298 * unit.kelvin, pressure=1 * unit.atmosphere
        ),
        phase=PropertyPhase.Liquid,
        substance=substance,
        value=10 * unit.gram / unit.mole,
        uncertainty=1 * unit.gram / unit.mole,
    )

    density_schema = density.default_simulation_schema().workflow_schema
    dielectric_schema = dielectric.default_simulation_schema().workflow_schema

    density_metadata = Workflow.generate_default_metadata(
        density, "smirnoff99Frosst-1.1.0.offxml", []
    )

    dielectric_metadata = Workflow.generate_default_metadata(
        dielectric, "smirnoff99Frosst-1.1.0.offxml", []
    )

    density_workflow = Workflow(density_metadata)
    density_workflow.schema = density_schema

    dielectric_workflow = Workflow(dielectric_metadata)
    dielectric_workflow.schema = dielectric_schema

    workflow_merge_function(density_workflow, dielectric_workflow)

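    # Rebuild each workflow's dependants graph so that the protocol orderings
    # of the two merged workflows can be compared.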
    density_workflow_graph = density_workflow.to_graph()
    dielectric_workflow_graph = dielectric_workflow.to_graph()

    dependants_graph_a = density_workflow_graph._protocol_graph._build_dependants_graph(
        density_workflow_graph.protocols, False, apply_reduction=True
    )
    dependants_graph_b = (
        dielectric_workflow_graph._protocol_graph._build_dependants_graph(
            dielectric_workflow_graph.protocols, False, apply_reduction=True
        )
    )

    merge_order_a = graph.topological_sort(dependants_graph_a)
    merge_order_b = graph.topological_sort(dependants_graph_b)

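    # Protocols shared by both workflows should now have identical schemas;
    # only the trajectory / statistics extraction protocols should differ.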
    for protocol_id_A, protocol_id_B in zip(merge_order_a, merge_order_b):

        if (
            "extract_traj" not in protocol_id_A
            and "extract_stats" not in protocol_id_A
        ):

            assert (
                density_workflow.protocols[protocol_id_A].schema.json()
                == dielectric_workflow.protocols[protocol_id_B].schema.json()
            )

        else:

            assert (
                density_workflow.protocols[protocol_id_A].schema.json()
                != dielectric_workflow.protocols[protocol_id_B].schema.json()
            )