def test_nested_input():
    """A protocol input may reference a nested attribute of another protocol's
    output (here ``output_value[a].temperature``) via a ``ProtocolPath``."""

    source = DummyProtocol("dict_protocol")
    source.input_value = {"a": ThermodynamicState(1.0 * unit.kelvin)}

    target = DummyProtocol("quantity_protocol")
    target.input_value = ProtocolPath("output_value[a].temperature", source.id)

    schema = WorkflowSchema()
    schema.protocol_schemas = [source.schema, target.schema]
    schema.validate()

    workflow = Workflow({})
    workflow.schema = schema

    graph = workflow.to_graph()

    with tempfile.TemporaryDirectory() as base_directory:
        with DaskLocalCluster() as backend:
            futures = graph.execute(base_directory, backend)
            assert len(futures) == 1

            result = futures[0].result()
            assert isinstance(result, WorkflowResult)
def test_protocol_graph_execution(calculation_backend, compute_resources):
    """Execute a two-protocol chain through a ``ProtocolGraph`` — either via a
    calculation backend or directly in process — and check that the first
    protocol's value propagates to the second."""

    if calculation_backend is not None:
        calculation_backend.start()

    protocol_a = DummyProtocol("protocol_a")
    protocol_a.input_value = 1

    protocol_b = DummyProtocol("protocol_b")
    protocol_b.input_value = ProtocolPath("output_value", protocol_a.id)

    graph = ProtocolGraph()
    graph.add_protocols(protocol_a, protocol_b)

    with tempfile.TemporaryDirectory() as directory:
        results = graph.execute(directory, calculation_backend, compute_resources)
        final_result = results[protocol_b.id]

        # A backend returns a future which must be resolved first.
        if calculation_backend is not None:
            final_result = final_result.result()

        with open(final_result[1]) as file:
            results_b = json.load(file, cls=TypedJSONDecoder)

    assert results_b[".output_value"] == protocol_a.input_value

    # Direct (in-process) execution also updates the protocol objects.
    if compute_resources is not None:
        assert protocol_b.output_value == protocol_a.input_value

    if calculation_backend is not None:
        calculation_backend.stop()
def test_simple_workflow_graph(calculation_backend, compute_resources, exception):
    """Execute a minimal two-protocol workflow graph either through a Dask
    backend or directly, optionally expecting an ``AssertionError``.

    Parameters
    ----------
    calculation_backend
        A backend placeholder; when not ``None`` a fresh ``DaskLocalCluster``
        is used for execution.
    compute_resources
        Resources forwarded to ``WorkflowGraph.execute``.
    exception
        When truthy, execution is expected to raise ``AssertionError``.
    """
    expected_value = (1 * unit.kelvin).plus_minus(0.1 * unit.kelvin)

    protocol_a = DummyProtocol("protocol_a")
    protocol_a.input_value = expected_value
    protocol_b = DummyProtocol("protocol_b")
    protocol_b.input_value = ProtocolPath("output_value", protocol_a.id)

    schema = WorkflowSchema()
    schema.protocol_schemas = [protocol_a.schema, protocol_b.schema]
    schema.final_value_source = ProtocolPath("output_value", protocol_b.id)
    schema.validate()

    workflow = Workflow({})
    workflow.schema = schema

    workflow_graph = workflow.to_graph()

    with tempfile.TemporaryDirectory() as directory:
        if calculation_backend is not None:
            with DaskLocalCluster() as calculation_backend:
                if exception:
                    with pytest.raises(AssertionError):
                        workflow_graph.execute(
                            directory, calculation_backend, compute_resources
                        )
                    return

                results_futures = workflow_graph.execute(
                    directory, calculation_backend, compute_resources
                )
                assert len(results_futures) == 1
                result = results_futures[0].result()
        else:
            # BUG FIX: guard with the expected-exception check *before*
            # executing. Previously the graph was executed unguarded first,
            # so an expected AssertionError would escape the test instead of
            # being captured by pytest.raises.
            if exception:
                with pytest.raises(AssertionError):
                    workflow_graph.execute(
                        directory, calculation_backend, compute_resources
                    )
                return

            result = workflow_graph.execute(
                directory, calculation_backend, compute_resources
            )[0]

    assert isinstance(result, WorkflowResult)
    assert result.value.value == expected_value.value
def build_easy_graph():
    """Build two independent single-protocol branches with identical constant
    inputs and return them as a pair of one-element lists."""

    first = DummyProtocol("protocol_a")
    first.input_value = 1

    second = DummyProtocol("protocol_b")
    second.input_value = 1

    return [first], [second]
def test_conditional_protocol_group():
    """A ``ConditionalGroup`` whose condition is satisfied executes its child
    protocols and exposes their results through ``get_value``."""

    with tempfile.TemporaryDirectory() as directory:
        initial_value = 2 * unit.kelvin

        value_protocol_a = DummyProtocol("protocol_a")
        value_protocol_a.input_value = initial_value

        add_values = AddValues("add_values")
        add_values.values = [
            ProtocolPath("output_value", value_protocol_a.id),
            ProtocolPath("output_value", value_protocol_a.id),
        ]

        # The condition (sum > single value) holds immediately.
        condition = ConditionalGroup.Condition()
        condition.left_hand_value = ProtocolPath("result", add_values.id)
        condition.right_hand_value = ProtocolPath("output_value", value_protocol_a.id)
        condition.type = ConditionalGroup.Condition.Type.GreaterThan

        group = ConditionalGroup("protocol_group")
        group.conditions.append(condition)
        group.add_protocols(value_protocol_a, add_values)

        group.execute(directory, ComputeResources())

        assert (
            group.get_value(ProtocolPath("result", add_values.id)) == 4 * unit.kelvin
        )
def test_conditional_protocol_group_fail():
    """A ``ConditionalGroup`` whose condition can never be met raises a
    ``RuntimeError`` once its iteration budget is exhausted."""

    with tempfile.TemporaryDirectory() as directory:
        initial_value = 2 * unit.kelvin

        value_protocol_a = DummyProtocol("protocol_a")
        value_protocol_a.input_value = initial_value

        add_values = AddValues("add_values")
        add_values.values = [
            ProtocolPath("output_value", value_protocol_a.id),
            ProtocolPath("output_value", value_protocol_a.id),
        ]

        # The condition (sum < single value) can never hold.
        condition = ConditionalGroup.Condition()
        condition.left_hand_value = ProtocolPath("result", add_values.id)
        condition.right_hand_value = ProtocolPath("output_value", value_protocol_a.id)
        condition.type = ConditionalGroup.Condition.Type.LessThan

        group = ConditionalGroup("protocol_group")
        group.conditions.append(condition)
        group.max_iterations = 10
        group.add_protocols(value_protocol_a, add_values)

        with pytest.raises(RuntimeError):
            group.execute(directory, ComputeResources())
def test_conditional_group_self_reference():
    """Tests that protocols within a conditional group can access the outputs
    of its parent, such as the current iteration of the group."""

    max_iterations = 10
    criteria = random.randint(1, max_iterations - 1)

    group = ConditionalGroup("conditional_group")
    group.max_iterations = max_iterations

    # The child protocol echoes the group's own iteration counter.
    protocol = DummyProtocol("protocol_a")
    protocol.input_value = ProtocolPath("current_iteration", group.id)

    condition_1 = ConditionalGroup.Condition()
    condition_1.left_hand_value = ProtocolPath("output_value", group.id, protocol.id)
    condition_1.right_hand_value = criteria
    condition_1.type = ConditionalGroup.Condition.Type.GreaterThan

    condition_2 = ConditionalGroup.Condition()
    condition_2.left_hand_value = ProtocolPath("current_iteration", group.id)
    condition_2.right_hand_value = criteria
    condition_2.type = ConditionalGroup.Condition.Type.GreaterThan

    group.add_protocols(protocol)
    group.add_condition(condition_1)
    group.add_condition(condition_2)

    with tempfile.TemporaryDirectory() as directory:
        group.execute(directory, ComputeResources())

    # Both conditions first hold one iteration past the threshold.
    assert protocol.output_value == criteria + 1
def test_protocol_group_execution():
    """A plain ``ProtocolGroup`` executes its chained children and exposes
    the final value through a nested ``ProtocolPath``."""

    protocol_a = DummyProtocol("protocol_a")
    protocol_a.input_value = 1

    protocol_b = DummyProtocol("protocol_b")
    protocol_b.input_value = ProtocolPath("output_value", protocol_a.id)

    group = ProtocolGroup("protocol_group")
    group.add_protocols(protocol_a, protocol_b)

    with tempfile.TemporaryDirectory() as directory:
        group.execute(directory, ComputeResources())

    value_path = ProtocolPath("output_value", group.id, protocol_b.id)
    assert group.get_value(value_path) == protocol_a.input_value
def test_workflow_with_groups():
    """A workflow whose only schema entry is a ``ConditionalGroup`` executes
    end-to-end and yields the value produced inside the group."""

    expected_value = (1 * unit.kelvin).plus_minus(0.1 * unit.kelvin)

    protocol_a = DummyProtocol("protocol_a")
    protocol_a.input_value = expected_value
    protocol_b = DummyProtocol("protocol_b")
    protocol_b.input_value = ProtocolPath("output_value", protocol_a.id)

    conditional_group = ConditionalGroup("conditional_group")
    conditional_group.add_protocols(protocol_a, protocol_b)

    condition = ConditionalGroup.Condition()
    condition.right_hand_value = 2 * unit.kelvin
    condition.type = ConditionalGroup.Condition.Type.LessThan
    condition.left_hand_value = ProtocolPath(
        "output_value.value", conditional_group.id, protocol_b.id
    )
    conditional_group.add_condition(condition)

    schema = WorkflowSchema()
    schema.protocol_schemas = [conditional_group.schema]
    schema.final_value_source = ProtocolPath(
        "output_value", conditional_group.id, protocol_b.id
    )
    schema.validate()

    workflow = Workflow({})
    workflow.schema = schema

    graph = workflow.to_graph()

    with tempfile.TemporaryDirectory() as directory:
        with DaskLocalCluster() as backend:
            futures = graph.execute(directory, backend)
            assert len(futures) == 1

            result = futures[0].result()

    assert isinstance(result, WorkflowResult)
    assert result.value.value == expected_value.value
def build_merge(prefix):
    """Build two constant-rooted chains that merge into a single tail.

    All protocol ids are namespaced with *prefix*; the protocols are
    returned in dependency order.
    """
    # a - b \
    #        | - e - f
    # c - d /
    protocol_a = DummyProtocol(prefix + "protocol_a")
    protocol_a.input_value = 1

    protocol_b = DummyProtocol(prefix + "protocol_b")
    protocol_b.input_value = ProtocolPath("output_value", protocol_a.id)

    protocol_c = DummyProtocol(prefix + "protocol_c")
    protocol_c.input_value = 2

    protocol_d = DummyProtocol(prefix + "protocol_d")
    protocol_d.input_value = ProtocolPath("output_value", protocol_c.id)

    # e merges the outputs of both branches ...
    protocol_e = DummyProtocol(prefix + "protocol_e")
    protocol_e.input_value = [
        ProtocolPath("output_value", protocol_b.id),
        ProtocolPath("output_value", protocol_d.id),
    ]

    # ... and f consumes the merged result.
    protocol_f = DummyProtocol(prefix + "protocol_f")
    protocol_f.input_value = ProtocolPath("output_value", protocol_e.id)

    return [
        protocol_a,
        protocol_b,
        protocol_c,
        protocol_d,
        protocol_e,
        protocol_f,
    ]
def build_protocols(prefix):
    """Build a graph where a forked sub-graph (see ``build_fork``) is wrapped
    in a ``ProtocolGroup`` whose inner outputs feed two outer protocols.

    All ids are namespaced with *prefix*.
    """
    # .-------------------.
    # |        / i - j -|- b
    # a - | g - h - |        |
    # |        \ k - l -|- c
    # .-------------------.
    protocol_a = DummyProtocol(prefix + "protocol_a")
    protocol_a.input_value = 1

    # Root the fork on protocol a's output, then group the fork.
    fork_protocols = build_fork(prefix)
    fork_protocols[0].input_value = ProtocolPath("output_value", protocol_a.id)

    protocol_group = ProtocolGroup(prefix + "protocol_group")
    protocol_group.add_protocols(*fork_protocols)

    # Two outer protocols each pull from a different fork branch.
    protocol_b = DummyProtocol(prefix + "protocol_b")
    protocol_b.input_value = ProtocolPath(
        "output_value", protocol_group.id, "protocol_j"
    )

    protocol_c = DummyProtocol(prefix + "protocol_c")
    protocol_c.input_value = ProtocolPath(
        "output_value", protocol_group.id, "protocol_l"
    )

    return [protocol_a, protocol_group, protocol_b, protocol_c]
def test_index_replicated_protocol():
    """Protocols expanded by a replicator can be referenced by their concrete
    indexed ids (``protocol_0`` … ``protocol_3``) from other schemas."""

    replicator = ProtocolReplicator("replicator")
    replicator.template_values = ["a", "b", "c", "d"]

    replicated_protocol = DummyProtocol(f"protocol_{replicator.placeholder_id}")
    replicated_protocol.input_value = ReplicatorValue(replicator.id)

    schema = WorkflowSchema()
    schema.protocol_replicators = [replicator]
    schema.protocol_schemas = [replicated_protocol.schema]

    # One consumer per expanded replica, addressed by index.
    for index in range(len(replicator.template_values)):
        indexing_protocol = DummyProtocol(f"indexing_protocol_{index}")
        indexing_protocol.input_value = ProtocolPath(
            "output_value", f"protocol_{index}"
        )
        schema.protocol_schemas.append(indexing_protocol.schema)

    schema.validate()

    workflow = Workflow({})
    workflow.schema = schema
def test_from_schema():
    """``Workflow.from_schema`` round-trips: the rebuilt workflow's schema
    serializes identically to the schema it was created from."""

    protocol_a = DummyProtocol("protocol_a")
    protocol_a.input_value = 1 * unit.kelvin

    schema = WorkflowSchema()
    schema.protocol_schemas = [protocol_a.schema]

    workflow = Workflow.from_schema(schema, {}, unique_id="")
    assert workflow is not None

    # Normalize the rebuilt schema before comparing the JSON forms.
    rebuilt_schema = workflow.schema
    rebuilt_schema.outputs_to_store = UNDEFINED

    assert rebuilt_schema.json(format=True) == schema.json(format=True)
def test_replicated_ids():
    """Schema validation rejects a replicated group whose children do not
    carry the replicator placeholder in their ids."""

    replicator = ProtocolReplicator("replicator-a")

    # The child id lacks the placeholder while the group id has it.
    protocol_a = DummyProtocol("protocol-a")
    protocol_a.input_value = 1

    group_a = ProtocolGroup(f"group-a-{replicator.placeholder_id}")
    group_a.add_protocols(protocol_a)

    schema = WorkflowSchema()
    schema.protocol_schemas = [group_a.schema]
    schema.protocol_replicators = [replicator]

    with pytest.raises(ValueError) as error_info:
        schema.validate()

    assert (
        f"The children of replicated protocol {group_a.id} must also contain the "
        "replicators placeholder" in str(error_info.value)
    )
def test_unique_ids():
    """Schema validation rejects two groups that both contain a protocol with
    the same id."""

    protocol_a = DummyProtocol("protocol-a")
    protocol_a.input_value = 1

    # The same child protocol is added to both groups, duplicating its id.
    group_a = ProtocolGroup("group-a")
    group_a.add_protocols(protocol_a)

    group_b = ProtocolGroup("group-b")
    group_b.add_protocols(protocol_a)

    schema = WorkflowSchema()
    schema.protocol_schemas = [group_a.schema, group_b.schema]

    with pytest.raises(ValueError) as error_info:
        schema.validate()

    message = str(error_info.value)
    assert "Several protocols in the schema have the same id" in message
    assert "protocol-a" in message
def test_protocol_group_resume():
    """A test that protocol groups can recover after being killed (e.g. by a
    worker being killed due to hitting a wallclock limit)
    """
    compute_resources = ComputeResources()

    # Fake a protocol group which executes the first
    # two protocols and then 'gets killed'.
    protocol_a = DummyProtocol("protocol_a")
    protocol_a.input_value = 1
    protocol_b = DummyProtocol("protocol_b")
    protocol_b.input_value = ProtocolPath("output_value", protocol_a.id)

    partial_group = ProtocolGroup("group_a")
    partial_group.add_protocols(protocol_a, protocol_b)

    graph = ProtocolGraph()
    graph.add_protocols(partial_group)
    graph.execute("graph_a", compute_resources=compute_resources)

    # Remove the output file so it appears that the protocol group had not
    # completed.
    os.unlink(
        os.path.join("graph_a", partial_group.id, f"{partial_group.id}_output.json")
    )

    # Build the 'full' group with the last two protocols which
    # 'had not been exited' after the group was 'killed'
    protocol_a = DummyProtocol("protocol_a")
    protocol_a.input_value = 1
    protocol_b = DummyProtocol("protocol_b")
    protocol_b.input_value = ProtocolPath("output_value", protocol_a.id)
    protocol_c = DummyProtocol("protocol_c")
    protocol_c.input_value = ProtocolPath("output_value", protocol_b.id)
    protocol_d = DummyProtocol("protocol_d")
    protocol_d.input_value = ProtocolPath("output_value", protocol_c.id)

    full_group = ProtocolGroup("group_a")
    full_group.add_protocols(protocol_a, protocol_b, protocol_c, protocol_d)

    graph = ProtocolGraph()
    graph.add_protocols(full_group)
    graph.execute("graph_a", compute_resources=compute_resources)

    # Every output should be populated after the resumed run.
    assert all(x != UNDEFINED for x in full_group.outputs.values())
def build_fork(prefix):
    """Build a chain which forks into two parallel branches after ``h``.

    All protocol ids are namespaced with *prefix*; the protocols are
    returned in dependency order.
    """
    #         / i - j
    # g - h - |
    #         \ k - l
    protocol_g = DummyProtocol(prefix + "protocol_g")
    protocol_g.input_value = 3

    protocol_h = DummyProtocol(prefix + "protocol_h")
    protocol_h.input_value = ProtocolPath("output_value", protocol_g.id)

    # First branch: i -> j.
    protocol_i = DummyProtocol(prefix + "protocol_i")
    protocol_i.input_value = ProtocolPath("output_value", protocol_h.id)

    protocol_j = DummyProtocol(prefix + "protocol_j")
    protocol_j.input_value = ProtocolPath("output_value", protocol_i.id)

    # Second branch: k -> l.
    protocol_k = DummyProtocol(prefix + "protocol_k")
    protocol_k.input_value = ProtocolPath("output_value", protocol_h.id)

    protocol_l = DummyProtocol(prefix + "protocol_l")
    protocol_l.input_value = ProtocolPath("output_value", protocol_k.id)

    return [
        protocol_g,
        protocol_h,
        protocol_i,
        protocol_j,
        protocol_k,
        protocol_l,
    ]
def test_workflow_layer():
    """Test the `WorkflowLayer` calculation layer. As the `SimulationLayer`
    is the simplest implementation of the abstract layer, we settle for
    testing this."""

    properties_to_estimate = [
        create_dummy_property(Density),
        create_dummy_property(Density),
    ]

    # A very simple workflow which just returns a placeholder value.
    estimated_value = Observable((1 * unit.kelvin).plus_minus(0.1 * unit.kelvin))

    protocol_a = DummyProtocol("protocol_a")
    protocol_a.input_value = estimated_value

    schema = WorkflowSchema()
    schema.protocol_schemas = [protocol_a.schema]
    schema.final_value_source = ProtocolPath("output_value", protocol_a.id)

    layer_schema = SimulationSchema()
    layer_schema.workflow_schema = schema

    options = RequestOptions()
    options.add_schema("SimulationLayer", "Density", layer_schema)

    batch = server.Batch()
    batch.queued_properties = properties_to_estimate
    batch.options = options

    with tempfile.TemporaryDirectory() as directory:
        with temporarily_change_directory(directory):
            # Create a directory for the layer.
            layer_directory = "simulation_layer"
            os.makedirs(layer_directory)

            # Set-up a simple storage backend and add a force field to it.
            force_field = SmirnoffForceFieldSource.from_path(
                "smirnoff99Frosst-1.1.0.offxml"
            )

            storage_backend = LocalFileStorage()
            batch.force_field_id = storage_backend.store_force_field(force_field)

            # Create a simple calculation backend to test with.
            with DaskLocalCluster() as calculation_backend:

                def dummy_callback(returned_request):
                    # Both properties should come back estimated, error-free.
                    assert len(returned_request.estimated_properties) == 2
                    assert len(returned_request.exceptions) == 0

                simulation_layer = SimulationLayer()
                simulation_layer.schedule_calculation(
                    calculation_backend,
                    storage_backend,
                    layer_directory,
                    batch,
                    dummy_callback,
                    True,
                )
def test_group_replicators():
    """Replicators expand a protocol nested inside a group, and references to
    the replicated protocol — both single values and lists — are re-pointed
    at the expanded, indexed ids."""

    dummy_schema = WorkflowSchema()

    replicator_id = "replicator"

    # A replicated protocol nested inside a group.
    dummy_replicated_protocol = DummyProtocol(f"dummy_$({replicator_id})")
    dummy_replicated_protocol.input_value = ReplicatorValue(replicator_id)

    dummy_group = ProtocolGroup("dummy_group")
    dummy_group.add_protocols(dummy_replicated_protocol)

    # A replicated consumer which references a single replica ...
    dummy_protocol_single_value = DummyProtocol(f"dummy_single_$({replicator_id})")
    dummy_protocol_single_value.input_value = ProtocolPath(
        "output_value", dummy_group.id, dummy_replicated_protocol.id
    )

    # ... and a non-replicated consumer which should receive a list.
    dummy_protocol_list_value = AddValues("dummy_list")
    dummy_protocol_list_value.values = ProtocolPath(
        "output_value", dummy_group.id, dummy_replicated_protocol.id
    )

    dummy_schema.protocol_schemas = [
        dummy_group.schema,
        dummy_protocol_single_value.schema,
        dummy_protocol_list_value.schema,
    ]

    replicator = ProtocolReplicator(replicator_id)
    replicator.template_values = [
        (1.0 * unit.kelvin).plus_minus(1.0 * unit.kelvin),
        (2.0 * unit.kelvin).plus_minus(2.0 * unit.kelvin),
    ]
    dummy_schema.protocol_replicators = [replicator]

    dummy_schema.validate()

    dummy_property = create_dummy_property(Density)
    dummy_metadata = Workflow.generate_default_metadata(
        dummy_property, "smirnoff99Frosst-1.1.0.offxml", []
    )

    dummy_workflow = Workflow(dummy_metadata, "")
    dummy_workflow.schema = dummy_schema

    assert len(dummy_workflow.protocols) == 4

    # The group's replicas received the template values directly.
    group_protocols = dummy_workflow.protocols[dummy_group.id].protocols
    assert (
        group_protocols["dummy_0"].input_value.value
        == replicator.template_values[0].value
    )
    assert (
        group_protocols["dummy_1"].input_value.value
        == replicator.template_values[1].value
    )

    # Single-value consumers now target the concrete replica ids.
    assert dummy_workflow.protocols["dummy_single_0"].input_value == ProtocolPath(
        "output_value", dummy_group.id, "dummy_0"
    )
    assert dummy_workflow.protocols["dummy_single_1"].input_value == ProtocolPath(
        "output_value", dummy_group.id, "dummy_1"
    )

    # The list consumer was expanded into one path per replica.
    list_values = dummy_workflow.protocols["dummy_list"].values
    assert len(list_values) == 2
    assert list_values[0] == ProtocolPath("output_value", dummy_group.id, "dummy_0")
    assert list_values[1] == ProtocolPath("output_value", dummy_group.id, "dummy_1")
def test_nested_protocol_paths():
    """Exercise ``ProtocolPath`` access into nested protocol inputs:
    attribute access, list indexing, dict values and lists of lists —
    and check that invalid paths raise ``ValueError``."""

    value_protocol_a = DummyProtocol("protocol_a")
    value_protocol_a.input_value = (1 * unit.kelvin).plus_minus(0.1 * unit.kelvin)

    # Attribute access into a nested input.
    assert (
        value_protocol_a.get_value(ProtocolPath("input_value.value"))
        == value_protocol_a.input_value.value
    )

    value_protocol_b = DummyProtocol("protocol_b")
    value_protocol_b.input_value = (2 * unit.kelvin).plus_minus(0.05 * unit.kelvin)

    value_protocol_c = DummyProtocol("protocol_c")
    value_protocol_c.input_value = (4 * unit.kelvin).plus_minus(0.01 * unit.kelvin)

    add_values_protocol = AddValues("add_values")
    add_values_protocol.values = [
        ProtocolPath("output_value", value_protocol_a.id),
        ProtocolPath("output_value", value_protocol_b.id),
        ProtocolPath("output_value", value_protocol_b.id),
        5,
    ]

    # "valus" is not a defined input of the protocol.
    with pytest.raises(ValueError):
        add_values_protocol.get_value(ProtocolPath("valus[string]"))

    # A non-integer index into a list input is rejected.
    with pytest.raises(ValueError):
        add_values_protocol.get_value(ProtocolPath("values[string]"))

    input_values = add_values_protocol.get_value_references(ProtocolPath("values"))
    assert isinstance(input_values, dict) and len(input_values) == 3

    for index, value_reference in enumerate(input_values):
        input_value = add_values_protocol.get_value(value_reference)
        assert input_value.full_path == add_values_protocol.values[index].full_path

        add_values_protocol.set_value(value_reference, index)

    assert set(add_values_protocol.values) == {0, 1, 2, 5}

    dummy_dict_protocol = DummyProtocol("dict_protocol")
    dummy_dict_protocol.input_value = {
        "value_a": ProtocolPath("output_value", value_protocol_a.id),
        "value_b": ProtocolPath("output_value", value_protocol_b.id),
    }

    input_values = dummy_dict_protocol.get_value_references(ProtocolPath("input_value"))
    assert isinstance(input_values, dict) and len(input_values) == 2

    for index, value_reference in enumerate(input_values):
        input_value = dummy_dict_protocol.get_value(value_reference)

        dummy_dict_keys = list(dummy_dict_protocol.input_value.keys())
        assert (
            input_value.full_path
            == dummy_dict_protocol.input_value[dummy_dict_keys[index]].full_path
        )

        dummy_dict_protocol.set_value(value_reference, index)

    add_values_protocol_2 = AddValues("add_values")
    add_values_protocol_2.values = [
        [ProtocolPath("output_value", value_protocol_a.id)],
        [
            ProtocolPath("output_value", value_protocol_b.id),
            ProtocolPath("output_value", value_protocol_b.id),
        ],
    ]

    with pytest.raises(ValueError):
        add_values_protocol_2.get_value(ProtocolPath("valus[string]"))

    # BUG FIX: this previously re-tested ``add_values_protocol`` — an exact
    # duplicate of an assertion above — instead of the newly built
    # ``add_values_protocol_2`` with its list-of-lists values.
    with pytest.raises(ValueError):
        add_values_protocol_2.get_value(ProtocolPath("values[string]"))