def test_submission():

    with tempfile.TemporaryDirectory() as directory:

        with temporarily_change_directory(directory):

            with DaskLocalCluster() as calculation_backend:

                # Spin up a server instance.
                server = EvaluatorServer(
                    calculation_backend=calculation_backend,
                    working_directory=directory,
                )

                with server:

                    # Connect a client.
                    client = EvaluatorClient()

                    # Submit an empty data set.
                    force_field_path = "smirnoff99Frosst-1.1.0.offxml"
                    force_field_source = SmirnoffForceFieldSource.from_path(
                        force_field_path
                    )

                    request, error = client.request_estimate(
                        PhysicalPropertyDataSet(), force_field_source
                    )
                    assert error is None
                    assert isinstance(request, Request)

                    result, error = request.results(polling_interval=0.01)
                    assert error is None
                    assert isinstance(result, RequestResult)
def main():

    setup_timestamp_logging()

    # Load in the force field.
    force_field_path = "smirnoff99Frosst-1.1.0.offxml"
    force_field_source = SmirnoffForceFieldSource.from_path(force_field_path)

    # Load in the data set containing the pure and binary properties.
    data_set = PhysicalPropertyDataSet.from_json("pure_data_set.json")
    data_set.merge(PhysicalPropertyDataSet.from_json("binary_data_set.json"))

    # Set up a server object to run the calculations using.
    server = setup_server(
        backend_type=BackendType.LocalGPU, max_number_of_workers=1, port=8001
    )

    with server:

        # Request the estimates.
        property_estimator = EvaluatorClient(ConnectionOptions(server_port=8001))

        for calculation_layer in ["SimulationLayer", "ReweightingLayer"]:

            options = RequestOptions()
            options.calculation_layers = [calculation_layer]

            parameter_gradient_keys = [
                ParameterGradientKey(tag="vdW", smirks="[#6X4:1]", attribute="epsilon"),
                ParameterGradientKey(tag="vdW", smirks="[#6X4:1]", attribute="rmin_half"),
            ]

            request, _ = property_estimator.request_estimate(
                property_set=data_set,
                force_field_source=force_field_source,
                options=options,
                parameter_gradient_keys=parameter_gradient_keys,
            )

            # Wait for the results.
            results, _ = request.results(True, 5)

            # Convert the layer name from CamelCase to snake_case for the file name.
            layer_name = re.sub(r"(?<!^)(?=[A-Z])", "_", calculation_layer).lower()
            results.json(f"pure_binary_{layer_name}.json", True)
def test_default_options():
    """Test creating the default estimation options."""

    data_set = PhysicalPropertyDataSet()
    force_field_source = SmirnoffForceFieldSource.from_path(
        "smirnoff99Frosst-1.1.0.offxml"
    )

    for property_type in property_types:
        physical_property = create_dummy_property(property_type)
        data_set.add_properties(physical_property)

    options = EvaluatorClient.default_request_options(data_set, force_field_source)
    options.validate()

    assert len(options.calculation_layers) == 2
    assert len(options.calculation_schemas) == len(property_types)
    assert all(
        len(x) == len(options.calculation_layers)
        for x in options.calculation_schemas.values()
    )
def build_tip3p_smirnoff_force_field():
    """Combines the smirnoff99Frosst and tip3p offxml files into a single
    force field which can be consumed by the property estimator.

    Returns
    -------
    SmirnoffForceFieldSource
        The force field containing both smirnoff99Frosst-1.1.0 and
        TIP3P parameters.
    """
    from openforcefield.typing.engines.smirnoff import ForceField

    smirnoff_force_field_path = "smirnoff99Frosst-1.1.0.offxml"
    tip3p_force_field_path = get_data_filename("forcefield/tip3p.offxml")

    smirnoff_force_field_with_tip3p = ForceField(
        smirnoff_force_field_path, tip3p_force_field_path
    )

    return SmirnoffForceFieldSource.from_object(smirnoff_force_field_with_tip3p)
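A minimal usage sketch for the helper above, pieced together from the serialization calls used in the other snippets (`.json(...)` and `SmirnoffForceFieldSource.parse_json(...)`); the file name is illustrative and import paths may differ between evaluator versions.

# Hedged usage sketch: serialize the combined force field source and read it
# back, mirroring the .json() / parse_json() round trip seen elsewhere in
# these snippets. Assumes build_tip3p_smirnoff_force_field() is importable.
combined_source = build_tip3p_smirnoff_force_field()

# Write the source to disk so it can be handed to a workflow or server.
combined_source.json("force-field.json", format=True)

# Re-load it from the file, e.g. inside a protocol which only receives a path.
with open("force-field.json") as file:
    loaded_source = SmirnoffForceFieldSource.parse_json(file.read())

assert combined_source.json() == loaded_source.json()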
def _get_solvent_dictionary(self):
    """Returns a dictionary of the solvent which will be serialized
    to a yaml file and passed to YANK. In most cases, this should
    just be passing force field settings over, such as PME settings.

    Returns
    -------
    dict of str and Any
        A yaml compatible dictionary of YANK solvents.
    """

    with open(self.force_field_path, "r") as file:
        force_field_source = SmirnoffForceFieldSource.parse_json(file.read())

    force_field = force_field_source.to_force_field()
    charge_method = force_field.get_parameter_handler("Electrostatics").method

    if charge_method.lower() != "pme":
        raise ValueError("Currently only PME electrostatics are supported.")

    return {"default": {"nonbonded_method": charge_method}}
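For reference, a small sketch of what the returned dictionary looks like when the SMIRNOFF source specifies PME electrostatics (the only case the method accepts), and how it might be dumped to YAML; the surrounding "solvents" key is a placeholder for the YANK input block, not the full YANK schema.

import yaml

# Illustrative only: the structure returned by _get_solvent_dictionary() and
# a possible way of embedding it in a YAML options block. The "solvents" key
# shown here is an assumption about the surrounding YANK input file.
solvents = {"default": {"nonbonded_method": "PME"}}
print(yaml.safe_dump({"solvents": solvents}, sort_keys=False))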
def main():

    setup_timestamp_logging()

    # Load in the force field.
    force_field_path = "smirnoff99Frosst-1.1.0.offxml"
    force_field_source = SmirnoffForceFieldSource.from_path(force_field_path)

    # Create a data set containing three solvation free energies.
    data_set = PhysicalPropertyDataSet.from_json("hydration_data_set.json")
    data_set.json("hydration_data_set.json", format=True)

    # Set up a server object to run the calculations using.
    server = setup_server(
        backend_type=BackendType.LocalGPU, max_number_of_workers=1, port=8002
    )

    with server:

        # Request the estimates.
        property_estimator = EvaluatorClient(ConnectionOptions(server_port=8002))

        options = RequestOptions()
        options.calculation_layers = ["SimulationLayer"]
        options.add_schema(
            "SimulationLayer", "SolvationFreeEnergy", _get_fixed_lambda_schema()
        )

        request, _ = property_estimator.request_estimate(
            property_set=data_set,
            force_field_source=force_field_source,
            options=options,
        )

        # Wait for the results.
        results, _ = request.results(True, 60)

        # Save the result to file.
        results.json("results.json", True)
def test_force_field_storage():
    """A simple test that force fields can be stored and
    retrieved using the local storage backend."""

    force_field_source = SmirnoffForceFieldSource.from_path(
        "smirnoff99Frosst-1.1.0.offxml"
    )

    with tempfile.TemporaryDirectory() as temporary_directory:

        local_storage = LocalFileStorage(temporary_directory)

        force_field_id = local_storage.store_force_field(force_field_source)
        retrieved_force_field = local_storage.retrieve_force_field(force_field_id)

        assert force_field_source.json() == retrieved_force_field.json()

        local_storage_new = LocalFileStorage(temporary_directory)
        assert local_storage_new.has_force_field(force_field_source)

        new_force_field_id = local_storage_new.store_force_field(force_field_source)
        assert new_force_field_id == force_field_id
def test_storage_retrieval():

    # Create some dummy properties.
    methane = Substance.from_components("C")
    methanol = Substance.from_components("CO")
    mixture = Substance.from_components("C", "CO")

    # Add extra unused data to make sure the wrong data isn't
    # being retrieved.
    unused_pure = Substance.from_components("CCO")
    unused_mixture = Substance.from_components("CCO", "CO")

    data_to_store = [
        (methane, PropertyPhase.Liquid, 1000),
        (methanol, PropertyPhase.Liquid, 1000),
        (methanol, PropertyPhase.Gas, 1),
        (mixture, PropertyPhase.Liquid, 1000),
        (unused_pure, PropertyPhase.Liquid, 1000),
        (unused_mixture, PropertyPhase.Liquid, 1000),
    ]
    storage_keys = {}

    state = ThermodynamicState(temperature=1.0 * unit.kelvin)

    properties = [
        # Properties with a full system query.
        Density(
            value=1.0 * unit.gram / unit.litre,
            substance=methanol,
            thermodynamic_state=state,
        ),
        DielectricConstant(
            value=1.0 * unit.dimensionless,
            substance=methane,
            thermodynamic_state=state,
        ),
        # Properties with a multi-component query.
        EnthalpyOfVaporization(
            value=1.0 * unit.joule / unit.mole,
            substance=methanol,
            thermodynamic_state=state,
        ),
        # Property with a multi-phase query.
        EnthalpyOfMixing(
            value=1.0 * unit.joule / unit.mole,
            substance=mixture,
            thermodynamic_state=state,
        ),
        ExcessMolarVolume(
            value=1.0 * unit.meter ** 3,
            substance=mixture,
            thermodynamic_state=state,
        ),
    ]

    expected_data_per_property = {
        Density: {"full_system_data": [(methanol, PropertyPhase.Liquid, 1000)]},
        DielectricConstant: {
            "full_system_data": [(methane, PropertyPhase.Liquid, 1000)]
        },
        EnthalpyOfVaporization: {
            "liquid_data": [(methanol, PropertyPhase.Liquid, 1000)],
            "gas_data": [(methanol, PropertyPhase.Gas, 1)],
        },
        EnthalpyOfMixing: {
            "full_system_data": [(mixture, PropertyPhase.Liquid, 1000)],
            "component_data": [
                [(methane, PropertyPhase.Liquid, 1000)],
                [(methanol, PropertyPhase.Liquid, 1000)],
            ],
        },
        ExcessMolarVolume: {
            "full_system_data": [(mixture, PropertyPhase.Liquid, 1000)],
            "component_data": [
                [(methane, PropertyPhase.Liquid, 1000)],
                [(methanol, PropertyPhase.Liquid, 1000)],
            ],
        },
    }

    force_field = SmirnoffForceFieldSource.from_path("smirnoff99Frosst-1.1.0.offxml")

    with tempfile.TemporaryDirectory() as base_directory:

        # Create a storage backend with some dummy data.
        backend_directory = os.path.join(base_directory, "storage_dir")
        storage_backend = LocalFileStorage(backend_directory)

        force_field_id = storage_backend.store_force_field(force_field)

        for substance, phase, n_mol in data_to_store:

            data_directory = os.path.join(base_directory, substance.identifier)
            data = create_dummy_simulation_data(
                data_directory,
                substance=substance,
                force_field_id=force_field_id,
                phase=phase,
                number_of_molecules=n_mol,
            )

            storage_key = storage_backend.store_object(data, data_directory)
            storage_keys[(substance, phase, n_mol)] = storage_key

        for physical_property in properties:

            schema = registered_calculation_schemas["ReweightingLayer"][
                physical_property.__class__.__name__
            ]

            if callable(schema):
                schema = schema()

            # noinspection PyProtectedMember
            metadata = ReweightingLayer._get_workflow_metadata(
                base_directory,
                physical_property,
                "",
                [],
                storage_backend,
                schema,
            )

            assert metadata is not None

            expected_data_list = expected_data_per_property[physical_property.__class__]

            for data_key in expected_data_list:

                assert data_key in metadata

                stored_metadata = metadata[data_key]
                expected_metadata = expected_data_list[data_key]

                assert len(stored_metadata) == len(expected_metadata)

                if isinstance(stored_metadata[0], list):
                    # Flatten any lists of lists.
                    stored_metadata = [
                        item for sublist in stored_metadata for item in sublist
                    ]
                    expected_metadata = [
                        item for sublist in expected_metadata for item in sublist
                    ]

                metadata_storage_keys = [
                    os.path.basename(x) for x, _, _ in stored_metadata
                ]
                expected_storage_keys = [storage_keys[x] for x in expected_metadata]

                assert sorted(metadata_storage_keys) == sorted(expected_storage_keys)
@pytest.mark.parametrize(
    "force_field_source, expected_protocol_type",
    [
        (
            SmirnoffForceFieldSource.from_path("smirnoff99Frosst-1.1.0.offxml"),
            "BuildSmirnoffSystem",
        ),
        (TLeapForceFieldSource(), "BuildTLeapSystem"),
        (LigParGenForceFieldSource(), "BuildLigParGenSystem"),
    ],
)
def test_protocol_replacement(force_field_source, expected_protocol_type):

    data_set = PhysicalPropertyDataSet()

    for property_type in property_types:
        physical_property = create_dummy_property(property_type)
        data_set.add_properties(physical_property)

    options = EvaluatorClient.default_request_options(data_set, force_field_source)
def request_estimate(
    self,
    property_set,
    force_field_source,
    options=None,
    parameter_gradient_keys=None,
):
    """Submits a request for the `EvaluatorServer` to attempt to estimate
    the data set of physical properties using the specified force field
    parameters according to the provided options.

    Parameters
    ----------
    property_set : PhysicalPropertyDataSet
        The set of properties to estimate.
    force_field_source : ForceFieldSource or openforcefield.typing.engines.smirnoff.ForceField
        The force field parameters to estimate the properties using.
    options : RequestOptions, optional
        A set of estimator options. If `None`, default options will be used
        (see `default_request_options`).
    parameter_gradient_keys: list of ParameterGradientKey, optional
        A list of the parameters that the physical properties should be
        differentiated with respect to.

    Returns
    -------
    Request
        An object which will provide access to the results of this request.
    EvaluatorException, optional
        Any exceptions raised while attempting to submit the request.
    """
    from openforcefield.typing.engines import smirnoff

    if property_set is None or force_field_source is None:

        raise ValueError(
            "Both a data set and force field source must be "
            "present to compute physical properties."
        )

    if parameter_gradient_keys is None:
        parameter_gradient_keys = []

    # Handle the conversion of a SMIRNOFF force field object
    # for backwards compatibility.
    if isinstance(force_field_source, smirnoff.ForceField):
        force_field_source = SmirnoffForceFieldSource.from_object(force_field_source)

    # Fill in any missing options with default values.
    if options is None:
        options = self.default_request_options(property_set, force_field_source)
    else:
        options = copy.deepcopy(options)
        self._populate_request_options(options, property_set, force_field_source)

    # Make sure the options are valid.
    options.validate()

    # Build the submission object.
    submission = EvaluatorClient._Submission()
    submission.dataset = property_set
    submission.force_field_source = force_field_source
    submission.options = options
    submission.parameter_gradient_keys = parameter_gradient_keys

    # Ensure the submission is valid.
    submission.validate()

    # Send the submission to the server.
    request_id, error = self._send_calculations_to_server(submission)

    # Build the object which represents this request.
    request_object = None

    if error is None:
        request_object = Request(self)
        request_object.id = request_id

    return request_object, error
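A hedged, minimal sketch of how `request_estimate` is typically driven, pieced together from the client usage in the snippets above; it assumes an `EvaluatorServer` is already listening on the default connection options and that `pure_data_set.json` exists as in the earlier script.

# Hedged sketch: submit a data set for estimation and block until the results
# are available. Assumes a running EvaluatorServer reachable with the default
# ConnectionOptions, and the same imports used by the surrounding snippets.
client = EvaluatorClient()

force_field_source = SmirnoffForceFieldSource.from_path(
    "smirnoff99Frosst-1.1.0.offxml"
)
data_set = PhysicalPropertyDataSet.from_json("pure_data_set.json")

request, error = client.request_estimate(
    property_set=data_set,
    force_field_source=force_field_source,
)
assert error is None

# Poll the server every five seconds until the estimate has finished.
results, error = request.results(synchronous=True, polling_interval=5)
assert error is None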
def main():

    setup_timestamp_logging()

    # Retrieve the current version.
    version = evaluator.__version__.replace(".", "-").replace("v", "")

    if "+" in version:
        version = "latest"

    # Create a new directory to run the current version's results in.
    os.makedirs(os.path.join(version, "results"))

    with temporarily_change_directory(version):

        # Load in the force field.
        force_field = ForceField(
            "openff-1.2.0.offxml",
            get_data_filename("forcefield/tip3p.offxml"),
        )

        force_field_source = SmirnoffForceFieldSource.from_object(force_field)
        force_field_source.json("force-field.json")

        # Load in the data set, retaining only a specific host / guest pair.
        binding_affinity = TaproomDataSet(
            host_codes=["acd"],
            guest_codes=["bam"],
            default_ionic_strength=150 * unit.millimolar,
        ).properties[0]

        # Set up the calculation.
        schema = HostGuestBindingAffinity.default_paprika_schema(
            n_solvent_molecules=2000
        ).workflow_schema

        schema.replace_protocol_types(
            {
                "BaseBuildSystem": (
                    "BuildSmirnoffSystem"
                    if isinstance(force_field_source, SmirnoffForceFieldSource)
                    else "BuildTLeapSystem"
                    if isinstance(force_field_source, TLeapForceFieldSource)
                    else "BaseBuildSystem"
                )
            }
        )

        metadata = Workflow.generate_default_metadata(
            binding_affinity, "force-field.json", UNDEFINED
        )

        workflow = Workflow.from_schema(schema, metadata, "acd_bam")

        # Run the calculation.
        with DaskLSFBackend(
            minimum_number_of_workers=1,
            maximum_number_of_workers=50,
            resources_per_worker=QueueWorkerResources(
                number_of_gpus=1,
                preferred_gpu_toolkit=QueueWorkerResources.GPUToolkit.CUDA,
                per_thread_memory_limit=5 * unit.gigabyte,
                wallclock_time_limit="05:59",
            ),
            setup_script_commands=[
                "conda activate openff-evaluator-paprika",
                "module load cuda/10.0",
            ],
            queue_name="gpuqueue",
        ) as calculation_backend:

            results = workflow.execute(
                root_directory="workflow",
                calculation_backend=calculation_backend,
            ).result()

        # Save the results.
        results.json("results.json", format=True)
def test_workflow_layer():
    """Test the `WorkflowLayer` calculation layer. As the `SimulationLayer`
    is the simplest implementation of the abstract layer, we settle for
    testing this."""

    properties_to_estimate = [
        create_dummy_property(Density),
        create_dummy_property(Density),
    ]

    # Create a very simple workflow which just returns some placeholder
    # value.
    estimated_value = Observable((1 * unit.kelvin).plus_minus(0.1 * unit.kelvin))

    protocol_a = DummyProtocol("protocol_a")
    protocol_a.input_value = estimated_value

    schema = WorkflowSchema()
    schema.protocol_schemas = [protocol_a.schema]
    schema.final_value_source = ProtocolPath("output_value", protocol_a.id)

    layer_schema = SimulationSchema()
    layer_schema.workflow_schema = schema

    options = RequestOptions()
    options.add_schema("SimulationLayer", "Density", layer_schema)

    batch = server.Batch()
    batch.queued_properties = properties_to_estimate
    batch.options = options

    with tempfile.TemporaryDirectory() as directory:

        with temporarily_change_directory(directory):

            # Create a directory for the layer.
            layer_directory = "simulation_layer"
            os.makedirs(layer_directory)

            # Set up a simple storage backend and add a force field to it.
            force_field = SmirnoffForceFieldSource.from_path(
                "smirnoff99Frosst-1.1.0.offxml"
            )

            storage_backend = LocalFileStorage()
            batch.force_field_id = storage_backend.store_force_field(force_field)

            # Create a simple calculation backend to test with.
            with DaskLocalCluster() as calculation_backend:

                def dummy_callback(returned_request):
                    assert len(returned_request.estimated_properties) == 2
                    assert len(returned_request.exceptions) == 0

                simulation_layer = SimulationLayer()

                simulation_layer.schedule_calculation(
                    calculation_backend,
                    storage_backend,
                    layer_directory,
                    batch,
                    dummy_callback,
                    True,
                )