def test_solvate_existing_structure_protocol():
    """Checks that a single methanol molecule can be solvated in a small
    box of water, and that the solute retains its assigned residue name."""
    import mdtraj

    methanol = Component("CO")

    methanol_substance = Substance()
    methanol_substance.add_component(methanol, ExactAmount(1))

    water_substance = Substance()
    water_substance.add_component(Component("O"), MoleFraction(1.0))

    with tempfile.TemporaryDirectory() as working_directory:
        # Generate coordinates for the lone methanol molecule.
        build_methanol = BuildCoordinatesPackmol("build_methanol")
        build_methanol.max_molecules = 1
        build_methanol.substance = methanol_substance
        build_methanol.execute(working_directory, ComputeResources())

        methanol_residue_name = build_methanol.assigned_residue_names[
            methanol.identifier
        ]

        # Surround the methanol with nine water molecules.
        solvate = SolvateExistingStructure("solvate_methanol")
        solvate.max_molecules = 9
        solvate.substance = water_substance
        solvate.solute_coordinate_file = build_methanol.coordinate_file_path
        solvate.execute(working_directory, ComputeResources())

        solvated_system = mdtraj.load_pdb(solvate.coordinate_file_path)

        # One methanol plus nine waters, with the solute listed first.
        assert solvated_system.n_residues == 10
        assert solvated_system.top.residue(0).name == methanol_residue_name
def test_run_openmm_simulation_checkpoints():
    """Validates the checkpointing behaviour of `OpenMMSimulation`: a
    finished protocol should not re-run on a second `execute`, and surplus
    frames beyond the expected count should be truncated from the outputs."""
    import mdtraj

    thermodynamic_state = ThermodynamicState(
        298 * unit.kelvin, 1.0 * unit.atmosphere
    )

    with tempfile.TemporaryDirectory() as directory:
        coordinate_path, system_path = _setup_dummy_system(directory)

        # Check that executing twice doesn't run the simulation twice
        simulation = OpenMMSimulation("npt_equilibration")
        simulation.total_number_of_iterations = 1
        simulation.steps_per_iteration = 4
        simulation.output_frequency = 1
        simulation.thermodynamic_state = thermodynamic_state
        simulation.input_coordinate_file = coordinate_path
        simulation.system_path = system_path

        simulation.execute(directory, ComputeResources())
        assert os.path.isfile(simulation._checkpoint_path)
        simulation.execute(directory, ComputeResources())

        statistics = StatisticsArray.from_pandas_csv(
            simulation.statistics_file_path
        )
        assert len(statistics) == 4

        trajectory = mdtraj.load(
            simulation.trajectory_file_path, top=coordinate_path
        )
        assert len(trajectory) == 4

        # Make sure that the output files are correctly truncating if more
        # frames than expected are written
        with open(simulation._checkpoint_path, "r") as file:
            checkpoint = json.load(file, cls=TypedJSONDecoder)

        # Fake having saved more frames than expected
        simulation.steps_per_iteration = 8
        checkpoint.steps_per_iteration = 8
        simulation.output_frequency = 2
        checkpoint.output_frequency = 2

        with open(simulation._checkpoint_path, "w") as file:
            json.dump(checkpoint, file, cls=TypedJSONEncoder)

        simulation.execute(directory, ComputeResources())

        statistics = StatisticsArray.from_pandas_csv(
            simulation.statistics_file_path
        )
        assert len(statistics) == 4

        trajectory = mdtraj.load(
            simulation.trajectory_file_path, top=coordinate_path
        )
        assert len(trajectory) == 4
def test_build_docked_coordinates_protocol():
    """Tests docking a methanol molecule into alpha-Cyclodextrin."""
    if not has_openeye():
        pytest.skip("The `BuildDockedCoordinates` protocol requires OpenEye.")

    ligand_substance = Substance()
    ligand_substance.add_component(
        Component("CO", role=Component.Role.Ligand),
        ExactAmount(1),
    )

    # TODO: This test could likely be made substantially faster
    #       by storing the binary prepared receptor. Would this
    #       be in breach of any oe license terms?
    with tempfile.TemporaryDirectory() as directory:
        docking_protocol = BuildDockedCoordinates("build_methanol")
        docking_protocol.ligand_substance = ligand_substance
        docking_protocol.number_of_ligand_conformers = 5
        docking_protocol.receptor_coordinate_file = get_data_filename(
            "test/molecules/acd.mol2"
        )
        docking_protocol.execute(directory, ComputeResources())

        docked_pdb = PDBFile(docking_protocol.docked_complex_coordinate_path)

        # The complex should contain exactly two residues: the receptor
        # and the docked ligand.
        assert docked_pdb.topology.getNumResidues() == 2
def execute(self,
            root_directory="",
            calculation_backend=None,
            compute_resources=None):
    """Executes the workflow.

    Parameters
    ----------
    root_directory: str
        The directory to execute the graph in.
    calculation_backend: CalculationBackend, optional.
        The backend to execute the graph on. This parameter
        is mutually exclusive with `compute_resources`.
    compute_resources: ComputeResources, optional.
        The compute resources to run using. If None and no
        `calculation_backend` is specified, the workflow will be executed
        on a single CPU thread. This parameter is mutually exclusive with
        `calculation_backend`.

    Returns
    -------
    WorkflowResult or Future of WorkflowResult:
        The result of executing this workflow. If executed on a
        `calculation_backend`, the result will be wrapped in a
        `Future` object.
    """
    if calculation_backend is None and compute_resources is None:
        # Fall back to a single local CPU thread when neither a backend
        # nor explicit resources were provided.
        compute_resources = ComputeResources(number_of_threads=1)

    graph = self.to_graph()
    results = graph.execute(root_directory, calculation_backend,
                            compute_resources)
    # This workflow is the only entry in its graph, so return its result.
    return results[0]
def test_gradient_reduced_potentials(use_subset):
    """Checks that `OpenMMGradientPotentials` writes out both the forward
    and reverse perturbed potential files for a water box.

    Fix: dropped the extraneous `f` prefix from the protocol id literal
    (it contained no placeholders).
    """
    substance = Substance.from_components("O")
    thermodynamic_state = ThermodynamicState(298 * unit.kelvin,
                                             1.0 * unit.atmosphere)

    with tempfile.TemporaryDirectory() as directory:
        force_field_path = path.join(directory, "ff.json")

        with open(force_field_path, "w") as file:
            file.write(build_tip3p_smirnoff_force_field().json())

        reduced_potentials = OpenMMGradientPotentials("reduced_potentials")
        reduced_potentials.substance = substance
        reduced_potentials.thermodynamic_state = thermodynamic_state
        reduced_potentials.statistics_path = get_data_filename(
            "test/statistics/stats_pandas.csv")
        reduced_potentials.force_field_path = force_field_path
        reduced_potentials.trajectory_file_path = get_data_filename(
            "test/trajectories/water.dcd")
        reduced_potentials.coordinate_file_path = get_data_filename(
            "test/trajectories/water.pdb")
        reduced_potentials.use_subset_of_force_field = use_subset
        reduced_potentials.enable_pbc = True
        reduced_potentials.parameter_key = ParameterGradientKey(
            "vdW", "[#1]-[#8X2H2+0:1]-[#1]", "epsilon")
        reduced_potentials.execute(directory, ComputeResources())

        assert path.isfile(reduced_potentials.forward_potentials_path)
        assert path.isfile(reduced_potentials.reverse_potentials_path)
def test_central_difference_gradient():
    """Checks that `CentralDifferenceGradient` reproduces the finite
    difference formula (delta observable / delta parameter)."""
    with tempfile.TemporaryDirectory() as directory:
        gradient_key = ParameterGradientKey("vdW", "[#1]-[#8X2H2+0:1]-[#1]",
                                            "epsilon")

        # Random (but sign-separated) perturbed parameters and observables.
        reverse_parameter = -random.random() * unit.kelvin
        reverse_observable = -random.random() * unit.kelvin
        forward_parameter = random.random() * unit.kelvin
        forward_observable = random.random() * unit.kelvin

        protocol = CentralDifferenceGradient("central_difference")
        protocol.parameter_key = gradient_key
        protocol.reverse_observable_value = reverse_observable
        protocol.reverse_parameter_value = reverse_parameter
        protocol.forward_observable_value = forward_observable
        protocol.forward_parameter_value = forward_parameter
        protocol.execute(directory, ComputeResources())

        expected_gradient = (forward_observable - reverse_observable) / (
            forward_parameter - reverse_parameter
        )
        assert protocol.gradient.value == expected_gradient
def test_conditional_protocol_group_fail():
    """A condition which can never be satisfied should cause the group to
    raise a `RuntimeError` once its iteration budget is exhausted."""
    with tempfile.TemporaryDirectory() as directory:
        initial_value = 2 * unit.kelvin

        value_protocol_a = DummyInputOutputProtocol("protocol_a")
        value_protocol_a.input_value = initial_value

        add_values = AddValues("add_values")
        add_values.values = [
            ProtocolPath("output_value", value_protocol_a.id),
            ProtocolPath("output_value", value_protocol_a.id),
        ]

        # The sum of two copies of a positive value can never be less than
        # the value itself, so this condition is impossible to satisfy.
        condition = ConditionalGroup.Condition()
        condition.left_hand_value = ProtocolPath("result", add_values.id)
        condition.right_hand_value = ProtocolPath("output_value",
                                                  value_protocol_a.id)
        condition.type = ConditionalGroup.Condition.Type.LessThan

        group = ConditionalGroup("protocol_group")
        group.conditions.append(condition)
        group.max_iterations = 10
        group.add_protocols(value_protocol_a, add_values)

        with pytest.raises(RuntimeError):
            group.execute(directory, ComputeResources())
def test_substance_filtering_protocol(filter_role):
    """Tests that the protocol to filter substances by role correctly works."""

    def build_test_substance():
        """Creates a four component substance with one component per role."""
        substance = Substance()
        substance.add_component(
            Component("C", role=Component.Role.Solute),
            ExactAmount(1),
        )
        substance.add_component(
            Component("CC", role=Component.Role.Ligand),
            ExactAmount(1),
        )
        substance.add_component(
            Component("CCC", role=Component.Role.Receptor),
            ExactAmount(1),
        )
        substance.add_component(
            Component("O", role=Component.Role.Solvent),
            MoleFraction(1.0),
        )
        return substance

    filter_protocol = FilterSubstanceByRole("filter_protocol")
    filter_protocol.input_substance = build_test_substance()
    filter_protocol.component_roles = [filter_role]
    filter_protocol.execute("", ComputeResources())

    # Only the single component carrying the requested role should survive.
    filtered_components = filter_protocol.filtered_substance.components
    assert len(filtered_components) == 1
    assert filtered_components[0].role == filter_role
def test_conditional_group_self_reference():
    """Tests that protocols within a conditional group
    can access the outputs of its parent, such as the
    current iteration of the group."""
    max_iterations = 10
    criteria = random.randint(1, max_iterations - 1)

    group = ConditionalGroup("conditional_group")
    group.max_iterations = max_iterations

    # The child protocol echoes the group's own iteration counter.
    protocol = DummyInputOutputProtocol("protocol_a")
    protocol.input_value = ProtocolPath("current_iteration", group.id)

    # One condition watches the child protocol's output...
    condition_on_output = ConditionalGroup.Condition()
    condition_on_output.left_hand_value = ProtocolPath(
        "output_value", group.id, protocol.id)
    condition_on_output.right_hand_value = criteria
    condition_on_output.type = ConditionalGroup.Condition.Type.GreaterThan

    # ...while the other watches the group's iteration counter directly.
    condition_on_iteration = ConditionalGroup.Condition()
    condition_on_iteration.left_hand_value = ProtocolPath(
        "current_iteration", group.id)
    condition_on_iteration.right_hand_value = criteria
    condition_on_iteration.type = ConditionalGroup.Condition.Type.GreaterThan

    group.add_protocols(protocol)
    group.add_condition(condition_on_output)
    group.add_condition(condition_on_iteration)

    with tempfile.TemporaryDirectory() as directory:
        group.execute(directory, ComputeResources())
        # The group should stop on the first iteration beyond the criteria.
        assert protocol.output_value == criteria + 1
def test_reweight_statistics():
    """Checks that `ReweightStatistics` executes cleanly when reweighting
    potential energies onto identical reference / target reduced potentials.

    Fix: dropped the extraneous `f` prefix from the protocol id literal
    (it contained no placeholders).
    """
    number_of_frames = 10

    reduced_potentials = (np.ones(number_of_frames) * random.random() *
                          unit.dimensionless)
    potentials = (np.ones(number_of_frames) * random.random() *
                  unit.kilojoule / unit.mole)

    with tempfile.TemporaryDirectory() as directory:
        statistics_path = path.join(directory, "stats.csv")

        statistics_array = StatisticsArray()
        statistics_array[ObservableType.ReducedPotential] = reduced_potentials
        statistics_array[ObservableType.PotentialEnergy] = potentials
        statistics_array.to_pandas_csv(statistics_path)

        reweight_protocol = ReweightStatistics("reduced_potentials")
        reweight_protocol.statistics_type = ObservableType.PotentialEnergy
        reweight_protocol.statistics_paths = statistics_path
        reweight_protocol.reference_reduced_potentials = statistics_path
        reweight_protocol.target_reduced_potentials = statistics_path
        reweight_protocol.bootstrap_uncertainties = True
        reweight_protocol.required_effective_samples = 0
        reweight_protocol.execute(directory, ComputeResources())
def test_conditional_protocol_group():
    """Checks that a conditional group exits once its condition is met
    and exposes the values of the protocols nested within it."""
    with tempfile.TemporaryDirectory() as directory:
        initial_value = 2 * unit.kelvin

        value_protocol_a = DummyInputOutputProtocol("protocol_a")
        value_protocol_a.input_value = initial_value

        add_values = AddValues("add_values")
        add_values.values = [
            ProtocolPath("output_value", value_protocol_a.id),
            ProtocolPath("output_value", value_protocol_a.id),
        ]

        # Doubling a positive value always exceeds the value itself, so the
        # condition should be satisfied immediately.
        condition = ConditionalGroup.Condition()
        condition.left_hand_value = ProtocolPath("result", add_values.id)
        condition.right_hand_value = ProtocolPath("output_value",
                                                  value_protocol_a.id)
        condition.type = ConditionalGroup.Condition.Type.GreaterThan

        group = ConditionalGroup("protocol_group")
        group.conditions.append(condition)
        group.add_protocols(value_protocol_a, add_values)
        group.execute(directory, ComputeResources())

        result = group.get_value(ProtocolPath("result", add_values.id))
        assert result == 4 * unit.kelvin
def test_multiply_values_protocol(value, multiplier):
    """Checks that `MultiplyValue` returns the product of its inputs."""
    with tempfile.TemporaryDirectory() as directory:
        protocol = MultiplyValue("multiply")
        protocol.value = value
        protocol.multiplier = multiplier
        protocol.execute(directory, ComputeResources())

        assert protocol.result == value * multiplier
def test_add_values_protocol(values):
    """Checks that `AddValues` sums an arbitrary list of values."""
    with tempfile.TemporaryDirectory() as directory:
        protocol = AddValues("add")
        protocol.values = values
        protocol.execute(directory, ComputeResources())

        assert protocol.result == reduce(operator.add, values)
def test_divide_values_protocol(value, divisor):
    """Checks that `DivideValue` returns the quotient of its inputs."""
    with tempfile.TemporaryDirectory() as directory:
        protocol = DivideValue("divide")
        protocol.value = value
        protocol.divisor = divisor
        protocol.execute(directory, ComputeResources())

        assert protocol.result == value / divisor
def test_protocol_group_exceptions():
    """An exception raised by a nested protocol should surface as a
    `RuntimeError` from the enclosing group."""
    failing_protocol = ExceptionProtocol("exception_protocol")

    group = ProtocolGroup("protocol_group")
    group.add_protocols(failing_protocol)

    with tempfile.TemporaryDirectory() as directory:
        with pytest.raises(RuntimeError):
            group.execute(directory, ComputeResources())
def test_subtract_values_protocol(values):
    """Checks that `SubtractValues` computes `value_b - value_a`."""
    with tempfile.TemporaryDirectory() as directory:
        protocol = SubtractValues("sub")
        protocol.value_a = values[0]
        protocol.value_b = values[1]
        protocol.execute(directory, ComputeResources())

        assert protocol.result == values[1] - values[0]
def test_extract_average_statistic():
    """Checks that `ExtractAverageStatistic` executes cleanly against a
    reference statistics file."""
    statistics_path = get_data_filename("test/statistics/stats_pandas.csv")

    with tempfile.TemporaryDirectory() as directory:
        protocol = ExtractAverageStatistic("extract_protocol")
        protocol.statistics_path = statistics_path
        protocol.statistics_type = ObservableType.PotentialEnergy
        protocol.execute(directory, ComputeResources())
def test_run_energy_minimisation():
    """Checks that energy minimisation produces an output coordinate file."""
    with tempfile.TemporaryDirectory() as directory:
        coordinate_path, system_path = _setup_dummy_system(directory)

        minimisation = OpenMMEnergyMinimisation("energy_minimisation")
        minimisation.input_coordinate_file = coordinate_path
        minimisation.system_path = system_path
        minimisation.execute(directory, ComputeResources())

        assert path.isfile(minimisation.output_coordinate_file)
def test_concatenate_statistics():
    """Concatenating a statistics file with itself should double its length."""
    statistics_path = get_data_filename("test/statistics/stats_pandas.csv")
    original_array = StatisticsArray.from_pandas_csv(statistics_path)

    with tempfile.TemporaryDirectory() as directory:
        protocol = ConcatenateStatistics("concatenate_protocol")
        protocol.input_statistics_paths = [statistics_path, statistics_path]
        protocol.execute(directory, ComputeResources())

        combined_array = StatisticsArray.from_pandas_csv(
            protocol.output_statistics_path)
        assert len(combined_array) == len(original_array) * 2
def test_protocol_group_resume():
    """A test that protocol groups can recover after being killed (e.g. by
    a worker being killed due to hitting a wallclock limit).

    Fix: the graph is now executed under a temporary directory rather than
    a relative `graph_a` path, so the test no longer pollutes the current
    working directory (and cannot pick up stale state from a prior run).
    """
    compute_resources = ComputeResources()

    with tempfile.TemporaryDirectory() as directory:
        # Both executions must share the same root so the second run can
        # resume from the first run's on-disk state.
        root_directory = os.path.join(directory, "graph_a")

        # Fake a protocol group which executes the first
        # two protocols and then 'gets killed'.
        protocol_a = DummyInputOutputProtocol("protocol_a")
        protocol_a.input_value = 1
        protocol_b = DummyInputOutputProtocol("protocol_b")
        protocol_b.input_value = ProtocolPath("output_value", protocol_a.id)

        protocol_group_a = ProtocolGroup("group_a")
        protocol_group_a.add_protocols(protocol_a, protocol_b)

        protocol_graph = ProtocolGraph()
        protocol_graph.add_protocols(protocol_group_a)
        protocol_graph.execute(root_directory,
                               compute_resources=compute_resources)

        # Remove the output file so it appears that the protocol group had
        # not completed.
        os.unlink(
            os.path.join(root_directory, protocol_group_a.id,
                         f"{protocol_group_a.id}_output.json"))

        # Build the 'full' group with the last two protocols which
        # 'had not been executed' after the group was 'killed'
        protocol_a = DummyInputOutputProtocol("protocol_a")
        protocol_a.input_value = 1
        protocol_b = DummyInputOutputProtocol("protocol_b")
        protocol_b.input_value = ProtocolPath("output_value", protocol_a.id)
        protocol_c = DummyInputOutputProtocol("protocol_c")
        protocol_c.input_value = ProtocolPath("output_value", protocol_b.id)
        protocol_d = DummyInputOutputProtocol("protocol_d")
        protocol_d.input_value = ProtocolPath("output_value", protocol_c.id)

        protocol_group_a = ProtocolGroup("group_a")
        protocol_group_a.add_protocols(protocol_a, protocol_b, protocol_c,
                                       protocol_d)

        protocol_graph = ProtocolGraph()
        protocol_graph.add_protocols(protocol_group_a)
        protocol_graph.execute(root_directory,
                               compute_resources=compute_resources)

        # Every output should be defined after resuming to completion.
        assert all(x != UNDEFINED for x in protocol_group_a.outputs.values())
def test_weight_by_mole_fraction_protocol(component_smiles, value):
    """Checks that a value is scaled by the mole fraction of the chosen
    component within the full substance."""
    full_substance = Substance.from_components("C", "CC", "CCC")
    component = Substance.from_components(component_smiles)

    amounts = full_substance.get_amounts(component.components[0].identifier)
    mole_fraction = next(iter(amounts)).value

    with tempfile.TemporaryDirectory() as directory:
        protocol = WeightByMoleFraction("weight")
        protocol.value = value
        protocol.full_substance = full_substance
        protocol.component = component
        protocol.execute(directory, ComputeResources())

        assert protocol.weighted_value == value * mole_fraction
def test_protocol_group_execution():
    """Checks that values propagate between protocols nested in a group."""
    protocol_a = DummyInputOutputProtocol("protocol_a")
    protocol_a.input_value = 1
    protocol_b = DummyInputOutputProtocol("protocol_b")
    protocol_b.input_value = ProtocolPath("output_value", protocol_a.id)

    group = ProtocolGroup("protocol_group")
    group.add_protocols(protocol_a, protocol_b)

    with tempfile.TemporaryDirectory() as directory:
        group.execute(directory, ComputeResources())

        # The dummy protocols echo their input, so the final output must
        # equal the very first input.
        value_path = ProtocolPath("output_value", group.id, protocol_b.id)
        assert group.get_value(value_path) == protocol_a.input_value
def test_extract_uncorrelated_statistics_data():
    """Checks that decorrelating a statistics array discards the expected
    number of equilibration / correlated samples."""
    statistics_path = get_data_filename("test/statistics/stats_pandas.csv")
    original_array = StatisticsArray.from_pandas_csv(statistics_path)

    with tempfile.TemporaryDirectory() as directory:
        protocol = ExtractUncorrelatedStatisticsData("extract_protocol")
        protocol.input_statistics_path = statistics_path
        protocol.equilibration_index = 2
        protocol.statistical_inefficiency = 2.0
        protocol.execute(directory, ComputeResources())

        decorrelated_array = StatisticsArray.from_pandas_csv(
            protocol.output_statistics_path)

        # Two equilibration samples dropped, then every other sample kept.
        expected_length = (len(original_array) - 2) / 2
        assert len(decorrelated_array) == expected_length
        assert protocol.number_of_uncorrelated_samples == expected_length
def test_calculate_reduced_potential_openmm():
    """Checks that reduced potentials can be evaluated with OpenMM for a
    TIP3P water box and stored in a statistics array.

    Fix: dropped the extraneous `f` prefixes from the protocol id literals
    (they contained no placeholders).
    """
    substance = Substance.from_components("O")
    thermodynamic_state = ThermodynamicState(298 * unit.kelvin,
                                             1.0 * unit.atmosphere)

    with tempfile.TemporaryDirectory() as directory:
        force_field_path = path.join(directory, "ff.json")

        with open(force_field_path, "w") as file:
            file.write(build_tip3p_smirnoff_force_field().json())

        build_coordinates = BuildCoordinatesPackmol("build_coordinates")
        build_coordinates.max_molecules = 10
        build_coordinates.mass_density = 0.05 * unit.grams / unit.milliliters
        build_coordinates.substance = substance
        build_coordinates.execute(directory, None)

        assign_parameters = BuildSmirnoffSystem("assign_parameters")
        assign_parameters.force_field_path = force_field_path
        assign_parameters.coordinate_file_path = (
            build_coordinates.coordinate_file_path)
        assign_parameters.substance = substance
        assign_parameters.execute(directory, None)

        reduced_potentials = OpenMMReducedPotentials("reduced_potentials")
        reduced_potentials.substance = substance
        reduced_potentials.thermodynamic_state = thermodynamic_state
        reduced_potentials.reference_force_field_paths = [force_field_path]
        reduced_potentials.system_path = assign_parameters.system_path
        reduced_potentials.trajectory_file_path = get_data_filename(
            "test/trajectories/water.dcd")
        reduced_potentials.coordinate_file_path = get_data_filename(
            "test/trajectories/water.pdb")
        reduced_potentials.kinetic_energies_path = get_data_filename(
            "test/statistics/stats_pandas.csv")
        reduced_potentials.high_precision = False
        reduced_potentials.execute(directory, ComputeResources())

        assert path.isfile(reduced_potentials.statistics_file_path)

        final_array = StatisticsArray.from_pandas_csv(
            reduced_potentials.statistics_file_path)
        assert ObservableType.ReducedPotential in final_array
def test_run_openmm_simulation():
    """Checks that a short NPT simulation produces coordinate, trajectory
    and statistics output files."""
    thermodynamic_state = ThermodynamicState(298 * unit.kelvin,
                                             1.0 * unit.atmosphere)

    with tempfile.TemporaryDirectory() as directory:
        coordinate_path, system_path = _setup_dummy_system(directory)

        simulation = OpenMMSimulation("npt_equilibration")
        simulation.steps_per_iteration = 2
        simulation.output_frequency = 1
        simulation.thermodynamic_state = thermodynamic_state
        simulation.input_coordinate_file = coordinate_path
        simulation.system_path = system_path
        simulation.execute(directory, ComputeResources())

        assert path.isfile(simulation.output_coordinate_file)
        assert path.isfile(simulation.trajectory_file_path)
        assert path.isfile(simulation.statistics_file_path)
def test_concatenate_trajectories():
    """Joining a trajectory with itself should double the frame count."""
    import mdtraj

    coordinate_path = get_data_filename("test/trajectories/water.pdb")
    trajectory_path = get_data_filename("test/trajectories/water.dcd")

    original_trajectory = mdtraj.load(trajectory_path, top=coordinate_path)

    with tempfile.TemporaryDirectory() as directory:
        protocol = ConcatenateTrajectories("concatenate_protocol")
        protocol.input_coordinate_paths = [coordinate_path, coordinate_path]
        protocol.input_trajectory_paths = [trajectory_path, trajectory_path]
        protocol.execute(directory, ComputeResources())

        combined_trajectory = mdtraj.load(protocol.output_trajectory_path,
                                          top=coordinate_path)
        assert len(combined_trajectory) == len(original_trajectory) * 2
def test_extract_uncorrelated_trajectory_data():
    """Checks that decorrelating a trajectory discards the expected number
    of equilibration / correlated frames."""
    import mdtraj

    coordinate_path = get_data_filename("test/trajectories/water.pdb")
    trajectory_path = get_data_filename("test/trajectories/water.dcd")

    original_trajectory = mdtraj.load(trajectory_path, top=coordinate_path)

    with tempfile.TemporaryDirectory() as directory:
        protocol = ExtractUncorrelatedTrajectoryData("extract_protocol")
        protocol.input_coordinate_file = coordinate_path
        protocol.input_trajectory_path = trajectory_path
        protocol.equilibration_index = 2
        protocol.statistical_inefficiency = 2.0
        protocol.execute(directory, ComputeResources())

        decorrelated_trajectory = mdtraj.load(
            protocol.output_trajectory_path, top=coordinate_path)

        # Two equilibration frames dropped, then every other frame kept.
        expected_length = (len(original_trajectory) - 2) / 2
        assert len(decorrelated_trajectory) == expected_length
        assert protocol.number_of_uncorrelated_samples == expected_length
def _get_options_dictionary(self, available_resources):
    """Returns a dictionary of options which will be serialized
    to a yaml file and passed to YANK.

    Parameters
    ----------
    available_resources: ComputeResources
        The resources available to execute on.

    Returns
    -------
    dict of str and Any
        A yaml compatible dictionary of YANK options.
    """
    from openforcefield.utils import quantity_to_string

    platform_name = "CPU"

    if available_resources.number_of_gpus > 0:
        # A platform which runs on GPUs has been requested.
        from evaluator.backends import ComputeResources

        toolkit_enum = ComputeResources.GPUToolkit(
            available_resources.preferred_gpu_toolkit)

        # Fix: the non-CUDA branch previously evaluated to the
        # `GPUToolkit.OpenCL` enum member rather than the "OpenCL"
        # platform-name string, which is not yaml serializable.
        platform_name = (
            "CUDA"
            if toolkit_enum == ComputeResources.GPUToolkit.CUDA
            else "OpenCL"
        )

    return {
        "verbose": self.verbose,
        "output_dir": ".",
        "temperature": quantity_to_string(
            pint_quantity_to_openmm(self.thermodynamic_state.temperature)),
        "pressure": quantity_to_string(
            pint_quantity_to_openmm(self.thermodynamic_state.pressure)),
        "minimize": True,
        "number_of_equilibration_iterations":
            self.number_of_equilibration_iterations,
        "default_number_of_iterations": self.number_of_iterations,
        "default_nsteps_per_iteration": self.steps_per_iteration,
        "checkpoint_interval": self.checkpoint_interval,
        "default_timestep":
            quantity_to_string(pint_quantity_to_openmm(self.timestep)),
        "annihilate_electrostatics": True,
        "annihilate_sterics": False,
        "platform": platform_name,
    }
def setup_platform_with_resources(compute_resources, high_precision=False):
    """Creates an OpenMM `Platform` object which requests a set amount
    of compute resources (e.g with a certain number of cpus).

    Parameters
    ----------
    compute_resources: ComputeResources
        The compute resources which describe which platform is most
        appropriate.
    high_precision: bool
        If true, a platform with the highest possible precision (double
        for CUDA and OpenCL, Reference for CPU only) will be returned.

    Returns
    -------
    Platform
        The created platform
    """
    from simtk.openmm import Platform

    # Setup the requested platform:
    if compute_resources.number_of_gpus > 0:

        # TODO: Make sure use mixing precision - CUDA, OpenCL.
        # TODO: Deterministic forces = True

        from evaluator.backends import ComputeResources

        toolkit_enum = ComputeResources.GPUToolkit(
            compute_resources.preferred_gpu_toolkit
        )

        # A platform which runs on GPUs has been requested.
        # Fix: the non-CUDA branch previously produced the
        # `GPUToolkit.OpenCL` enum member instead of the platform-name
        # string expected by `Platform.getPlatformByName`.
        platform_name = (
            "CUDA"
            if toolkit_enum == ComputeResources.GPUToolkit.CUDA
            else "OpenCL"
        )

        # noinspection PyCallByClass,PyTypeChecker
        platform = Platform.getPlatformByName(platform_name)

        if compute_resources.gpu_device_indices is not None:
            # CUDA's property is spelled `CudaDeviceIndex`, not
            # `CUDADeviceIndex`.
            property_platform_name = platform_name

            if toolkit_enum == ComputeResources.GPUToolkit.CUDA:
                property_platform_name = platform_name.lower().capitalize()

            platform.setPropertyDefaultValue(
                property_platform_name + "DeviceIndex",
                compute_resources.gpu_device_indices,
            )

        if high_precision:
            platform.setPropertyDefaultValue("Precision", "double")

        logger.info(
            "Setting up an openmm platform on GPU {}".format(
                compute_resources.gpu_device_indices or 0
            )
        )

    else:

        if not high_precision:
            # noinspection PyCallByClass,PyTypeChecker
            platform = Platform.getPlatformByName("CPU")
            platform.setPropertyDefaultValue(
                "Threads", str(compute_resources.number_of_threads)
            )
        else:
            # The Reference platform is single threaded but runs in full
            # double precision.
            # noinspection PyCallByClass,PyTypeChecker
            platform = Platform.getPlatformByName("Reference")

        logger.info(
            "Setting up a simulation with {} threads".format(
                compute_resources.number_of_threads
            )
        )

    return platform
def _setup_simulation_objects(self, temperature, pressure,
                              available_resources):
    """Initializes the objects needed to perform the simulation.
    This comprises of a context, and an integrator.

    Parameters
    ----------
    temperature: simtk.unit.Quantity
        The temperature to run the simulation at.
    pressure: simtk.unit.Quantity
        The pressure to run the simulation at.
    available_resources: ComputeResources
        The resources available to run on.

    Returns
    -------
    simtk.openmm.Context
        The created openmm context which takes advantage
        of the available compute resources.
    openmmtools.integrators.LangevinIntegrator
        The Langevin integrator which will propagate
        the simulation.
    """
    import openmmtools
    from simtk.openmm import XmlSerializer

    # Create a platform with the correct resources.
    if not self.allow_gpu_platforms:
        # Strip any GPU information so only a CPU platform can be chosen.
        from evaluator.backends import ComputeResources

        available_resources = ComputeResources(
            available_resources.number_of_threads)

    platform = setup_platform_with_resources(available_resources,
                                             self.high_precision)

    # Load in the system object from the provided xml file.
    with open(self.system_path, "r") as file:
        system = XmlSerializer.deserialize(file.read())

    # Disable the periodic boundary conditions if requested.
    if not self.enable_pbc:
        disable_pbc(system)
        # A barostat is meaningless without PBC, so drop the pressure.
        pressure = None

    # Use the openmmtools ThermodynamicState object to help
    # set up a system which contains the correct barostat if
    # one should be present.
    openmm_state = openmmtools.states.ThermodynamicState(
        system=system, temperature=temperature, pressure=pressure)

    # The thermostat is removed as the Langevin integrator below handles
    # temperature control itself.
    system = openmm_state.get_system(remove_thermostat=True)

    # Set up the integrator.
    thermostat_friction = pint_quantity_to_openmm(self.thermostat_friction)
    timestep = pint_quantity_to_openmm(self.timestep)

    integrator = openmmtools.integrators.LangevinIntegrator(
        temperature=temperature,
        collision_rate=thermostat_friction,
        timestep=timestep,
    )

    # Create the simulation context.
    context = openmm.Context(system, integrator, platform)

    # Initialize the context with the correct positions etc.
    input_pdb_file = app.PDBFile(self.input_coordinate_file)

    if self.enable_pbc:
        # Optionally set up the box vectors.
        box_vectors = input_pdb_file.topology.getPeriodicBoxVectors()

        if box_vectors is None:
            raise ValueError(
                "The input file must contain box vectors when running with PBC."
            )

        context.setPeriodicBoxVectors(*box_vectors)

    context.setPositions(input_pdb_file.positions)
    context.setVelocitiesToTemperature(temperature)

    return context, integrator