def test_thermoml_mole_constraints():
    """A collection of tests to ensure that the Mole fraction constraint is
    implemented correctly alongside solvent constraints."""

    # Mole fraction
    data_set = ThermoMLDataSet.from_file(
        get_data_filename("test/properties/mole.xml"))

    assert data_set is not None
    assert len(data_set) > 0

    # Mole fraction + Solvent: Mass fraction
    data_set = ThermoMLDataSet.from_file(
        get_data_filename("test/properties/mole_mass.xml"))

    assert data_set is not None
    assert len(data_set) > 0

    # Mole fraction + Solvent: Mole fraction
    data_set = ThermoMLDataSet.from_file(
        get_data_filename("test/properties/mole_mole.xml"))

    assert data_set is not None
    assert len(data_set) > 0

    # Mole fraction + Solvent: Molality
    data_set = ThermoMLDataSet.from_file(
        get_data_filename("test/properties/mole_molality.xml"))

    assert data_set is not None
    assert len(data_set) > 0


def test_thermoml_from_files():
    """A test to ensure that ThermoML archive files can be loaded from local sources."""

    data_set = ThermoMLDataSet.from_file(
        get_data_filename("properties/single_density.xml"),
        get_data_filename("properties/single_dielectric.xml"),
        get_data_filename("properties/single_enthalpy_mixing.xml"),
    )

    assert data_set is not None
    assert len(data_set) == 3

    # Make sure the DOI was found from the enthalpy file
    for physical_property in data_set:

        if isinstance(physical_property, EnthalpyOfMixing):

            assert physical_property.source.doi != UNDEFINED
            assert physical_property.source.doi == "10.1016/j.jct.2008.12.004"

        else:
            assert physical_property.source.doi == ""
            assert physical_property.source.reference != UNDEFINED

    data_set = ThermoMLDataSet.from_file("dummy_filename")
    assert data_set is None

Example #3

def test_compute_dipole_moments(tmpdir):

    coordinate_path = get_data_filename("test/trajectories/water.pdb")
    trajectory_path = get_data_filename("test/trajectories/water.dcd")

    # Build a system object for water
    force_field_path = os.path.join(tmpdir, "ff.json")

    with open(force_field_path, "w") as file:
        file.write(build_tip3p_smirnoff_force_field().json())

    assign_parameters = BuildSmirnoffSystem("")
    assign_parameters.force_field_path = force_field_path
    assign_parameters.coordinate_file_path = coordinate_path
    assign_parameters.substance = Substance.from_components("O")
    assign_parameters.execute(str(tmpdir))

    # TODO - test gradients when TIP3P library charges added.
    protocol = ComputeDipoleMoments("")
    protocol.parameterized_system = assign_parameters.parameterized_system
    protocol.trajectory_path = trajectory_path
    protocol.execute(str(tmpdir))

    assert len(protocol.dipole_moments) == 10
    assert protocol.dipole_moments.value.shape[1] == 3

    assert not np.allclose(protocol.dipole_moments.value,
                           0.0 * unit.elementary_charge * unit.nanometers)

Example #4

def test_statistics_object():

    statistics_object = StatisticsArray.from_openmm_csv(
        get_data_filename("test/statistics/stats_openmm.csv"), 1 * unit.atmosphere
    )

    statistics_object.to_pandas_csv("stats_pandas.csv")

    statistics_object = StatisticsArray.from_pandas_csv("stats_pandas.csv")
    assert statistics_object is not None

    subsampled_array = StatisticsArray.from_existing(statistics_object, [1, 2, 3])
    assert subsampled_array is not None and len(subsampled_array) == 3

    if os.path.isfile("stats_pandas.csv"):
        os.unlink("stats_pandas.csv")

    # Assigning an array with too few values should raise.
    reduced_potential = np.array([0.1] * (len(statistics_object) - 1))

    with pytest.raises(ValueError):
        statistics_object[ObservableType.ReducedPotential] = reduced_potential

    # An array of the correct length but without units attached should also raise.
    reduced_potential = np.array([0.1] * len(statistics_object))

    with pytest.raises(ValueError):
        statistics_object[ObservableType.ReducedPotential] = reduced_potential

    statistics_object[ObservableType.ReducedPotential] = (
        reduced_potential * unit.dimensionless
    )

    assert ObservableType.ReducedPotential in statistics_object

Example #5

def test_build_docked_coordinates_protocol():
    """Tests docking a methanol molecule into alpha-Cyclodextrin."""

    if not has_openeye():
        pytest.skip("The `BuildDockedCoordinates` protocol requires OpenEye.")

    ligand_substance = Substance()
    ligand_substance.add_component(
        Component("CO", role=Component.Role.Ligand),
        ExactAmount(1),
    )

    # TODO: This test could likely be made substantially faster
    #       by storing the binary prepared receptor. Would this
    #       be in breach of any oe license terms?
    with tempfile.TemporaryDirectory() as temporary_directory:

        build_docked_coordinates = BuildDockedCoordinates("build_methanol")
        build_docked_coordinates.ligand_substance = ligand_substance
        build_docked_coordinates.number_of_ligand_conformers = 5
        build_docked_coordinates.receptor_coordinate_file = get_data_filename(
            "test/molecules/acd.mol2")
        build_docked_coordinates.execute(temporary_directory,
                                         ComputeResources())

        docked_pdb = PDBFile(
            build_docked_coordinates.docked_complex_coordinate_path)
        assert docked_pdb.topology.getNumResidues() == 2


def complex_file_path(tmp_path):

    import parmed.geometry
    from paprika.evaluator import Setup

    complex_path = get_data_filename(
        os.path.join("test", "molecules", "methanol_methane.pdb")
    )

    # noinspection PyTypeChecker
    structure: parmed.Structure = parmed.load_file(complex_path, structure=True)
    # noinspection PyTypeChecker
    center_of_mass = parmed.geometry.center_of_mass(
        structure.coordinates, masses=numpy.ones(len(structure.coordinates))
    )

    Setup.add_dummy_atoms_to_structure(
        structure,
        [
            numpy.array([0.0, 0.0, 10.0]),
            numpy.array([0.0, 0.0, 20.0]),
            numpy.array([0.0, 5.0, 25.0]),
        ],
        center_of_mass,
    )

    complex_path = os.path.join(tmp_path, "complex.pdb")
    structure.save(complex_path)

    return complex_path

Example #7

def test_decorrelate_trajectory():

    import mdtraj

    coordinate_path = get_data_filename("test/trajectories/water.pdb")
    trajectory_path = get_data_filename("test/trajectories/water.dcd")

    with tempfile.TemporaryDirectory() as temporary_directory:

        protocol = DecorrelateTrajectory("")
        protocol.input_coordinate_file = coordinate_path
        protocol.input_trajectory_path = trajectory_path
        protocol.time_series_statistics = TimeSeriesStatistics(10, 4, 2.0, 2)
        protocol.execute(temporary_directory)

        final_trajectory = mdtraj.load(protocol.output_trajectory_path,
                                       top=coordinate_path)
        assert len(final_trajectory) == 4

Example #8

def test_extract_average_statistic():

    statistics_path = get_data_filename("test/statistics/stats_pandas.csv")

    with tempfile.TemporaryDirectory() as temporary_directory:

        extract_protocol = ExtractAverageStatistic("extract_protocol")
        extract_protocol.statistics_path = statistics_path
        extract_protocol.statistics_type = ObservableType.PotentialEnergy
        extract_protocol.execute(temporary_directory, ComputeResources())

Example #9

def test_calculate_reduced_potential_openmm():

    substance = Substance.from_components("O")
    thermodynamic_state = ThermodynamicState(298 * unit.kelvin,
                                             1.0 * unit.atmosphere)

    with tempfile.TemporaryDirectory() as directory:
        force_field_path = path.join(directory, "ff.json")

        with open(force_field_path, "w") as file:
            file.write(build_tip3p_smirnoff_force_field().json())

        build_coordinates = BuildCoordinatesPackmol("build_coordinates")
        build_coordinates.max_molecules = 10
        build_coordinates.mass_density = 0.05 * unit.grams / unit.milliliters
        build_coordinates.substance = substance
        build_coordinates.execute(directory, None)

        assign_parameters = BuildSmirnoffSystem("assign_parameters")
        assign_parameters.force_field_path = force_field_path
        assign_parameters.coordinate_file_path = build_coordinates.coordinate_file_path
        assign_parameters.substance = substance
        assign_parameters.execute(directory, None)

        reduced_potentials = OpenMMReducedPotentials("reduced_potentials")
        reduced_potentials.substance = substance
        reduced_potentials.thermodynamic_state = thermodynamic_state
        reduced_potentials.reference_force_field_paths = [force_field_path]
        reduced_potentials.system_path = assign_parameters.system_path
        reduced_potentials.trajectory_file_path = get_data_filename(
            "test/trajectories/water.dcd")
        reduced_potentials.coordinate_file_path = get_data_filename(
            "test/trajectories/water.pdb")
        reduced_potentials.kinetic_energies_path = get_data_filename(
            "test/statistics/stats_pandas.csv")
        reduced_potentials.high_precision = False
        reduced_potentials.execute(directory, ComputeResources())

        assert path.isfile(reduced_potentials.statistics_file_path)

        final_array = StatisticsArray.from_pandas_csv(
            reduced_potentials.statistics_file_path)
        assert ObservableType.ReducedPotential in final_array


def test_compute_gradients(tmpdir, smirks, all_zeros):

    # Load a short trajectory.
    coordinate_path = get_data_filename("test/trajectories/water.pdb")
    trajectory_path = get_data_filename("test/trajectories/water.dcd")

    trajectory = mdtraj.load_dcd(trajectory_path, coordinate_path)

    observables = ObservableFrame({
        "PotentialEnergy":
        ObservableArray(
            np.zeros(len(trajectory)) * unit.kilojoule / unit.mole)
    })

    _compute_gradients(
        [ParameterGradientKey("vdW", smirks, "epsilon")],
        observables,
        ForceField("openff-1.2.0.offxml"),
        ThermodynamicState(298.15 * unit.kelvin, 1.0 * unit.atmosphere),
        Topology.from_mdtraj(trajectory.topology, [Molecule.from_smiles("O")]),
        trajectory,
        ComputeResources(),
        True,
    )

    assert len(
        observables["PotentialEnergy"].gradients[0].value) == len(trajectory)

    if all_zeros:
        assert np.allclose(
            observables["PotentialEnergy"].gradients[0].value,
            0.0 * unit.kilojoule / unit.kilocalorie,
        )
    else:
        assert not np.allclose(
            observables["PotentialEnergy"].gradients[0].value,
            0.0 * unit.kilojoule / unit.kilocalorie,
        )

Example #11

def test_concatenate_trajectories():

    import mdtraj

    coordinate_path = get_data_filename("test/trajectories/water.pdb")
    trajectory_path = get_data_filename("test/trajectories/water.dcd")

    original_trajectory = mdtraj.load(trajectory_path, top=coordinate_path)

    with tempfile.TemporaryDirectory() as temporary_directory:

        concatenate_protocol = ConcatenateTrajectories("concatenate_protocol")
        concatenate_protocol.input_coordinate_paths = [
            coordinate_path, coordinate_path
        ]
        concatenate_protocol.input_trajectory_paths = [
            trajectory_path, trajectory_path
        ]
        concatenate_protocol.execute(temporary_directory, ComputeResources())

        final_trajectory = mdtraj.load(
            concatenate_protocol.output_trajectory_path, top=coordinate_path)
        assert len(final_trajectory) == len(original_trajectory) * 2

Example #12

def test_extract_uncorrelated_trajectory_data():

    import mdtraj

    coordinate_path = get_data_filename("test/trajectories/water.pdb")
    trajectory_path = get_data_filename("test/trajectories/water.dcd")

    original_trajectory = mdtraj.load(trajectory_path, top=coordinate_path)

    with tempfile.TemporaryDirectory() as temporary_directory:

        extract_protocol = ExtractUncorrelatedTrajectoryData(
            "extract_protocol")
        extract_protocol.input_coordinate_file = coordinate_path
        extract_protocol.input_trajectory_path = trajectory_path
        extract_protocol.equilibration_index = 2
        extract_protocol.statistical_inefficiency = 2.0
        extract_protocol.execute(temporary_directory, ComputeResources())

        final_trajectory = mdtraj.load(extract_protocol.output_trajectory_path,
                                       top=coordinate_path)
        assert len(final_trajectory) == (len(original_trajectory) - 2) / 2
        assert (extract_protocol.number_of_uncorrelated_samples ==
                (len(original_trajectory) - 2) / 2)

Example #13

def test_concatenate_statistics():

    statistics_path = get_data_filename("test/statistics/stats_pandas.csv")
    original_array = StatisticsArray.from_pandas_csv(statistics_path)

    with tempfile.TemporaryDirectory() as temporary_directory:

        concatenate_protocol = ConcatenateStatistics("concatenate_protocol")
        concatenate_protocol.input_statistics_paths = [
            statistics_path, statistics_path
        ]
        concatenate_protocol.execute(temporary_directory, ComputeResources())

        final_array = StatisticsArray.from_pandas_csv(
            concatenate_protocol.output_statistics_path)
        assert len(final_array) == len(original_array) * 2


def test_prepare_release_coordinates(tmp_path, dummy_complex):
    import mdtraj

    protocol = PrepareReleaseCoordinates("")

    protocol.substance = dummy_complex
    protocol.complex_file_path = get_data_filename(
        os.path.join("test", "molecules", "methanol_methane.pdb")
    )

    protocol.execute(str(tmp_path))

    assert os.path.isfile(protocol.output_coordinate_path)

    host_trajectory = mdtraj.load_pdb(protocol.output_coordinate_path)
    assert host_trajectory.topology.n_atoms == 6


def test_atom_indices_by_role(dummy_complex):

    atom_indices_by_role = _atom_indices_by_role(
        dummy_complex,
        get_data_filename(os.path.join("test", "molecules", "methanol_methane.pdb")),
    )

    assert len(atom_indices_by_role) == 2

    assert Component.Role.Ligand in atom_indices_by_role
    assert Component.Role.Receptor in atom_indices_by_role

    assert len(atom_indices_by_role[Component.Role.Receptor]) == 6
    assert atom_indices_by_role[Component.Role.Receptor] == [0, 1, 2, 3, 4, 5]

    assert len(atom_indices_by_role[Component.Role.Ligand]) == 5
    assert atom_indices_by_role[Component.Role.Ligand] == [6, 7, 8, 9, 10]

Example #16

def test_extract_uncorrelated_statistics_data():

    statistics_path = get_data_filename("test/statistics/stats_pandas.csv")
    original_array = StatisticsArray.from_pandas_csv(statistics_path)

    with tempfile.TemporaryDirectory() as temporary_directory:

        extract_protocol = ExtractUncorrelatedStatisticsData(
            "extract_protocol")
        extract_protocol.input_statistics_path = statistics_path
        extract_protocol.equilibration_index = 2
        extract_protocol.statistical_inefficiency = 2.0
        extract_protocol.execute(temporary_directory, ComputeResources())

        final_array = StatisticsArray.from_pandas_csv(
            extract_protocol.output_statistics_path)
        assert len(final_array) == (len(original_array) - 2) / 2
        assert (extract_protocol.number_of_uncorrelated_samples ==
                (len(original_array) - 2) / 2)


def test_evaluate_energies_openmm():

    substance = Substance.from_components("O")
    thermodynamic_state = ThermodynamicState(298 * unit.kelvin,
                                             1.0 * unit.atmosphere)

    with tempfile.TemporaryDirectory() as directory:

        coordinate_path, parameterized_system = _setup_dummy_system(directory)

        reduced_potentials = OpenMMEvaluateEnergies("")
        reduced_potentials.substance = substance
        reduced_potentials.thermodynamic_state = thermodynamic_state
        reduced_potentials.parameterized_system = parameterized_system
        reduced_potentials.trajectory_file_path = get_data_filename(
            "test/trajectories/water.dcd")
        reduced_potentials.execute(directory, ComputeResources())

        assert ObservableType.ReducedPotential in reduced_potentials.output_observables
        assert ObservableType.PotentialEnergy in reduced_potentials.output_observables

Example #18

def build_tip3p_smirnoff_force_field():
    """Combines the smirnoff99Frosst and tip3p offxml files
    into a single one which can be consumed by the property
    estimator.

    Returns
    -------
    SmirnoffForceFieldSource
        The force field containing both smirnoff99Frosst-1.1.0
        and TIP3P parameters
    """
    from openforcefield.typing.engines.smirnoff import ForceField

    smirnoff_force_field_path = "smirnoff99Frosst-1.1.0.offxml"
    tip3p_force_field_path = get_data_filename("forcefield/tip3p.offxml")

    smirnoff_force_field_with_tip3p = ForceField(smirnoff_force_field_path,
                                                 tip3p_force_field_path)

    return SmirnoffForceFieldSource.from_object(
        smirnoff_force_field_with_tip3p)
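
A minimal usage sketch (not part of the original tests): the combined source
serialises to JSON, which is how protocols such as BuildSmirnoffSystem consume
it in the snippets above via their ``force_field_path`` input. The "ff.json"
path is an arbitrary illustrative choice.

# Usage sketch only: write the combined SMIRNOFF + TIP3P source to disk so a
# protocol can read it back through its `force_field_path` input.
force_field_source = build_tip3p_smirnoff_force_field()

with open("ff.json", "w") as file:
    file.write(force_field_source.json())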

Example #19

def test_import_thermoml_data(requests_mock):
    """Tests that ThermoML archive files can be imported from a
    remote source."""

    # Create a tarball to be downloaded.
    source_path = get_data_filename(
        os.path.join("test", "properties", "mass.xml"))

    with NamedTemporaryFile(suffix="tgz") as tar_file:

        with tarfile.open(tar_file.name, "w:gz") as tar:
            tar.add(source_path, arcname=os.path.basename(source_path))

        with open(tar_file.name, "rb") as file:

            requests_mock.get("https://trc.nist.gov/ThermoML/IJT.tgz",
                              content=file.read())

        data_frame = ImportThermoMLData.apply(
            pandas.DataFrame(),
            ImportThermoMLDataSchema(journal_names=["IJT"]))

        assert data_frame is not None and len(data_frame) == 1


def test_prepare_pull_coordinates(tmp_path, dummy_complex, window_index, expected_z):
    import mdtraj

    protocol = PreparePullCoordinates("")
    protocol.substance = dummy_complex
    protocol.complex_file_path = get_data_filename(
        os.path.join("test", "molecules", "methanol_methane.pdb")
    )
    protocol.guest_orientation_mask = "@7 @8"
    protocol.pull_distance = 24.0 * unit.angstrom
    protocol.pull_window_index = window_index
    protocol.n_pull_windows = 2

    protocol.execute(str(tmp_path))

    assert os.path.isfile(protocol.output_coordinate_path)

    host_trajectory = mdtraj.load_pdb(protocol.output_coordinate_path)
    assert host_trajectory.topology.n_atoms == 11

    assert numpy.allclose(
        host_trajectory.xyz[0][6, :], numpy.array([0.0, 0.0, expected_z])
    )
    assert numpy.allclose(host_trajectory.xyz[0][7, :2], numpy.zeros(2))

Example #21

def test_frame_from_openmm(pressure):

    observable_frame = ObservableFrame.from_openmm(
        get_data_filename("test/statistics/openmm_statistics.csv"), pressure)

    expected_types = {*ObservableType} - {ObservableType.ReducedPotential}

    if pressure is None:
        expected_types -= {ObservableType.Enthalpy}

    assert {*observable_frame} == expected_types
    assert len(observable_frame) == 10

    expected_values = {
        ObservableType.PotentialEnergy:
        7934.831868494968 * unit.kilojoule / unit.mole,
        ObservableType.KineticEnergy:
        5939.683117957521 * unit.kilojoule / unit.mole,
        ObservableType.TotalEnergy:
        13874.51498645249 * unit.kilojoule / unit.mole,
        ObservableType.Temperature: 286.38157154881503 * unit.kelvin,
        ObservableType.Volume: 26.342326662784938 * unit.nanometer**3,
        ObservableType.Density:
        0.6139877476363793 * unit.gram / unit.milliliter,
    }

    for observable_type, expected_value in expected_values.items():
        assert numpy.isclose(observable_frame[observable_type].value[0],
                             expected_value)

    if pressure is not None:
        expected_enthalpy = (13874.51498645249 * unit.kilojoule / unit.mole +
                             pressure * 26.342326662784938 *
                             unit.nanometer**3 * unit.avogadro_constant)
        assert numpy.isclose(observable_frame["Enthalpy"].value[0],
                             expected_enthalpy)

Example #22

def main():

    setup_timestamp_logging()

    # Retrieve the current version.
    version = evaluator.__version__.replace(".", "-").replace("v", "")

    if "+" in version:
        version = "latest"

    # Create a new directory to run the current versions results in.
    os.makedirs(os.path.join(version, "results"))

    with temporarily_change_directory(version):

        # Load in the force field
        force_field = ForceField(
            "openff-1.2.0.offxml",
            get_data_filename("forcefield/tip3p.offxml"),
        )

        force_field_source = SmirnoffForceFieldSource.from_object(force_field)
        force_field_source.json("force-field.json")

        # Load in the data set, retaining only a specific host / guest pair.
        binding_affinity = TaproomDataSet(
            host_codes=["acd"],
            guest_codes=["bam"],
            default_ionic_strength=150 * unit.millimolar,
        ).properties[0]

        # Set up the calculation
        schema = HostGuestBindingAffinity.default_paprika_schema(
            n_solvent_molecules=2000).workflow_schema
        schema.replace_protocol_types(
            {
                "BaseBuildSystem": (
                    "BuildSmirnoffSystem"
                    if isinstance(force_field_source, SmirnoffForceFieldSource)
                    else "BuildTLeapSystem"
                    if isinstance(force_field_source, TLeapForceFieldSource)
                    else "BaseBuildSystem"
                )
            }
        )

        metadata = Workflow.generate_default_metadata(binding_affinity,
                                                      "force-field.json",
                                                      UNDEFINED)

        workflow = Workflow.from_schema(schema, metadata, "acd_bam")

        # Run the calculation
        with DaskLSFBackend(
                minimum_number_of_workers=1,
                maximum_number_of_workers=50,
                resources_per_worker=QueueWorkerResources(
                    number_of_gpus=1,
                    preferred_gpu_toolkit=QueueWorkerResources.GPUToolkit.CUDA,
                    per_thread_memory_limit=5 * unit.gigabyte,
                    wallclock_time_limit="05:59",
                ),
                setup_script_commands=[
                    "conda activate openff-evaluator-paprika",
                    "module load cuda/10.0",
                ],
                queue_name="gpuqueue",
        ) as calculation_backend:

            results = workflow.execute(
                root_directory="workflow",
                calculation_backend=calculation_backend).result()

        # Save the results
        results.json("results.json", format=True)


def test_add_dummy_atoms(tmp_path, dummy_complex):

    import mdtraj
    from simtk import openmm
    from simtk import unit as simtk_unit

    # Create an empty system to add the dummy atoms to.
    system_path = os.path.join(tmp_path, "input.xml")

    system = openmm.System()
    system.addForce(openmm.NonbondedForce())

    with open(system_path, "w") as file:
        file.write(openmm.XmlSerializer.serialize(system))

    protocol = AddDummyAtoms("release_add_dummy_atoms")
    protocol.substance = dummy_complex
    protocol.input_coordinate_path = get_data_filename(
        os.path.join("test", "molecules", "methanol_methane.pdb")
    )
    protocol.input_system = ParameterizedSystem(
        substance=dummy_complex,
        force_field=None,
        topology_path=get_data_filename(
            os.path.join("test", "molecules", "methanol_methane.pdb")
        ),
        system_path=system_path,
    )
    protocol.offset = 6.0 * unit.angstrom
    protocol.execute(str(tmp_path))

    # Validate that dummy atoms have been added to the configuration file
    # and the structure has been correctly shifted.
    trajectory = mdtraj.load_pdb(protocol.output_coordinate_path)
    assert trajectory.topology.n_atoms == 14

    assert numpy.allclose(trajectory.xyz[0][11:12, :2], 2.5)
    assert numpy.isclose(trajectory.xyz[0][11, 2], 0.62)
    assert numpy.isclose(trajectory.xyz[0][12, 2], 0.32)
    assert numpy.isclose(trajectory.xyz[0][13, 0], 2.5)
    assert numpy.isclose(trajectory.xyz[0][13, 1], 2.72)
    assert numpy.isclose(trajectory.xyz[0][13, 2], 0.1)

    # Validate the atom / residue names.
    all_atoms = [*trajectory.topology.atoms]
    dummy_atoms = all_atoms[11:14]

    assert all(atom.name == "DUM" for atom in dummy_atoms)
    assert all(dummy_atoms[i].residue.name == f"DM{i + 1}" for i in range(3))

    # Validate that the dummy atoms got added to the system
    with open(protocol.output_system.system_path) as file:
        system: openmm.System = openmm.XmlSerializer.deserialize(file.read())

    assert system.getNumParticles() == 3
    assert all(
        numpy.isclose(system.getParticleMass(i).value_in_unit(simtk_unit.dalton), 207.0)
        for i in range(3)
    )

    assert system.getNumForces() == 1
    assert system.getForce(0).getNumParticles() == 3

Example #24

"""A module to strip ``pint`` of its dynamic classes."""
import os
import uuid
import warnings

import pint
from pint.measurement import _Measurement
from pint.quantity import _Quantity
from pint.unit import _Unit

from openff.evaluator.utils import get_data_filename

DEFAULT_UNIT_REGISTRY = pint.UnitRegistry(
    get_data_filename(os.path.join("units", "defaults.txt")))


def _unpickle_quantity(cls, *args):
    """Rebuild quantity upon unpickling using the application registry."""
    return pint._unpickle(DEFAULT_UNIT_REGISTRY.Quantity, *args)


def _unpickle_unit(cls, *args):
    """Rebuild unit upon unpickling using the application registry."""
    return pint._unpickle(DEFAULT_UNIT_REGISTRY.Unit, *args)


def _unpickle_measurement(cls, *args):
    """Rebuild measurement upon unpickling using the application registry."""
    return pint._unpickle(DEFAULT_UNIT_REGISTRY.Measurement, *args)
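

A minimal round-trip sketch (an illustration only, assuming the surrounding
module registers the reducers above with pickle): a quantity built from
DEFAULT_UNIT_REGISTRY should survive pickling and rebuild against that same
registry.

# Illustration only, assuming the reducers above are wired into pickle by the
# surrounding module: a DEFAULT_UNIT_REGISTRY quantity round-trips intact.
import pickle

length = 1.5 * DEFAULT_UNIT_REGISTRY.nanometer
restored = pickle.loads(pickle.dumps(length))

assert restored.to("nanometer").magnitude == 1.5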