Example #1
def test_average_free_energies_protocol():
    """Tests adding together two free energies."""

    delta_g_one = Observable(
        value=(-10.0 * unit.kilocalorie / unit.mole).plus_minus(
            1.0 * unit.kilocalorie / unit.mole),
        gradients=[
            ParameterGradient(
                key=ParameterGradientKey("vdW", "[#6:1]", "sigma"),
                value=0.1 * unit.kilocalorie / unit.mole / unit.angstrom,
            )
        ],
    )
    delta_g_two = Observable(
        value=(-20.0 * unit.kilocalorie / unit.mole).plus_minus(
            2.0 * unit.kilocalorie / unit.mole),
        gradients=[
            ParameterGradient(
                key=ParameterGradientKey("vdW", "[#6:1]", "sigma"),
                value=0.2 * unit.kilocalorie / unit.mole / unit.angstrom,
            )
        ],
    )

    thermodynamic_state = ThermodynamicState(298 * unit.kelvin,
                                             1 * unit.atmosphere)

    sum_protocol = AverageFreeEnergies("")

    sum_protocol.values = [delta_g_one, delta_g_two]
    sum_protocol.thermodynamic_state = thermodynamic_state

    sum_protocol.execute()

    result_value = sum_protocol.result.value.to(unit.kilocalorie / unit.mole)
    result_uncertainty = sum_protocol.result.error.to(unit.kilocalorie /
                                                      unit.mole)

    assert isinstance(sum_protocol.result, Observable)
    assert result_value.magnitude == pytest.approx(-20.0, abs=0.2)
    assert result_uncertainty.magnitude == pytest.approx(2.0, abs=0.2)

    assert (sum_protocol.confidence_intervals[0] > result_value >
            sum_protocol.confidence_intervals[1])

    gradient_value = sum_protocol.result.gradients[0].value.to(
        unit.kilocalorie / unit.mole / unit.angstrom)
    beta = 1.0 / (298.0 * unit.kelvin * unit.molar_gas_constant).to(
        unit.kilocalorie / unit.mole)

    assert np.isclose(
        gradient_value.magnitude,
        (0.1 * np.exp(-beta.magnitude * -10.0) +
         0.2 * np.exp(-beta.magnitude * -20.0)) /
        (np.exp(-beta.magnitude * -10.0) + np.exp(-beta.magnitude * -20.0)),
    )
Example #2
def test_observable_initializer(value, gradients, expected_raises,
                                expected_message):

    with expected_raises as error_info:
        Observable(value, gradients)

    if expected_message is not None:
        assert expected_message in str(error_info.value)
Example #3
    def bootstrap_function(values: ObservableArray) -> Observable:

        return Observable(
            value=values.value.mean().plus_minus(0.0 * values.value.units),
            gradients=[
                ParameterGradient(gradient.key, numpy.mean(gradient.value))
                for gradient in values.gradients
            ],
        )
Example #4
    def _execute(self, directory, available_resources):

        from paprika import analyze

        # Set up the expected directory structure.
        windows_directory = os.path.join(directory, "windows")
        os.makedirs(windows_directory, exist_ok=True)

        window_phase = {"attach": "a", "pull": "p", "release": "r"}[self.phase]

        for window_index, trajectory_path in enumerate(self.trajectory_paths):

            # Create a directory to link the trajectory into.
            window_directory = f"{window_phase}{str(window_index).zfill(3)}"
            os.makedirs(os.path.join(windows_directory, window_directory),
                        exist_ok=True)

            # Sym-link the trajectory into the new directory to avoid copying
            # large trajectory files.
            destination_path = os.path.join(windows_directory,
                                            window_directory, "trajectory.dcd")
            if not os.path.isfile(destination_path):
                os.symlink(os.path.join(os.getcwd(), trajectory_path),
                           destination_path)

            # Also sym-link the topology path
            destination_path = os.path.join(windows_directory,
                                            window_directory, "topology.pdb")
            if not os.path.isfile(destination_path):
                os.symlink(os.path.join(os.getcwd(), self.topology_path),
                           destination_path)

        restraints = ApplyRestraints.load_restraints(self.restraints_path)

        flat_restraints = [
            restraint for restraint_type in restraints
            for restraint in restraints[restraint_type]
        ]

        results = analyze.compute_phase_free_energy(
            phase=self.phase,
            restraints=flat_restraints,
            windows_directory=windows_directory,
            topology_name="topology.pdb",
            analysis_method="ti-block",
        )

        multiplier = {"attach": -1.0, "pull": -1.0, "release": 1.0}[self.phase]

        self.result = Observable(
            unit.Measurement(
                multiplier * results[self.phase]["ti-block"]["fe"] *
                unit.kilocalorie / unit.mole,
                results[self.phase]["ti-block"]["sem"] * unit.kilocalorie /
                unit.mole,
            ))
Example #5
    def _execute(self, directory, available_resources):

        from paprika.evaluator import Analyze

        self.result = Observable(
            unit.Measurement(
                Analyze.symmetry_correction(
                    self.n_microstates,
                    self.thermodynamic_state.temperature.to(
                        unit.kelvin).magnitude,
                ) * unit.kilocalorie / unit.mole,
                0 * unit.kilocalorie / unit.mole,
            ))
Example #6
    def _execute(self, directory, available_resources):

        force_field_source = ForceFieldSource.from_json(self.force_field_path)

        if not isinstance(force_field_source, SmirnoffForceFieldSource):
            raise ValueError("Only SMIRNOFF force fields are supported.")

        force_field = force_field_source.to_force_field()

        parameter_units = {
            gradient_key: openmm_quantity_to_pint(
                getattr(
                    force_field.get_parameter_handler(
                        gradient_key.tag).parameters[gradient_key.smirks],
                    gradient_key.attribute,
                )).units
            for gradient_key in self.gradient_parameters
        }

        self.input_observables.clear_gradients()

        if isinstance(self.input_observables, Observable):

            self.output_observables = Observable(
                value=self.input_observables.value,
                gradients=[
                    ParameterGradient(
                        key=gradient_key,
                        value=(0.0 * self.input_observables.value.units /
                               parameter_units[gradient_key]),
                    ) for gradient_key in self.gradient_parameters
                ],
            )

        elif isinstance(self.input_observables, ObservableArray):

            self.output_observables = ObservableArray(
                value=self.input_observables.value,
                gradients=[
                    ParameterGradient(
                        key=gradient_key,
                        value=(
                            numpy.zeros(self.input_observables.value.shape) *
                            self.input_observables.value.units /
                            parameter_units[gradient_key]),
                    ) for gradient_key in self.gradient_parameters
                ],
            )

        else:
            raise NotImplementedError()
Example #7
    def _execute(self, directory, available_resources):

        from paprika.evaluator import Analyze

        restraints = ApplyRestraints.load_restraints(self.restraints_path)
        guest_restraints = restraints["guest"]

        self.result = Observable(
            unit.Measurement(
                -Analyze.compute_ref_state_work(
                    self.thermodynamic_state.temperature.to(
                        unit.kelvin).magnitude,
                    guest_restraints,
                ) * unit.kilocalorie / unit.mole,
                0 * unit.kilocalorie / unit.mole,
            ))
Example #8
def test_observable_round_trip():

    observable = Observable(
        value=(0.1 * unit.kelvin).plus_minus(0.2 * unit.kelvin),
        gradients=[
            ParameterGradient(
                key=ParameterGradientKey("vdW", "[#6:1]", "epsilon"),
                value=0.2 * unit.kelvin,
            )
        ],
    )

    round_tripped: Observable = json.loads(json.dumps(observable,
                                                      cls=TypedJSONEncoder),
                                           cls=TypedJSONDecoder)

    assert isinstance(round_tripped, Observable)

    assert numpy.isclose(observable.value, round_tripped.value)
    assert numpy.isclose(observable.error, round_tripped.error)

    assert len(observable.gradients) == len(round_tripped.gradients)
    assert observable.gradients[0] == round_tripped.gradients[0]
Example #9
    def _execute(self, directory, available_resources):

        from scipy.special import logsumexp

        default_unit = unit.kilocalorie / unit.mole

        boltzmann_factor = (
            self.thermodynamic_state.temperature * unit.molar_gas_constant
        )
        boltzmann_factor.ito(default_unit)

        beta = 1.0 / boltzmann_factor

        values = [
            (-beta * value.value.to(default_unit)).to(unit.dimensionless).magnitude
            for value in self.values
        ]

        # Compute the mean.
        mean = logsumexp(values)

        # Compute the gradients of the mean.
        value_gradients = [
            {gradient.key: -beta * gradient.value for gradient in value.gradients}
            for value in self.values
        ]
        value_gradients_by_key = {
            gradient_key: [
                gradients_by_key[gradient_key] for gradients_by_key in value_gradients
            ]
            for gradient_key in value_gradients[0]
        }

        mean_gradients = []

        for gradient_key, gradient_values in value_gradients_by_key.items():

            expected_unit = value_gradients[0][gradient_key].units

            d_log_mean_numerator, d_mean_numerator_sign = logsumexp(
                values,
                b=[x.to(expected_unit).magnitude for x in gradient_values],
                return_sign=True,
            )
            d_mean_numerator = d_mean_numerator_sign * np.exp(d_log_mean_numerator)

            d_mean_d_theta = d_mean_numerator / np.exp(mean)

            mean_gradients.append(
                ParameterGradient(
                    key=gradient_key,
                    value=-boltzmann_factor * d_mean_d_theta * expected_unit,
                )
            )

        # Compute the standard error and 95% CI
        cycle_result = np.empty(self.bootstrap_cycles)

        for cycle_index in range(self.bootstrap_cycles):

            cycle_values = np.empty(len(self.values))

            for value_index, value in enumerate(self.values):

                cycle_mean = value.value.to(default_unit).magnitude
                cycle_sem = value.error.to(default_unit).magnitude

                sampled_value = np.random.normal(cycle_mean, cycle_sem) * default_unit
                cycle_values[value_index] = (
                    (-beta * sampled_value).to(unit.dimensionless).magnitude
                )

            # ΔG° = -RT × Log[ Σ_{n} exp(-βΔG°_{n}) ]
            cycle_result[cycle_index] = logsumexp(cycle_values)

        mean = -boltzmann_factor * mean
        sem = np.std(-boltzmann_factor * cycle_result)

        confidence_intervals = np.empty(2)
        sorted_statistics = np.sort(cycle_result)

        confidence_intervals[0] = sorted_statistics[int(0.025 * self.bootstrap_cycles)]
        confidence_intervals[1] = sorted_statistics[int(0.975 * self.bootstrap_cycles)]

        confidence_intervals = -boltzmann_factor * confidence_intervals

        self.result = Observable(value=mean.plus_minus(sem), gradients=mean_gradients)
        self.confidence_intervals = confidence_intervals
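
For reference, the exponential average evaluated by `_execute` above reduces to ΔG° = -RT ln Σ_n exp(-βΔG°_n), and the gradient of the result is the Boltzmann-weighted mean of the per-state gradients. Below is a minimal, standalone sketch of that arithmetic using only NumPy and SciPy; the input values are purely illustrative and the unit handling of the protocol is deliberately omitted.

import numpy as np
from scipy.special import logsumexp

# Illustrative inputs: two free energies (kcal/mol) and their gradients with
# respect to a single force field parameter (kcal/mol/angstrom).
delta_gs = np.array([-10.0, -20.0])
gradients = np.array([0.1, 0.2])

rt = 298.0 * 1.987204259e-3  # RT in kcal/mol, with R in kcal/(mol K)
beta = 1.0 / rt

# dG = -RT * ln sum_n exp(-beta * dG_n), evaluated stably with logsumexp.
combined_delta_g = -rt * logsumexp(-beta * delta_gs)

# d(dG)/d(theta) = sum_n grad_n exp(-beta dG_n) / sum_n exp(-beta dG_n),
# i.e. the Boltzmann-weighted mean of the individual gradients.
weights = np.exp(-beta * delta_gs - logsumexp(-beta * delta_gs))
combined_gradient = np.sum(weights * gradients)
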
Example #10
def compute_dielectric_constant(
    dipole_moments: ObservableArray,
    volumes: ObservableArray,
    temperature: unit.Quantity,
    average_function,
) -> Observable:
    """A function to compute the average dielectric constant from an array of
    dipole moments and an array of volumes, whereby the average values of the
    observables are computed using a custom function.

    Parameters
    ----------
    dipole_moments
        The dipole moments array.
    volumes
        The volume array.
    temperature
        The temperature at which the dipole_moments and volumes were sampled.
    average_function
        The function to use when evaluating the average of an observable.

    Returns
    -------
        The average value of the dielectric constant.
    """

    dipole_moments_sqr = dipole_moments * dipole_moments
    dipole_moments_sqr = ObservableArray(
        value=dipole_moments_sqr.value.sum(axis=1),
        gradients=[
            ParameterGradient(gradient.key, gradient.value.sum(axis=1))
            for gradient in dipole_moments_sqr.gradients
        ],
    )

    avg_sqr_dipole_moments = average_function(observable=dipole_moments_sqr)
    avg_sqr_dipole_moments = ObservableArray(
        avg_sqr_dipole_moments.value, avg_sqr_dipole_moments.gradients
    )

    avg_dipole_moment = average_function(observable=dipole_moments)

    avg_dipole_moment_sqr = avg_dipole_moment * avg_dipole_moment
    avg_dipole_moment_sqr = ObservableArray(
        value=avg_dipole_moment_sqr.value.sum(axis=1),
        gradients=[
            ParameterGradient(gradient.key, gradient.value.sum(axis=1))
            for gradient in avg_dipole_moment_sqr.gradients
        ],
    )

    avg_volume = average_function(observable=volumes)
    avg_volume = ObservableArray(avg_volume.value, avg_volume.gradients)

    dipole_variance = avg_sqr_dipole_moments - avg_dipole_moment_sqr

    prefactor = 1.0 / (3.0 * E0 * unit.boltzmann_constant * temperature)

    dielectric_constant = 1.0 * unit.dimensionless + prefactor * (
        dipole_variance / avg_volume
    )

    return Observable(
        value=dielectric_constant.value.item().to(unit.dimensionless),
        gradients=[
            ParameterGradient(
                gradient.key,
                gradient.value.item().to(
                    unit.dimensionless
                    * gradient.value.units
                    / dielectric_constant.value.units
                ),
            )
            for gradient in dielectric_constant.gradients
        ],
    )
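
The fluctuation formula evaluated by `compute_dielectric_constant` above is ε = 1 + (⟨M²⟩ − ⟨M⟩²) / (3 ε₀ k_B T ⟨V⟩). The helper below is a hypothetical, unit-stripped sketch of the same expression that assumes plain NumPy arrays (dipole moments in C·m, volumes in m³) in place of `ObservableArray` objects and ignores gradients entirely.

import numpy as np
from scipy import constants

def dielectric_from_fluctuations(dipole_moments, volumes, temperature):
    """epsilon = 1 + (<M^2> - <M>^2) / (3 eps0 kB T <V>).

    dipole_moments: (n_frames, 3) array of total box dipoles in C m.
    volumes: (n_frames,) array of box volumes in m^3.
    temperature: temperature in K.
    """
    # <M^2>: mean of the squared dipole magnitude over frames.
    avg_sqr_dipole = np.mean(np.sum(dipole_moments ** 2, axis=1))
    # <M>^2: squared magnitude of the mean dipole vector.
    sqr_avg_dipole = np.sum(np.mean(dipole_moments, axis=0) ** 2)

    dipole_variance = avg_sqr_dipole - sqr_avg_dipole

    prefactor = 1.0 / (3.0 * constants.epsilon_0 * constants.k * temperature)

    return 1.0 + prefactor * dipole_variance / np.mean(volumes)
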
Example #11
def test_workflow_layer():
    """Test the `WorkflowLayer` calculation layer. As the `SimulationLayer`
    is the simplest implementation of the abstract layer, it is the one
    exercised here."""

    properties_to_estimate = [
        create_dummy_property(Density),
        create_dummy_property(Density),
    ]

    # Create a very simple workflow which just returns some placeholder
    # value.
    estimated_value = Observable(
        (1 * unit.kelvin).plus_minus(0.1 * unit.kelvin))
    protocol_a = DummyProtocol("protocol_a")
    protocol_a.input_value = estimated_value

    schema = WorkflowSchema()
    schema.protocol_schemas = [protocol_a.schema]
    schema.final_value_source = ProtocolPath("output_value", protocol_a.id)

    layer_schema = SimulationSchema()
    layer_schema.workflow_schema = schema

    options = RequestOptions()
    options.add_schema("SimulationLayer", "Density", layer_schema)

    batch = server.Batch()
    batch.queued_properties = properties_to_estimate
    batch.options = options

    with tempfile.TemporaryDirectory() as directory:

        with temporarily_change_directory(directory):

            # Create a directory for the layer.
            layer_directory = "simulation_layer"
            os.makedirs(layer_directory)

            # Set up a simple storage backend and add a force field to it.
            force_field = SmirnoffForceFieldSource.from_path(
                "smirnoff99Frosst-1.1.0.offxml")

            storage_backend = LocalFileStorage()
            batch.force_field_id = storage_backend.store_force_field(
                force_field)

            # Create a simple calculation backend to test with.
            with DaskLocalCluster() as calculation_backend:

                def dummy_callback(returned_request):

                    assert len(returned_request.estimated_properties) == 2
                    assert len(returned_request.exceptions) == 0

                simulation_layer = SimulationLayer()

                simulation_layer.schedule_calculation(
                    calculation_backend,
                    storage_backend,
                    layer_directory,
                    batch,
                    dummy_callback,
                    True,
                )