Example #1
    def _optimise(self, termination_params):
        NN = len(self.input_intensities)
        dim = self.target.dim
        n_sym_ops = len(self.target.get_sym_ops())
        # random starting point for the flat (NN * n_sym_ops * dim) parameter vector
        coords = flex.random_double(NN * n_sym_ops * dim)

        import scitbx.lbfgs

        tp = termination_params
        termination_params = scitbx.lbfgs.termination_parameters(
            traditional_convergence_test=tp.traditional_convergence_test,
            traditional_convergence_test_eps=tp.traditional_convergence_test_eps,
            drop_convergence_test_n_test_points=tp.drop_convergence_test_n_test_points,
            drop_convergence_test_max_drop_eps=tp.drop_convergence_test_max_drop_eps,
            drop_convergence_test_iteration_coefficient=tp.drop_convergence_test_iteration_coefficient,
            # min_iterations=tp.min_iterations,
            max_iterations=tp.max_iterations,
            max_calls=tp.max_calls,
        )

        M = engine.lbfgs_with_curvs(
            self.target,
            coords,
            use_curvatures=self.params.use_curvatures,
            termination_params=termination_params,
        )
        self.minimizer = M

        # unpack the flat solution vector into an (NN * n_sym_ops) x dim matrix
        # of coordinates, one row per (dataset, sym_op) pair
        coords = M.x.deep_copy()
        coords.reshape(flex.grid(dim, NN * n_sym_ops))
        coords.matrix_transpose_in_place()
        self.coords = coords
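For context, scitbx.lbfgs (whose termination_parameters Example #1 constructs) drives a simple evaluator protocol: an object exposing a flex vector x and a compute_functional_and_gradients() method. Below is a minimal sketch of that protocol on a toy quadratic; _quadratic_evaluator is illustrative and not part of the cosym code.

import scitbx.lbfgs
from scitbx.array_family import flex

class _quadratic_evaluator(object):
    """Toy evaluator minimising f(x) = sum(x_i^2)."""

    def __init__(self, n):
        # scitbx.lbfgs reads and updates self.x in place
        self.x = flex.random_double(n)

    def compute_functional_and_gradients(self):
        f = flex.sum(flex.pow2(self.x))  # f(x) = sum x_i^2
        g = self.x * 2                   # df/dx_i = 2 x_i
        return f, g

evaluator = _quadratic_evaluator(10)
scitbx.lbfgs.run(target_evaluator=evaluator)
assert flex.max(flex.abs(evaluator.x)) < 1e-3  # converged towards the origin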
Example #2
def test_cosym_target(space_group):
    datasets, expected_reindexing_ops = generate_test_data(
        space_group=sgtbx.space_group_info(symbol=space_group).group(),
        sample_size=50)

    intensities = datasets[0]
    dataset_ids = np.zeros(intensities.size() * len(datasets))
    for i, d in enumerate(datasets[1:], start=1):
        intensities = intensities.concatenate(d,
                                              assert_is_similar_symmetry=False)
        dataset_ids[i * d.size():(i + 1) * d.size()] = np.full(d.size(),
                                                               i,
                                                               dtype=int)

    for weights in [None, "count", "standard_error"]:
        print(weights)
        t = target.Target(intensities, dataset_ids, weights=weights)
        m = len(t.sym_ops)
        n = len(datasets)
        assert t.dim == m
        assert t.rij_matrix.shape == (n * m, n * m)
        # x = np.random.rand(n * m * t.dim)
        x = flex.random_double(n * m * t.dim).as_numpy_array()
        f0 = t.compute_functional(x)
        g = t.compute_gradients(x)
        g_fd = t.compute_gradients_fd(x)
        np.testing.assert_allclose(g, g_fd, rtol=2e-3)
        c = t.curvatures(x)
        c_fd = t.curvatures_fd(x, eps=1e-3)
        assert list(c) == pytest.approx(c_fd, rel=0.8e-1)

        if weights == "count":
            # Absolute upper limit on weights
            assert t.wij_matrix.max() <= datasets[0].size()

        minimizer = engine.lbfgs_with_curvs(target=t, coords=x)
        # check functional has decreased and gradients are approximately zero
        f = t.compute_functional(minimizer.coords)
        g = t.compute_gradients(minimizer.coords)
        g_fd = t.compute_gradients_fd(minimizer.coords)
        assert f < f0
        assert pytest.approx(g, abs=1e-3) == [0] * len(g)
        assert pytest.approx(g_fd, abs=1e-3) == [0] * len(g)
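Example #2 validates the analytical gradients against compute_gradients_fd. That method's implementation is not shown here; a generic central-difference check of the same shape might look like the sketch below (gradients_fd is a hypothetical stand-in, not the cosym API).

import numpy as np

def gradients_fd(f, x, eps=1e-6):
    # central-difference approximation of df/dx_i, one coordinate at a time
    g = np.zeros_like(x)
    for i in range(x.size):
        xp = x.copy(); xp[i] += eps
        xm = x.copy(); xm[i] -= eps
        g[i] = (f(xp) - f(xm)) / (2 * eps)
    return g

# for f(x) = sum(x_i^2) the result should match the analytical gradient 2 * x
x = np.random.rand(5)
np.testing.assert_allclose(gradients_fd(lambda v: (v ** 2).sum(), x),
                           2 * x, rtol=1e-4, atol=1e-8)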
Example #3
def test_cosym_target(space_group):
    datasets, expected_reindexing_ops = generate_test_data(
        space_group=sgtbx.space_group_info(symbol=space_group).group(),
        sample_size=50)

    intensities = datasets[0]
    dataset_ids = flex.double(intensities.size(), 0)
    for i, d in enumerate(datasets[1:]):
        intensities = intensities.concatenate(d,
                                              assert_is_similar_symmetry=False)
        dataset_ids.extend(flex.double(d.size(), i + 1))

    for weights in [None, "count", "standard_error"]:
        print(weights)
        t = target.Target(intensities, dataset_ids, weights=weights)
        m = len(t.get_sym_ops())
        n = len(datasets)
        assert t.dim == m
        assert t.rij_matrix.all() == (n * m, n * m)
        x = flex.random_double(n * m * t.dim)
        f0, g = t.compute_functional_and_gradients(x)
        g_fd = t.compute_gradients_fd(x)
        for i, (g_i, g_fd_i) in enumerate(zip(g, g_fd)):
            assert g_i == pytest.approx(g_fd_i, rel=2e-3), i

        c = t.curvatures(x)
        c_fd = t.curvatures_fd(x, eps=1e-3)
        assert list(c) == pytest.approx(c_fd, rel=0.8e-1)

        assert engine.lbfgs_with_curvs(target=t, coords=x)
        t.compute_functional(x)  # x has been updated in place by the minimizer
        # check functional has decreased and gradients are approximately zero
        f, g = t.compute_functional_and_gradients(x)
        g_fd = t.compute_gradients_fd(x)
        assert f < f0
        assert pytest.approx(g, abs=1e-3) == [0] * len(g)
        assert pytest.approx(g_fd, abs=1e-3) == [0] * len(g)
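The curvatures_fd comparison plays the same role for the diagonal second derivatives. A sketch using central second differences (again a generic stand-in, not the cosym implementation):

import numpy as np

def curvatures_fd(f, x, eps=1e-3):
    # central second differences approximate the diagonal of the Hessian
    c = np.zeros_like(x)
    f0 = f(x)
    for i in range(x.size):
        xp = x.copy(); xp[i] += eps
        xm = x.copy(); xm[i] -= eps
        c[i] = (f(xp) - 2 * f0 + f(xm)) / eps ** 2
    return c

# for f(x) = sum(x_i^2) every diagonal curvature is exactly 2
x = np.random.rand(5)
np.testing.assert_allclose(curvatures_fd(lambda v: (v ** 2).sum(), x), 2.0, rtol=1e-5)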
Example #4
def test_cosym_target(space_group):
    datasets, expected_reindexing_ops = generate_test_data(
        space_group=sgtbx.space_group_info(symbol=space_group).group())

    for weights in [None, 'count', 'standard_error']:

        t = target.Target(
            datasets,
            weights=weights,
        )
        m = len(t.get_sym_ops())
        n = len(datasets)
        assert t.dim == m
        assert t.rij_matrix.all() == (n * m, n * m)
        x = flex.random_double(n * m * t.dim)
        x_orig = x.deep_copy()  # retain the starting point; x is modified in place below
        f0, g = t.compute_functional_and_gradients(x)
        g_fd = t.compute_gradients_fd(x)
        assert g.all_approx_equal_relatively(g_fd, relative_error=1e-4)

        c = t.curvatures(x)
        c_fd = t.curvatures_fd(x, eps=1e-3)
        assert c.all_approx_equal_relatively(c_fd, relative_error=0.5e-1)

        M = engine.lbfgs_with_curvs(
            target=t,
            coords=x,
            verbose=False,
        )
        t.compute_functional(x)  # x has been updated in place by the minimizer
        # check functional has decreased and gradients are approximately zero
        f, g = t.compute_functional_and_gradients(x)
        g_fd = t.compute_gradients_fd(x)
        assert f < f0
        assert g.all_approx_equal(0, 1e-3)
        assert g_fd.all_approx_equal(0, 1e-3)
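In all four examples the minimiser works on a flat parameter vector packing a dim x (n * m) coordinate matrix, which Example #1 unpacks via reshape and matrix_transpose_in_place. An equivalent numpy sketch of that layout, with illustrative sizes:

import numpy as np

dim, n_datasets, n_sym_ops = 2, 5, 4
x = np.random.rand(dim * n_datasets * n_sym_ops)  # flat L-BFGS parameter vector

# one dim-dimensional coordinate per (dataset, sym_op) pair
coords = x.reshape(dim, n_datasets * n_sym_ops).T
assert coords.shape == (n_datasets * n_sym_ops, dim)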