Example #1
def test_multiscaler_update_for_minimisation():
    """Test the multiscaler update_for_minimisation method."""

    params, exps = generated_param(), generated_exp(2)
    params.reflection_selection.method = "use_all"
    refl1 = generated_refl(id_=0)
    refl1["intensity.sum.value"] = refl1["intensity"]
    refl1["intensity.sum.variance"] = refl1["variance"]
    refl2 = generated_refl(id_=1)
    refl2["intensity.sum.value"] = refl2["intensity"]
    refl2["intensity.sum.variance"] = refl2["variance"]
    params.scaling_options.nproc = 2
    params.model = "physical"
    experiments = create_scaling_model(params, exps, [refl1, refl2])
    scaler1 = create_scaler(params, [experiments[0]], [refl1])
    scaler2 = create_scaler(params, [experiments[1]], [refl2])

    multiscaler = MultiScaler([scaler1, scaler2])
    pmg = ScalingParameterManagerGenerator(
        multiscaler.active_scalers,
        ScalingTarget,
        multiscaler.params.scaling_refinery.refinement_order,
    )
    # Perturb the scale components so update_for_minimisation has real work to do.
    multiscaler.single_scalers[0].components["scale"].parameters /= 2.0
    multiscaler.single_scalers[1].components["scale"].parameters *= 1.5
    apm = pmg.parameter_managers()[0]
    for block_id in (0, 1):
        multiscaler.update_for_minimisation(apm, block_id)
    # Independently recompute the expected scales and derivatives for each
    # (single-scaler apm, block) pair.
    s1, d1 = RefinerCalculator.calculate_scales_and_derivatives(apm.apm_list[0], 0)
    s2, d2 = RefinerCalculator.calculate_scales_and_derivatives(apm.apm_list[1], 0)
    s3, d3 = RefinerCalculator.calculate_scales_and_derivatives(apm.apm_list[0], 1)
    s4, d4 = RefinerCalculator.calculate_scales_and_derivatives(apm.apm_list[1], 1)
    expected_scales_block_1 = s1
    expected_scales_block_1.extend(s2)
    expected_scales_block_2 = s3
    expected_scales_block_2.extend(s4)

    # Expected derivative matrices: each single scaler's derivatives occupy a
    # sub-block offset by that scaler's starting parameter index.
    expected_derivs_block_1 = sparse.matrix(
        expected_scales_block_1.size(), apm.n_active_params
    )
    expected_derivs_block_2 = sparse.matrix(
        expected_scales_block_2.size(), apm.n_active_params
    )
    expected_derivs_block_1.assign_block(d1, 0, 0)
    expected_derivs_block_1.assign_block(d2, d1.n_rows, apm.apm_data[1]["start_idx"])
    expected_derivs_block_2.assign_block(d3, 0, 0)
    expected_derivs_block_2.assign_block(d4, d3.n_rows, apm.apm_data[1]["start_idx"])

    blocks = multiscaler.Ih_table.blocked_data_list

    assert blocks[0].inverse_scale_factors == expected_scales_block_1
    assert blocks[1].inverse_scale_factors == expected_scales_block_2
    assert blocks[1].derivatives == expected_derivs_block_2
    assert blocks[0].derivatives == expected_derivs_block_1
Example #2
 def create_from_targetscaler(cls, targetscaler):
     """Build a MultiScaler from the scalers held by a TargetScaler.

     The unscaled scalers are taken first, followed by the already-scaled
     single scalers, and the targetscaler's observers are carried over to
     the new MultiScaler.
     """
     # list(...) replaces the manual append loop (same iteration order).
     single_scalers = list(targetscaler.unscaled_scalers)
     single_scalers.extend(targetscaler.single_scalers)
     multiscaler = MultiScaler(single_scalers)
     multiscaler.observers = targetscaler.observers
     return multiscaler
Example #3
 def create_from_targetscaler(cls, targetscaler):
     """method to pass scalers from TargetScaler to a MultiScaler"""
     # Unscaled scalers first, then the already-scaled ones.
     scalers = list(targetscaler.unscaled_scalers)
     scalers.extend(targetscaler.single_scalers)
     new_multiscaler = MultiScaler(
         targetscaler.params, [targetscaler.experiment], scalers
     )
     new_multiscaler.observers = targetscaler.observers
     return new_multiscaler
Example #4
 def create(cls, params, experiments, reflections):
     """create a list of single scalers to pass to a MultiScaler.

     Datasets for which a SingleScaler cannot be created (signalled by
     BadDatasetForScalingException) are removed from ``experiments`` and
     ``reflections`` in place before the MultiScaler is constructed.
     """
     single_scalers = []
     # Number of datasets removed so far; corrects indices into the
     # (shrinking) experiments/reflections lists.
     offset = 0
     for i in range(len(reflections)):
         # Remove bad datasets that literally have no integrated reflections
         try:
             scaler = SingleScalerFactory.create(
                 params,
                 experiments[i - offset],
                 reflections[i - offset],
                 for_multi=True,
             )
         except BadDatasetForScalingException as e:
             logger.info(e)
             # Lazy %-formatting; the message text is unchanged.
             logger.info("Removing experiment %s\n%s\n", i, "=" * 80)
             del experiments[i - offset]
             del reflections[i - offset]
             offset += 1
         else:
             # Only the success path appends, so the try body stays minimal.
             single_scalers.append(scaler)
     assert len(experiments) == len(single_scalers), (
         len(experiments),
         len(single_scalers),
     )
     assert len(experiments) == len(reflections), (
         len(experiments),
         len(reflections),
     )
     determine_reflection_selection_parameters(params, experiments,
                                               reflections)
     return MultiScaler(params, experiments, single_scalers)
Example #5
 def create(cls, params, experiments, reflections):
     """create a list of single scalers to pass to a MultiScaler."""
     single_scalers = []
     bad_indices = []
     for n, (experiment, table) in enumerate(zip(experiments, reflections)):
         # Remove bad datasets that literally have no integrated reflections
         try:
             single_scalers.append(
                 SingleScalerFactory.create(params, experiment, table,
                                            for_multi=True)
             )
         except BadDatasetForScalingException as e:
             logger.info(e)
             bad_indices.append(n)
     if bad_indices:
         # Delete from the end so earlier indices remain valid.
         for idx in reversed(bad_indices):
             del experiments[idx]
             del reflections[idx]
         logger.info("Removed experiments %s",
                     " ".join(str(i) for i in bad_indices))
     n_exp = len(experiments)
     n_refl = len(reflections)
     n_scalers = len(single_scalers)
     assert n_exp == n_scalers, (n_exp, n_scalers)
     assert n_exp == n_refl, (n_exp, n_refl)
     return MultiScaler(single_scalers)
Example #6
def test_multiscaler_initialisation():
    """Unit tests for the MultiScalerBase class."""
    params, exps = generated_param(), generated_exp(2)
    refl1 = generated_refl(id_=0)
    refl2 = generated_refl(id_=1)
    # Both tables get identical sum-intensity columns from the generated data.
    for table in (refl1, refl2):
        table["intensity.sum.value"] = table["intensity"]
        table["intensity.sum.variance"] = table["variance"]
    experiments = create_scaling_model(params, exps, [refl1, refl2])
    scaler1 = create_scaler(params, [experiments[0]], [refl1])
    scaler2 = create_scaler(params, [experiments[1]], [refl2])

    multiscaler = MultiScaler([scaler1, scaler2])

    # check initialisation: both single scalers active, in construction order
    assert len(multiscaler.active_scalers) == 2
    assert multiscaler.active_scalers[0] == scaler1
    assert multiscaler.active_scalers[1] == scaler2

    # check for correct setup of global Ih table (7 reflections per dataset)
    assert multiscaler.global_Ih_table.size == 14
    global_block = multiscaler.global_Ih_table.blocked_data_list[0]
    assert list(global_block.intensities) == (
        [3.0, 1.0, 500.0, 2.0, 2.0, 2.0, 4.0] * 2
    )
    global_selections = global_block.block_selections
    assert list(global_selections[0]) == [2, 0, 4, 5, 6, 1, 3]
    assert list(global_selections[1]) == [2, 0, 4, 5, 6, 1, 3]

    # check for correct setup of Ih_table (the 500.0 intensity is absent —
    # presumably filtered out for minimisation; 6 reflections per dataset)
    assert multiscaler.Ih_table.size == 12
    work_block = multiscaler.Ih_table.blocked_data_list[0]
    assert list(work_block.intensities) == [3.0, 1.0, 2.0, 2.0, 2.0, 4.0] * 2
    block_selections = work_block.block_selections
    assert list(block_selections[0]) == [2, 0, 5, 6, 1, 3]
    assert list(block_selections[1]) == [2, 0, 5, 6, 1, 3]

    # check for correct data/d_values in components
    for i, scaler in enumerate(multiscaler.active_scalers):
        d_suitable = scaler.reflection_table["d"].select(
            scaler.suitable_refl_for_scaling_sel
        )
        decay = scaler.experiment.scaling_model.components["decay"]
        # first check 'data' contains all suitable reflections
        assert list(decay.data["d"]) == list(d_suitable)
        # Now check 'd_values' (which will be used for minim.) matches Ih_table data
        assert list(decay.d_values[0]) == list(
            d_suitable.select(flumpy.from_numpy(block_selections[i]))
        )