def test_choose_initial_scaling_intensities(test_reflections):
    """Test for correct choice of intensities."""
    test_refl = test_reflections
    intstr = "prf"
    new_rt = choose_initial_scaling_intensities(test_refl, intstr)
    assert list(new_rt["intensity"]) == list(test_refl["intensity.prf.value"])
    assert list(new_rt["variance"]) == list(test_refl["intensity.prf.variance"])
    intstr = "sum"  # should apply partiality correction
    new_rt = choose_initial_scaling_intensities(test_refl, intstr)
    assert list(new_rt["intensity"]) == list(
        test_refl["intensity.sum.value"] / test_refl["partiality"]
    )
    assert list(new_rt["variance"]) == pytest.approx(
        list(
            test_refl["intensity.sum.variance"] / flex.pow2(test_refl["partiality"])
        )
    )
def refine_error_model(params, experiments, reflection_tables):
    """Do error model refinement."""
    # Prepare the relevant data structures for each dataset.
    for i, table in enumerate(reflection_tables):
        # First get the good data
        table = table.select(~table.get_flags(table.flags.bad_for_scaling, all=False))
        # Now choose intensities; ideally these two options could be combined
        # with a smart refactor
        if params.intensity_choice == "combine":
            if not params.combine.Imid:
                sys.exit("Imid value must be provided if intensity_choice=combine")
            table = calculate_prescaling_correction(table)  # needed for below.
            I, V = combine_intensities(table, params.combine.Imid)
            table["intensity"] = I
            table["variance"] = V
        else:
            table = choose_initial_scaling_intensities(
                table, intensity_choice=params.intensity_choice
            )
        reflection_tables[i] = table
    space_group = experiments[0].crystal.get_space_group()
    Ih_table = IhTable(
        reflection_tables, space_group, additional_cols=["partiality"], anomalous=True
    )
    # Now do the error model refinement.
    model = BasicErrorModel(basic_params=params.basic)
    try:
        model = run_error_model_refinement(model, Ih_table)
    except (ValueError, RuntimeError) as e:
        logger.info(e)
    else:
        return model
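

# Illustrative sketch (not part of this module): how refine_error_model might be
# called. Here `params` stands for an extracted phil parameter block exposing the
# `intensity_choice`, `combine.Imid` and `basic` attributes used above, however it
# is obtained; the file names are placeholders.
#
#     from dials.array_family import flex
#     from dxtbx.model.experiment_list import ExperimentList
#
#     experiments = ExperimentList.from_file("scaled.expt")
#     reflection_tables = [flex.reflection_table.from_file("scaled.refl")]
#     model = refine_error_model(params, experiments, reflection_tables)
#     if model is not None:  # refinement may fail, in which case None is returned
#         logger.info(model)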
def create(cls, params, experiment, reflection_table, for_multi=False):
    """Perform reflection_table preprocessing and create a SingleScaler."""
    cls.ensure_experiment_identifier(experiment, reflection_table)
    logger.info(
        "The scaling model type being applied is %s. \n",
        experiment.scaling_model.id_,
    )
    try:
        reflection_table = cls.filter_bad_reflections(
            reflection_table,
            partiality_cutoff=params.cut_data.partiality_cutoff,
            min_isigi=params.cut_data.min_isigi,
            intensity_choice=params.reflection_selection.intensity_choice,
        )
    except ValueError:
        raise BadDatasetForScalingException
    # combine partial measurements of same reflection, to handle those reflections
    # that were split by dials.integrate - changes size of reflection table.
    reflection_table = sum_partial_reflections(reflection_table)
    if "inverse_scale_factor" not in reflection_table:
        reflection_table["inverse_scale_factor"] = flex.double(
            reflection_table.size(), 1.0
        )
    elif (
        reflection_table["inverse_scale_factor"].count(0.0)
        == reflection_table.size()
    ):
        reflection_table["inverse_scale_factor"] = flex.double(
            reflection_table.size(), 1.0
        )
    reflection_table = choose_initial_scaling_intensities(
        reflection_table, params.reflection_selection.intensity_choice
    )
    excluded_for_scaling = reflection_table.get_flags(
        reflection_table.flags.excluded_for_scaling
    )
    user_excluded = reflection_table.get_flags(
        reflection_table.flags.user_excluded_in_scaling
    )
    reasons = Reasons()
    reasons.add_reason("user excluded", user_excluded.count(True))
    reasons.add_reason("excluded for scaling", excluded_for_scaling.count(True))
    n_excluded = (excluded_for_scaling | user_excluded).count(True)
    if n_excluded == reflection_table.size():
        logger.info("All reflections were determined to be unsuitable for scaling.")
        logger.info(reasons)
        raise BadDatasetForScalingException(
            """Unable to use this dataset for scaling"""
        )
    else:
        logger.info(
            "Excluding %s/%s reflections\n%s",
            n_excluded,
            reflection_table.size(),
            reasons,
        )
    if params.reflection_selection.method == "intensity_ranges":
        reflection_table = quasi_normalisation(reflection_table, experiment)
    if (
        params.reflection_selection.method in (None, Auto, "auto", "quasi_random")
    ) or (
        experiment.scaling_model.id_ == "physical"
        and "absorption" in experiment.scaling_model.components
    ):
        if experiment.scan:
            reflection_table = calc_crystal_frame_vectors(
                reflection_table, experiment
            )
            alignment_axis = (1.0, 0.0, 0.0)
            reflection_table["s0c"] = align_axis_along_z(
                alignment_axis, reflection_table["s0c"]
            )
            reflection_table["s1c"] = align_axis_along_z(
                alignment_axis, reflection_table["s1c"]
            )
    try:
        scaler = SingleScaler(params, experiment, reflection_table, for_multi)
    except BadDatasetForScalingException as e:
        raise ValueError(e)
    else:
        return scaler
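

# Illustrative sketch (assumption: this classmethod lives on a scaler factory class,
# here called SingleScalerFactory, as suggested by the docstring). A caller would
# typically obtain a scaler via the factory rather than instantiating SingleScaler
# directly, and handle the rejection paths raised above:
#
#     try:
#         scaler = SingleScalerFactory.create(params, experiment, reflection_table)
#     except (ValueError, BadDatasetForScalingException) as e:
#         logger.warning("Dataset rejected for scaling: %s", e)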