def calculate_scaling_subset_ranges_with_E2(reflection_table, params):
    """Select reflections with non-zero weight and update scale weights.

    Starts from all reflections not flagged as excluded (by the user or by
    prior processing), optionally narrows by the E^2 range when normalised
    intensities have been calculated, then applies the common range
    selections. Raises BadDatasetForScalingException if nothing survives.
    """
    reasons = Reasons()
    user_excluded = reflection_table.get_flags(
        reflection_table.flags.user_excluded_in_scaling)
    excluded = reflection_table.get_flags(
        reflection_table.flags.excluded_for_scaling)
    # ~a & ~b == ~(a | b): keep only reflections carrying neither flag.
    selection = ~(user_excluded | excluded)
    reasons.add_reason("suitable/selected for scaling", selection.count(True))
    n_suitable = reflection_table.size()
    # "Esq" all equal to 1.0 means quasi-normalisation was never performed,
    # in which case an E^2 cutoff would be meaningless.
    if reflection_table["Esq"].count(1.0) != n_suitable:
        e2_sel, e2_reason = _determine_E2_range_selection(reflection_table, params)
        reasons.add_reason(e2_reason, e2_sel.count(True))
        selection &= e2_sel
    range_sel, reasons = _common_range_selections(reasons, reflection_table, params)
    selection &= range_sel
    n_selected = selection.count(True)
    logger.info(
        "%s reflections were selected for scale factor determination \n"
        "out of %s suitable reflections: \n%s",
        n_selected,
        n_suitable,
        reasons,
    )
    if n_selected == 0:
        raise BadDatasetForScalingException(
            """No reflections pass all user-controllable selection criteria""")
    return selection
def calculate_scaling_subset_connected(global_Ih_table, experiment, params,
                                       preselection=None, print_summary=False):
    """Determine the selection for the reflection table of suitable reflections.

    A preselection can be given, which is applied to the global_Ih_table
    before determining the connected subset."""
    quasi_random = params.reflection_selection.quasi_random
    # Optionally restrict the table before looking for the connected subset.
    working_table = (
        global_Ih_table.select(preselection) if preselection else global_Ih_table
    )
    chosen_indices = select_highly_connected_reflections(
        working_table,
        experiment,
        quasi_random.min_per_area[0],
        quasi_random.n_resolution_bins[0],
        print_summary,
    )
    # Map the chosen indices back onto a boolean selection over the full table.
    suitable_selection = flex.bool(global_Ih_table.size, False)
    suitable_selection.set_selected(chosen_indices, True)
    n_chosen = suitable_selection.count(True)
    if print_summary:
        logger.info(
            "%s reflections were selected for scale factor determination \n"
            "out of %s suitable reflections. ",
            n_chosen,
            global_Ih_table.size,
        )
    if n_chosen == 0:
        raise BadDatasetForScalingException(
            """No reflections pass all user-controllable selection criteria""")
    return suitable_selection
def calculate_scaling_subset_ranges(reflection_table, params, print_summary=False):
    """Apply the common range selections and return the resulting selection.

    Raises BadDatasetForScalingException when no reflections pass the
    user-controllable selection criteria.
    """
    selection, reasons = _common_range_selections(Reasons(), reflection_table, params)
    n_selected = selection.count(True)
    if print_summary:
        logger.info(
            "%s reflections were preselected for scale factor determination \n"
            "out of %s suitable reflections: \n%s",
            n_selected,
            reflection_table.size(),
            reasons,
        )
    if n_selected == 0:
        raise BadDatasetForScalingException(
            """No reflections pass all user-controllable selection criteria""")
    return selection
def calculate_scaling_subset_connected(Ih_table, experiment, params, print_summary=False):
    """Return the indices of a highly connected subset of the Ih_table.

    Raises BadDatasetForScalingException when no reflections are selected.
    """
    quasi_random = params.reflection_selection.quasi_random
    indices = select_highly_connected_reflections(
        Ih_table,
        experiment,
        quasi_random.min_per_area[0],
        quasi_random.n_resolution_bins[0],
        print_summary,
    )
    if not indices.size():
        raise BadDatasetForScalingException(
            """No reflections pass all user-controllable selection criteria""")
    return indices
def create(cls, params, experiment, reflection_table, for_multi=False):
    """Perform reflection_table preprocessing and create a SingleScaler."""
    cls.ensure_experiment_identifier(params, experiment, reflection_table)
    logger.info(
        "Preprocessing data for scaling. The id assigned to this \n"
        "dataset is %s, and the scaling model type being applied is %s. \n",
        list(reflection_table.experiment_identifiers().values())[0],
        experiment.scaling_model.id_,
    )
    # Filter out reflections unsuitable for scaling; 'reasons' accumulates
    # counts used for the summary log below.
    reflection_table, reasons = cls.filter_bad_reflections(
        reflection_table)
    # Ensure a usable inverse_scale_factor column: create it if missing, and
    # reset it to 1.0 if every entry is zero (an all-zero column would make
    # the scales degenerate).
    if "inverse_scale_factor" not in reflection_table:
        reflection_table["inverse_scale_factor"] = flex.double(
            reflection_table.size(), 1.0)
    elif (reflection_table["inverse_scale_factor"].count(0.0) ==
          reflection_table.size()):
        reflection_table["inverse_scale_factor"] = flex.double(
            reflection_table.size(), 1.0)
    reflection_table = choose_scaling_intensities(
        reflection_table, params.reflection_selection.intensity_choice)
    excluded_for_scaling = reflection_table.get_flags(
        reflection_table.flags.excluded_for_scaling)
    user_excluded = reflection_table.get_flags(
        reflection_table.flags.user_excluded_in_scaling)
    reasons.add_reason("user excluded", user_excluded.count(True))
    reasons.add_reason("excluded for scaling", excluded_for_scaling.count(True))
    n_excluded = (excluded_for_scaling | user_excluded).count(True)
    # If every reflection is excluded there is nothing to scale.
    if n_excluded == reflection_table.size():
        logger.info(
            "All reflections were determined to be unsuitable for scaling."
        )
        logger.info(reasons)
        raise BadDatasetForScalingException(
            """Unable to use this dataset for scaling""")
    else:
        logger.info(
            "%s/%s reflections not suitable for scaling\n%s",
            n_excluded,
            reflection_table.size(),
            reasons,
        )
    # For multi-dataset scaling the selection parameters are determined at a
    # higher level, so only do it here for a single dataset.
    if not for_multi:
        determine_reflection_selection_parameters(params, [experiment],
                                                  [reflection_table])
    if params.reflection_selection.method == "intensity_ranges":
        reflection_table = quasi_normalisation(reflection_table, experiment)
    # Crystal-frame vectors are needed either for quasi-random selection or
    # for a physical model with an absorption component.
    if (params.reflection_selection.method in
            (None, Auto, "auto", "quasi_random")) or (
            experiment.scaling_model.id_ == "physical"
            and "absorption" in experiment.scaling_model.components):
        if experiment.scan:
            # calc theta and phi cryst: phi derives from the observed frame
            # number (z of xyzobs.px.value) times the oscillation width.
            reflection_table["phi"] = (
                reflection_table["xyzobs.px.value"].parts()[2]
                * experiment.scan.get_oscillation()[1])
            reflection_table = calc_crystal_frame_vectors(
                reflection_table, experiment)
    return SingleScaler(params, experiment, reflection_table, for_multi)
def create(cls, params, experiment, reflection_table, for_multi=False):
    """Perform reflection_table preprocessing and create a SingleScaler.

    Filters unsuitable reflections, merges partial observations, prepares the
    inverse_scale_factor column and initial intensities, logs exclusion
    statistics, and (when needed) computes crystal-frame vectors before
    constructing the SingleScaler.

    Raises:
        BadDatasetForScalingException: if filtering rejects the whole dataset
            or every reflection is flagged as excluded.
        ValueError: if SingleScaler construction itself reports a bad dataset.
    """
    cls.ensure_experiment_identifier(experiment, reflection_table)
    logger.info(
        "The scaling model type being applied is %s. \n",
        experiment.scaling_model.id_,
    )
    try:
        reflection_table = cls.filter_bad_reflections(
            reflection_table,
            partiality_cutoff=params.cut_data.partiality_cutoff,
            min_isigi=params.cut_data.min_isigi,
            intensity_choice=params.reflection_selection.intensity_choice,
        )
    except ValueError as e:
        # Fix: the original raised the bare exception class, discarding the
        # underlying message. Preserve it and chain the cause explicitly.
        raise BadDatasetForScalingException(str(e)) from e
    # combine partial measurements of same reflection, to handle those reflections
    # that were split by dials.integrate - changes size of reflection table.
    reflection_table = sum_partial_reflections(reflection_table)
    # Ensure a usable inverse_scale_factor column: create it if missing, or
    # reset it to 1.0 if every entry is zero (the two original branches did
    # the identical assignment, so they are merged here).
    if ("inverse_scale_factor" not in reflection_table) or (
            reflection_table["inverse_scale_factor"].count(0.0)
            == reflection_table.size()):
        reflection_table["inverse_scale_factor"] = flex.double(
            reflection_table.size(), 1.0)
    reflection_table = choose_initial_scaling_intensities(
        reflection_table, params.reflection_selection.intensity_choice)
    excluded_for_scaling = reflection_table.get_flags(
        reflection_table.flags.excluded_for_scaling)
    user_excluded = reflection_table.get_flags(
        reflection_table.flags.user_excluded_in_scaling)
    reasons = Reasons()
    reasons.add_reason("user excluded", user_excluded.count(True))
    reasons.add_reason("excluded for scaling", excluded_for_scaling.count(True))
    n_excluded = (excluded_for_scaling | user_excluded).count(True)
    # If every reflection is excluded there is nothing to scale.
    if n_excluded == reflection_table.size():
        logger.info(
            "All reflections were determined to be unsuitable for scaling."
        )
        logger.info(reasons)
        raise BadDatasetForScalingException(
            """Unable to use this dataset for scaling""")
    else:
        logger.info(
            "Excluding %s/%s reflections\n%s",
            n_excluded,
            reflection_table.size(),
            reasons,
        )
    if params.reflection_selection.method == "intensity_ranges":
        reflection_table = quasi_normalisation(reflection_table, experiment)
    # Crystal-frame vectors are needed either for quasi-random selection or
    # for a physical model with an absorption component.
    if (params.reflection_selection.method in
            (None, Auto, "auto", "quasi_random")) or (
            experiment.scaling_model.id_ == "physical"
            and "absorption" in experiment.scaling_model.components):
        if experiment.scan:
            reflection_table = calc_crystal_frame_vectors(
                reflection_table, experiment)
            # Rotate s0c/s1c so the chosen axis lies along z for the
            # absorption-surface parameterisation.
            alignment_axis = (1.0, 0.0, 0.0)
            reflection_table["s0c"] = align_axis_along_z(
                alignment_axis, reflection_table["s0c"])
            reflection_table["s1c"] = align_axis_along_z(
                alignment_axis, reflection_table["s1c"])
    try:
        scaler = SingleScaler(params, experiment, reflection_table, for_multi)
    except BadDatasetForScalingException as e:
        # Translate to ValueError for callers of create(); chain the cause.
        raise ValueError(e) from e
    else:
        return scaler