Example #1
0
 def calculate_merging_stats(self):
     """Compute merging statistics for the scaled miller array.

     On success the normal and anomalous results are stored on
     ``self.merging_statistics_result`` and
     ``self.anom_merging_statistics_result``; a
     ``DialsMergingStatisticsError`` is logged rather than propagated.
     """
     try:
         results = merging_stats_from_scaled_array(
             self.scaled_miller_array,
             self.params.output.merging.nbins,
             self.params.output.use_internal_variance,
         )
     except DialsMergingStatisticsError as e:
         logger.info(e)
     else:
         (
             self.merging_statistics_result,
             self.anom_merging_statistics_result,
         ) = results
Example #2
0
 def stats_only(reflections, experiments, params):
     """Calculate and print merging stats."""
     # Use the user-supplied unit cell if given, otherwise determine one
     # from the experiments.
     unit_cell = params.reflection_selection.best_unit_cell
     if not unit_cell:
         unit_cell = determine_best_unit_cell(experiments)
     miller_array = scaled_data_as_miller_array(
         reflections,
         experiments,
         best_unit_cell=unit_cell,
     )
     try:
         overall_stats, _ = merging_stats_from_scaled_array(
             miller_array,
             params.output.merging.nbins,
             params.output.use_internal_variance,
         )
         logger.info(make_merging_statistics_summary(overall_stats))
     except DialsMergingStatisticsError as e:
         logger.info(e)
Example #3
0
def test_table_1_summary(dials_data):
    """Check table_1_summary output for each combination of inputs."""
    data_dir = dials_data("l_cysteine_4_sweeps_scaled")
    experiments = load.experiment_list(
        data_dir.join("scaled_20_25.expt"), check_format=False
    )
    reflections = flex.reflection_table.from_file(
        data_dir.join("scaled_20_25.refl")
    )
    # Build a miller array of real data and calculate an iotbx.merging_statistics
    miller_array = scaled_data_as_miller_array([reflections], experiments)
    stats, anom_stats = merging_stats_from_scaled_array(miller_array)

    def _verify(summary, with_suggested):
        # Something should always be returned, containing the bin headers.
        assert summary
        for word in ("Overall", "Low", "High"):
            assert word in summary
        if with_suggested:
            assert "Suggested" in summary
        else:
            assert "Suggested" not in summary

    ### Case of overall statistics summary
    _verify(table_1_summary(stats, anom_stats), with_suggested=False)
    ### Case of overall and suggested statistics summary (with anom)
    _verify(
        table_1_summary(stats, anom_stats, stats, anom_stats),
        with_suggested=True,
    )
    ### Case of no anomalous, but with suggested as well as overall.
    _verify(
        table_1_summary(stats, selected_statistics=stats),
        with_suggested=True,
    )
Example #4
0
def print_scaling_summary(script):
    """Log summary information after scaling.

    Reports, in order: scaling-model error summaries, any image ranges
    excluded during scaling, a table of reflection partialities, the
    merging statistics, and — when a CC1/2 resolution fit succeeds — a
    suggested resolution limit with statistics recalculated at that cutoff.
    """
    logger.info(print_scaling_model_error_summary(script.experiments))
    valid_ranges = get_valid_image_ranges(script.experiments)
    image_ranges = get_image_ranges(script.experiments)
    msg = []
    # Report each experiment whose valid image range no longer spans the
    # full recorded range, i.e. images were excluded during scaling.
    for (img, valid, refl) in zip(image_ranges, valid_ranges,
                                  script.reflections):
        if valid:
            # More than one valid sub-range, or endpoints differing from
            # the full image range, both indicate excluded images.
            if len(valid
                   ) > 1 or valid[0][0] != img[0] or valid[-1][1] != img[1]:
                msg.append(
                    "Excluded images for experiment id: %s, image range: %s, limited range: %s"
                    % (
                        refl.experiment_identifiers().keys()[0],
                        list(img),
                        list(valid),
                    ))
    if msg:
        msg = ["Summary of image ranges removed:"] + msg
        logger.info("\n".join(msg))

    # report on partiality of dataset
    partials = flex.double()
    for r in script.reflections:
        if "partiality" in r:
            partials.extend(r["partiality"])
    # NOTE(review): the "p > 0.99" / "p < 0.01" rows below are counted via
    # count(False) on these complements, so boundary values (p == 0.99,
    # p == 0.01) land in the outer rows rather than the middle ones.
    not_full_sel = partials < 0.99
    not_zero_sel = partials > 0.01
    gt_half = partials > 0.5
    lt_half = partials < 0.5
    partial_gt_half_sel = not_full_sel & gt_half
    partial_lt_half_sel = not_zero_sel & lt_half
    logger.info("Summary of dataset partialities")
    header = ["Partiality (p)", "n_refl"]
    rows = [
        ["all reflections", str(partials.size())],
        ["p > 0.99", str(not_full_sel.count(False))],
        ["0.5 < p < 0.99",
         str(partial_gt_half_sel.count(True))],
        ["0.01 < p < 0.5",
         str(partial_lt_half_sel.count(True))],
        ["p < 0.01", str(not_zero_sel.count(False))],
    ]
    logger.info(tabulate(rows, header))
    logger.info(
        """
Reflections below a partiality_cutoff of %s are not considered for any
part of the scaling analysis or for the reporting of merging statistics.
Additionally, if applicable, only reflections with a min_partiality > %s
were considered for use when refining the scaling model.
""",
        script.params.cut_data.partiality_cutoff,
        script.params.reflection_selection.min_partiality,
    )
    stats = script.merging_statistics_result
    if stats:
        anom_stats, cut_stats, cut_anom_stats = (None, None, None)
        # Anomalous statistics are only reported for acentric space groups.
        if not script.scaled_miller_array.space_group().is_centric():
            anom_stats = script.anom_merging_statistics_result
        logger.info(make_merging_statistics_summary(stats))
        try:
            d_min = resolution_cc_half(stats, limit=0.3).d_min
        except RuntimeError as e:
            # A failed CC1/2 fit is non-fatal; just skip the suggestion.
            logger.debug(f"Resolution fit failed: {e}")
        else:
            max_current_res = stats.bins[-1].d_min
            # Only suggest a cutoff when it is meaningfully (> 0.005)
            # above the current high-resolution limit of the data.
            if d_min and d_min - max_current_res > 0.005:
                logger.info(
                    "Resolution limit suggested from CC" + "\u00BD" +
                    " fit (limit CC" + "\u00BD" + "=0.3): %.2f",
                    d_min,
                )
                # Recompute merging statistics at the suggested cutoff;
                # any failure here simply omits the "Suggested" columns.
                try:
                    cut_stats, cut_anom_stats = merging_stats_from_scaled_array(
                        script.scaled_miller_array.resolution_filter(
                            d_min=d_min),
                        script.params.output.merging.nbins,
                        script.params.output.use_internal_variance,
                    )
                except DialsMergingStatisticsError:
                    pass
                else:
                    if script.scaled_miller_array.space_group().is_centric():
                        cut_anom_stats = None
        logger.info(
            table_1_summary(stats, anom_stats, cut_stats, cut_anom_stats))
Example #5
0
File: merge.py  Project: TiankunZhou/dials
def merge(
    experiments,
    reflections,
    d_min=None,
    d_max=None,
    combine_partials=True,
    partiality_threshold=0.4,
    best_unit_cell=None,
    anomalous=True,
    use_internal_variance=False,
    assess_space_group=False,
    n_bins=20,
):
    """
    Merge reflection table data and generate a summary of the merging statistics.

    This procedure filters the input data, merges the data (normal and optionally
    anomalous), assesses the space group symmetry and generates a summary
    of the merging statistics.
    """

    logger.info("\nMerging scaled reflection data\n")
    # Remove bad reflections first, using the dials.util.filter methods.
    filtered = filter_reflection_table(
        reflections,
        intensity_choice=["scale"],
        d_min=d_min,
        d_max=d_max,
        combine_partials=combine_partials,
        partiality_threshold=partiality_threshold,
    )
    # The scale factor has already been applied, so reset it to 1.0 - okay
    # as the scale factor is not written into the merged mtz.
    filtered["inverse_scale_factor"] = flex.double(filtered.size(), 1.0)

    scaled_array = scaled_data_as_miller_array(
        [filtered], experiments, best_unit_cell
    )
    # Note, merge_equivalents does not raise an error if data is unique.
    mean_merged = scaled_array.merge_equivalents(
        use_internal_variance=use_internal_variance
    ).array()

    anom_merged = None
    if anomalous:
        anom_merged = (
            scaled_array.as_anomalous_array()
            .merge_equivalents(use_internal_variance=use_internal_variance)
            .array()
        )

    # Before merge, do assessment of the space_group.
    if assess_space_group:
        merged_table = flex.reflection_table()
        merged_table["intensity"] = mean_merged.data()
        merged_table["variance"] = flex.pow2(mean_merged.sigmas())
        merged_table["miller_index"] = mean_merged.indices()
        logger.info("Running systematic absences check")
        run_systematic_absences_checks(experiments, merged_table)

    # Summarise the merging statistics; a failure yields a None summary.
    try:
        stats, anom_stats = merging_stats_from_scaled_array(
            scaled_array,
            n_bins,
            use_internal_variance,
        )
    except DialsMergingStatisticsError as e:
        logger.error(e, exc_info=True)
        stats_summary = None
    else:
        stats_summary = make_merging_statistics_summary(stats) + table_1_summary(
            stats, anom_stats
        )

    return mean_merged, anom_merged, stats_summary
Example #6
0
File: merge.py  Project: hattne/dials
def merge_and_truncate(params, experiments, reflections):
    """Filter data, assess space group, run french wilson and Wilson stats.

    Returns a tuple of (merged, merged_anom, amplitudes, anom_amplitudes);
    the anomalous/amplitude entries are None when not requested via params.
    """

    logger.info("\nMerging scaled reflection data\n")
    # first filter bad reflections using dials.util.filter methods
    reflections = filter_reflection_table(
        reflections[0],
        intensity_choice=["scale"],
        d_min=params.d_min,
        combine_partials=params.combine_partials,
        partiality_threshold=params.partiality_threshold,
    )
    # ^ scale factor has been applied, so now set to 1.0 - okay as not
    # going to output scale factor in merged mtz.
    reflections["inverse_scale_factor"] = flex.double(reflections.size(), 1.0)

    scaled_array = scaled_data_as_miller_array([reflections], experiments)
    if params.anomalous:
        anomalous_scaled = scaled_array.as_anomalous_array()

    merged = scaled_array.merge_equivalents(
        use_internal_variance=params.merging.use_internal_variance).array()
    merged_anom = None
    if params.anomalous:
        merged_anom = anomalous_scaled.merge_equivalents(
            use_internal_variance=params.merging.use_internal_variance).array(
            )

    # Before merge, do some assessment of the space_group
    if params.assess_space_group:
        merged_reflections = flex.reflection_table()
        merged_reflections["intensity"] = merged.data()
        merged_reflections["variance"] = merged.sigmas()**2
        merged_reflections["miller_index"] = merged.indices()
        logger.info("Running systematic absences check")
        run_sys_abs_checks(experiments, merged_reflections)

    # Run the stats on truncating on anomalous or non anomalous?
    if params.anomalous:
        intensities = merged_anom
    else:
        intensities = merged

    assert intensities.is_xray_intensity_array()
    amplitudes = None
    anom_amplitudes = None
    if params.truncate:
        logger.info("\nScaling input intensities via French-Wilson Method")
        # Divert french_wilson's own output into the debug log via StringIO.
        out = StringIO()
        if params.anomalous:
            anom_amplitudes = intensities.french_wilson(params=params, log=out)
            n_removed = intensities.size() - anom_amplitudes.size()
            assert anom_amplitudes.is_xray_amplitude_array()
            # Also derive non-anomalous amplitudes from the anomalous set.
            amplitudes = anom_amplitudes.as_non_anomalous_array()
            amplitudes = amplitudes.merge_equivalents().array()
        else:
            amplitudes = intensities.french_wilson(params=params, log=out)
            n_removed = intensities.size() - amplitudes.size()
        logger.info("Total number of rejected intensities %s", n_removed)
        logger.debug(out.getvalue())

    if params.reporting.wilson_stats:
        # Wilson statistics are only meaningful for acentric space groups.
        if not intensities.space_group().is_centric():
            wilson_scaling = data_statistics.wilson_scaling(
                miller_array=intensities,
                n_residues=params.n_residues)  # XXX default n_residues?
            # Divert output through logger - do with StringIO rather than
            # info_handle else get way too much whitespace in output.
            out = StringIO()
            wilson_scaling.show(out=out)
            logger.info(out.getvalue())

    # Apply wilson B to give absolute scale?

    # Show merging stats again.
    if params.reporting.merging_stats:
        stats, anom_stats = merging_stats_from_scaled_array(
            scaled_array, params.merging.n_bins,
            params.merging.use_internal_variance)
        if params.merging.anomalous:
            logger.info(make_merging_statistics_summary(anom_stats))
        else:
            logger.info(make_merging_statistics_summary(stats))

    return merged, merged_anom, amplitudes, anom_amplitudes