Example #1
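This method plots a histogram of the per-reflection |ΔXY| residual norms (calculated minus observed positions). It prints the overall RMSD, the histogram mode, mean, and median, plus the radial and transverse RMSDs, and overlays the mean and RMSD expected for a Rayleigh distribution whose scale is estimated from the histogram mode.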
  def histogram(self, reflections, title):
    data = reflections['difference_vector_norms']
    n_slots = 100
    if self.params.residuals.histogram_max is None:
      h = flex.histogram(data, n_slots=n_slots)
    else:
      h = flex.histogram(data.select(data <= self.params.residuals.histogram_max), n_slots=n_slots)

    n = len(reflections)
    rmsd = math.sqrt((reflections['xyzcal.mm']-reflections['xyzobs.mm.value']).sum_sq()/n)
    sigma = mode = h.slot_centers()[list(h.slots()).index(flex.max(h.slots()))]
    mean = flex.mean(data)
    median = flex.median(data)
    print "RMSD (microns)", rmsd * 1000
    print "Histogram mode (microns):", mode * 1000
    print "Overall mean (microns):", mean * 1000
    print "Overall median (microns):", median * 1000
    mean2 = math.sqrt(math.pi/2)*sigma
    rmsd2 = math.sqrt(2)*sigma
    print "Rayleigh Mean (microns)", mean2 * 1000
    print "Rayleigh RMSD (microns)", rmsd2 * 1000

    r = reflections['radial_displacements']
    t = reflections['transverse_displacements']
    print "Overall radial RMSD (microns)", math.sqrt(flex.sum_sq(r)/len(r)) * 1000
    print "Overall transverse RMSD (microns)", math.sqrt(flex.sum_sq(t)/len(t)) * 1000

    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.plot(h.slot_centers().as_numpy_array(), h.slots().as_numpy_array(), '-')

    vmax = self.params.residuals.plot_max
    if self.params.residuals.histogram_xmax is not None:
      ax.set_xlim((0,self.params.residuals.histogram_xmax))
    if self.params.residuals.histogram_ymax is not None:
      ax.set_ylim((0,self.params.residuals.histogram_ymax))
    plt.title(title)


    ax.plot((mean, mean), (0, flex.max(h.slots())), 'g-')
    ax.plot((mean2, mean2), (0, flex.max(h.slots())), 'g--')
    ax.plot((mode, mode), (0, flex.max(h.slots())), 'r-')
    ax.plot((rmsd, rmsd), (0, flex.max(h.slots())), 'b-')
    ax.plot((rmsd2, rmsd2), (0, flex.max(h.slots())), 'b--')

    ax.legend([r"$\Delta$XY", "MeanObs", "MeanRayl", "Mode", "RMSDObs", "RMSDRayl"])
    ax.set_xlabel("(mm)")
    ax.set_ylabel("Count")
Example #2
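This method extends a graph of indexed-spot candidates. For each candidate stem it compares observed and expected reciprocal-space distances to the vertices already in the graph, rejects candidates whose distance residuals exceed the quadrature sum of the tolerated d* bands or whose relps (together with the origin) deviate too far from a common plane, and returns a new graph for each accepted candidate.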
    def _extend_by_candidates(self, graph):

        existing_ids = [e["spot_id"] for e in graph.vertices]
        obs_relps = [matrix.col(self.spots[e]["rlp"]) for e in existing_ids]
        exp_relps = [e["rlp_datum"] for e in graph.vertices]

        result = []

        for cand in self.stems:
            # Don't check spots already matched
            if cand["spot_id"] in existing_ids:
                continue

            # Compare expected reciprocal space distances with observed distances
            cand_rlp = matrix.col(self.spots[cand["spot_id"]]["rlp"])
            cand_vec = cand["rlp_datum"]

            obs_dists = [(cand_rlp - rlp).length() for rlp in obs_relps]
            exp_dists = [(vec - cand_vec).length() for vec in exp_relps]

            residual_dist = [
                abs(a - b) for (a, b) in zip(obs_dists, exp_dists)
            ]

            # If any of the distance differences is larger than the sum in quadrature
            # of the tolerated d* bands then reject the candidate
            sq_candidate_band = self.spots[cand["spot_id"]]["d_star_band2"]
            bad_candidate = False
            for r_dist, spot_id in zip(residual_dist, existing_ids):
                sq_relp_band = self.spots[spot_id]["d_star_band2"]
                if r_dist > math.sqrt(sq_relp_band + sq_candidate_band):
                    bad_candidate = True
                    break
            if bad_candidate:
                continue

            # Calculate co-planarity of the relps, including the origin
            points = flex.vec3_double(exp_relps + [cand_vec, (0.0, 0.0, 0.0)])
            plane = least_squares_plane(points)
            plane_score = flex.sum_sq(
                points.dot(plane.normal) - plane.distance_to_origin)

            # Reject if the group of relps are too far from lying in a single plane.
            # This cut-off was determined by trial and error using simulated images.
            if plane_score > 6e-7:
                continue

            # Construct a graph including the accepted candidate node
            g = graph.factory_add_vertex(
                {
                    "spot_id": cand["spot_id"],
                    "miller_index": cand["miller_index"],
                    "rlp_datum": cand["rlp_datum"],
                },
                weights_to_other=residual_dist,
            )

            result.append(g)

        return result
Example #3
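This method bins reflections into 30 roughly equally populated two-theta bins and plots, per bin, the radial-to-transverse RMSD ratio, the overall/radial/transverse RMSDs, and the RMSD of observed minus calculated two theta.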
  def plot_data_by_two_theta(self, reflections, tag):
    n_bins = 30
    arbitrary_padding = 1
    sorted_two_theta = flex.sorted(reflections['two_theta_obs'])
    bin_low = [sorted_two_theta[int((len(sorted_two_theta)/n_bins) * i)] for i in range(n_bins)]
    bin_high = [bin_low[i+1] for i in range(n_bins-1)]
    bin_high.append(sorted_two_theta[-1]+arbitrary_padding)

    title = "%sBinned data by two theta (n reflections per bin: %.1f)"%(tag, len(sorted_two_theta)/n_bins)

    x = flex.double()
    x_centers = flex.double()
    n_refls = flex.double()
    rmsds = flex.double()
    radial_rmsds = flex.double()
    transverse_rmsds = flex.double()
    rt_ratio = flex.double()
    #delta_two_theta = flex.double()
    rmsd_delta_two_theta = flex.double()

    for i in range(n_bins):
      x_centers.append(((bin_high[i]-bin_low[i])/2) + bin_low[i])
      refls = reflections.select((reflections['two_theta_obs'] >= bin_low[i]) & (reflections['two_theta_obs'] < bin_high[i]))
      n = len(refls)
      n_refls.append(n)
      rmsds.append(1000*math.sqrt(flex.sum_sq(refls['difference_vector_norms'])/n))
      radial_rmsds.append(1000*math.sqrt(flex.sum_sq(refls['radial_displacements'])/n))
      transverse_rmsds.append(1000*math.sqrt(flex.sum_sq(refls['transverse_displacements'])/n))
      rt_ratio.append(radial_rmsds[-1]/transverse_rmsds[-1])
      rmsd_delta_two_theta.append(math.sqrt(flex.sum_sq(refls['two_theta_obs']-refls['two_theta_cal'])/n))
      #delta_two_theta.append(flex.mean(refls['two_theta_obs']-refls['two_theta_cal']))
    assert len(reflections) == flex.sum(n_refls)

    self.plot_multi_data(x_centers,
                         [rt_ratio, (rmsds, radial_rmsds, transverse_rmsds), rmsd_delta_two_theta],
                         "Two theta (degrees)",
                         ["R/T RMSD ratio",
                          ("Overall RMSD","Radial RMSD","Transverse RMSD"),
                          "RMSD delta two theta"],
                         ["R/T RMSD ratio",
                          "Overall, radial, transverse RMSD (microns)",
                          "Delta two theta RMSD (degrees)"],
                         title)
Example #4
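This method computes the positional RMSD of each image (grouping reflections by experiment id) and displays the distribution of per-image RMSDs as a histogram and a box plot.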
  def image_rmsd_histogram(self, reflections, tag):
    data = flex.double()
    for i in set(reflections['id']):
      refls = reflections.select(reflections['id']==i)
      if len(refls) == 0:
        continue
      rmsd = math.sqrt(flex.sum_sq(refls['difference_vector_norms'])/len(refls))
      data.append(rmsd)
    data *= 1000
    h = flex.histogram(data, n_slots=40)
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.plot(h.slot_centers().as_numpy_array(), h.slots().as_numpy_array(), '-')
    plt.title("%sHistogram of image RMSDs"%tag)
    ax.set_xlabel("RMSD (microns)")
    ax.set_ylabel("Count")

    fig = plt.figure()
    ax = fig.add_subplot(111)
    plt.boxplot(data, vert=False)
    plt.title("%sBoxplot of image RMSDs"%tag)
    ax.set_xlabel("RMSD (microns)")
Example #5
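This function filters experiments by per-image RMSD. It optionally restricts the analysis to a single detector, computes the RMSD of |ΔXY| for every experiment, rejects experiments whose RMSD exceeds Q3 + iqr_multiplier × IQR (plus any with zero reflections), optionally applies a delta-psi cut to the surviving reflections, and returns the filtered experiments and reflections.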
def run_with_preparsed(experiments, reflections, params):
    from dxtbx.model import ExperimentList
    from scitbx.math import five_number_summary

    print("Found", len(reflections), "reflections", "and", len(experiments),
          "experiments")

    filtered_reflections = flex.reflection_table()
    filtered_experiments = ExperimentList()

    skipped_reflections = flex.reflection_table()
    skipped_experiments = ExperimentList()

    if params.detector is not None:
        culled_reflections = flex.reflection_table()
        culled_experiments = ExperimentList()
        detector = experiments.detectors()[params.detector]
        for expt_id, experiment in enumerate(experiments):
            refls = reflections.select(reflections['id'] == expt_id)
            if experiment.detector is detector:
                culled_experiments.append(experiment)
                refls['id'] = flex.int(len(refls), len(culled_experiments) - 1)
                culled_reflections.extend(refls)
            else:
                skipped_experiments.append(experiment)
                refls['id'] = flex.int(len(refls),
                                       len(skipped_experiments) - 1)
                skipped_reflections.extend(refls)

        print("RMSD filtering %d experiments using detector %d, out of %d" %
              (len(culled_experiments), params.detector, len(experiments)))
        reflections = culled_reflections
        experiments = culled_experiments

    difference_vector_norms = (reflections['xyzcal.mm'] -
                               reflections['xyzobs.mm.value']).norms()

    if params.max_delta is not None:
        sel = difference_vector_norms <= params.max_delta
        reflections = reflections.select(sel)
        difference_vector_norms = difference_vector_norms.select(sel)

    data = flex.double()
    counts = flex.double()
    for i in range(len(experiments)):
        dvns = difference_vector_norms.select(reflections['id'] == i)
        counts.append(len(dvns))
        if len(dvns) == 0:
            data.append(0)
            continue
        rmsd = math.sqrt(flex.sum_sq(dvns) / len(dvns))
        data.append(rmsd)
    data *= 1000
    subset = data.select(counts > 0)
    print(len(subset), "experiments with > 0 reflections")

    if params.show_plots:
        h = flex.histogram(subset, n_slots=40)
        fig = plt.figure()
        ax = fig.add_subplot(111)
        ax.plot(h.slot_centers().as_numpy_array(),
                h.slots().as_numpy_array(), '-')
        plt.title("Histogram of %d image RMSDs" % len(subset))

        fig = plt.figure()
        plt.boxplot(subset, vert=False)
        plt.title("Boxplot of %d image RMSDs" % len(subset))
        plt.show()

    outliers = counts == 0
    min_x, q1_x, med_x, q3_x, max_x = five_number_summary(subset)
    print(
        "Five number summary of RMSDs (microns): min %.1f, q1 %.1f, med %.1f, q3 %.1f, max %.1f"
        % (min_x, q1_x, med_x, q3_x, max_x))
    iqr_x = q3_x - q1_x
    cut_x = params.iqr_multiplier * iqr_x
    outliers.set_selected(data > q3_x + cut_x, True)
    #outliers.set_selected(col < q1_x - cut_x, True) # Don't throw away the images that are outliers in the 'good' direction!

    for i in range(len(experiments)):
        if outliers[i]:
            continue
        refls = reflections.select(reflections['id'] == i)
        refls['id'] = flex.int(len(refls), len(filtered_experiments))
        filtered_reflections.extend(refls)
        filtered_experiments.append(experiments[i])

    #import IPython;IPython.embed()
    zeroes = counts == 0
    n_zero = len(counts.select(zeroes))
    print(
        "Removed %d bad experiments and %d experiments with zero reflections, out of %d (%%%.1f)"
        %
        (len(experiments) - len(filtered_experiments) - n_zero, n_zero,
         len(experiments), 100 *
         ((len(experiments) - len(filtered_experiments)) / len(experiments))))

    if params.detector is not None:
        crystals = filtered_experiments.crystals()
        for expt_id, experiment in enumerate(skipped_experiments):
            if experiment.crystal in crystals:
                filtered_experiments.append(experiment)
                refls = skipped_reflections.select(
                    skipped_reflections['id'] == expt_id)
                refls['id'] = flex.int(len(refls),
                                       len(filtered_experiments) - 1)
                filtered_reflections.extend(refls)

    if params.delta_psi_filter is not None:
        delta_psi = filtered_reflections['delpsical.rad'] * 180 / math.pi
        sel = (delta_psi <= params.delta_psi_filter) & (
            delta_psi >= -params.delta_psi_filter)
        l = len(filtered_reflections)
        filtered_reflections = filtered_reflections.select(sel)
        print("Filtering by delta psi, removing %d out of %d reflections" %
              (l - len(filtered_reflections), l))

    print("Final experiment count", len(filtered_experiments))
    return filtered_experiments, filtered_reflections
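The outlier rule above is one-sided: an experiment is dropped only when its per-image RMSD exceeds Q3 + iqr_multiplier × IQR, so unusually good images are kept. A minimal sketch of that cut with hypothetical values, using numpy in place of flex and five_number_summary:

import numpy as np

rmsds = np.array([1.2, 1.4, 1.5, 1.7, 9.0])  # hypothetical per-image RMSDs (microns)
q1, q3 = np.percentile(rmsds, [25, 75])
iqr_multiplier = 1.5  # hypothetical; the snippet takes this from params
keep = rmsds <= q3 + iqr_multiplier * (q3 - q1)
print(keep)  # only the high-side outlier (9.0) is rejected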
Example #6
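This nested helper bins reflections by observed two theta, keeps only reflections with I/sigI ≥ 5 in each bin, computes per-bin quartiles of |ΔXY| (in microns) and I/sigI, and draws median curves with interquartile shading onto previously created 'isigi' and 'deltaXY' figures.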
        def plotit(reflections, experiments):
            """
      Make the plots for a set of reflections and experiments.
      """
            detector = experiments.detectors()[0]
            beam = experiments.beams()[
                0]  # only used to compute resolution of 2theta
            reflections = reflections.select(
                reflections['intensity.sum.variance'] > 0)

            # Setup up deltaXY and two theta bins
            reflections['difference_vector_norms'] = (
                reflections['xyzcal.mm'] -
                reflections['xyzobs.mm.value']).norms()
            reflections = setup_stats(
                detector, experiments, reflections,
                two_theta_only=True)  # add two theta to reflection table
            sorted_two_theta = flex.sorted(reflections['two_theta_obs'])
            bin_low = [
                sorted_two_theta[int((len(sorted_two_theta) / n_bins) * i)]
                for i in range(n_bins)
            ]
            bin_high = [bin_low[i + 1] for i in range(n_bins - 1)]
            bin_high.append(sorted_two_theta[-1] + arbitrary_padding)

            x_centers = flex.double()
            n_refls = flex.int()
            rmsds = flex.double()
            p25r = flex.double()
            p50r = flex.double()
            p75r = flex.double()
            p25i = flex.double()
            p50i = flex.double()
            p75i = flex.double()
            print("# 2theta Res N dXY IsigI")

            # Compute stats for each bin
            for i in range(n_bins):
                refls = reflections.select(
                    (reflections['two_theta_obs'] >= bin_low[i])
                    & (reflections['two_theta_obs'] < bin_high[i]))
                # Only compute deltaXY stats on reflections with I/sigI at least 5
                i_sigi = refls['intensity.sum.value'] / flex.sqrt(
                    refls['intensity.sum.variance'])
                refls = refls.select(i_sigi >= 5)
                n = len(refls)
                if n < 10: continue
                min_r, q1_r, med_r, q3_r, max_r = five_number_summary(
                    1000 * refls['difference_vector_norms'])

                n_refls.append(n)

                rmsds_ = 1000 * math.sqrt(
                    flex.sum_sq(refls['difference_vector_norms']) / n)

                min_i, q1_i, med_i, q3_i, max_i = five_number_summary(i_sigi)
                p25i.append(q1_i)
                p50i.append(med_i)
                p75i.append(q3_i)
                # x_center
                c = ((bin_high[i] - bin_low[i]) / 2) + bin_low[i]
                # resolution
                d = beam.get_wavelength() / (2 * math.sin(math.pi * c /
                                                          (2 * 180)))
                x_centers.append(c)
                rmsds.append(rmsds_)
                print("%d % 5.1f % 5.1f % 8d %.1f %.1f" %
                      (i, c, d, n, med_r, med_i))
                p25r.append(q1_r)
                p50r.append(med_r)
                p75r.append(q3_r)

            # After binning, plot the results
            for plot in figures:
                ax1 = figures[plot]['ax1']
                ax2 = figures[plot]['ax2']
                if plot == 'isigi':
                    line, = ax1.plot(x_centers.as_numpy_array(),
                                     p50i.as_numpy_array(), '-')
                    line.set_label('Median')
                    ax1.fill_between(x_centers.as_numpy_array(),
                                     p25i.as_numpy_array(),
                                     p75i.as_numpy_array(),
                                     interpolate=True,
                                     alpha=0.50,
                                     color=line.get_color())
                    line, = ax2.plot(x_centers.as_numpy_array(),
                                     n_refls.as_numpy_array(),
                                     '-',
                                     color=line.get_color())
                    line.set_label('N reflections')
                elif plot == 'deltaXY':
                    line, = ax1.plot(x_centers.as_numpy_array(),
                                     p50r.as_numpy_array(), '-')
                    line.set_label('Median')
                    ax1.fill_between(x_centers.as_numpy_array(),
                                     p25r.as_numpy_array(),
                                     p75r.as_numpy_array(),
                                     interpolate=True,
                                     alpha=0.50,
                                     color=line.get_color())
                    line, = ax2.plot(x_centers.as_numpy_array(),
                                     n_refls.as_numpy_array(),
                                     '-',
                                     color=line.get_color())
                    line.set_label('N reflections')
                ax1.legend()
                ax2.legend()
Example #7
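This command compares two detector models. It decomposes the observed-minus-predicted displacement of each reflection into radial and transverse components, then walks the two detector hierarchies in parallel, tabulating congruence statistics (normal-angle differences and fast/slow/Z/overall shifts between matching panel groups), detector statistics relative to each hierarchy root, per-panel-group RMSDs, unit-cell statistics, and, optionally, per-panel plots.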
    def run(self):
        ''' Parse the options. '''
        from dials.util.options import flatten_experiments, flatten_datablocks, flatten_reflections
        # Parse the command line arguments
        params, options = self.parser.parse_args(show_diff_phil=True)
        self.params = params
        experiments = flatten_experiments(params.input.experiments)
        datablocks = flatten_datablocks(params.input.datablock)
        reflections = flatten_reflections(params.input.reflections)

        # Find all detector objects
        detectors = []
        detectors.extend(experiments.detectors())
        dbs = []
        for datablock in datablocks:
            dbs.extend(datablock.unique_detectors())
        detectors.extend(dbs)

        # Verify inputs
        if len(detectors) != 2:
            print "Please provide two experiments and or datablocks for comparison"
            return

        # These lines exercise the iterate_detector_at_level and iterate_panels functions
        # for a detector with 4 hierarchy levels
        """
    print "Testing iterate_detector_at_level"
    for level in xrange(4):
      print "iterating at level", level
      for panelg in iterate_detector_at_level(detectors[0].hierarchy(), 0, level):
        print panelg.get_name()

    print "Testing iterate_panels"
    for level in xrange(4):
      print "iterating at level", level
      for panelg in iterate_detector_at_level(detectors[0].hierarchy(), 0, level):
        for panel in iterate_panels(panelg):
          print panel.get_name()
    """
        tmp = []
        for refls in reflections:
            print "N reflections total:", len(refls)
            refls = refls.select(
                refls.get_flags(refls.flags.used_in_refinement))
            print "N reflections used in refinement", len(refls)
            print "Reporting only on those reflections used in refinement"

            refls['difference_vector_norms'] = (
                refls['xyzcal.mm'] - refls['xyzobs.mm.value']).norms()
            tmp.append(refls)
        reflections = tmp

        # Iterate through the detectors, computing the congruence statistics
        delta_normals = {}
        z_angles = {}
        f_deltas = {}
        s_deltas = {}
        z_deltas = {}
        o_deltas = {}  # overall
        z_offsets_d = {}
        refl_counts = {}
        all_delta_normals = flex.double()
        all_rdelta_normals = flex.double()
        all_tdelta_normals = flex.double()
        all_z_angles = flex.double()
        all_f_deltas = flex.double()
        all_s_deltas = flex.double()
        all_z_deltas = flex.double()
        all_deltas = flex.double()
        all_refls_count = flex.int()

        all_normal_angles = flex.double()
        all_rnormal_angles = flex.double()
        all_tnormal_angles = flex.double()
        pg_normal_angle_sigmas = flex.double()
        pg_rnormal_angle_sigmas = flex.double()
        pg_tnormal_angle_sigmas = flex.double()
        all_rot_z = flex.double()
        pg_rot_z_sigmas = flex.double()
        pg_bc_dists = flex.double()
        all_bc_dist = flex.double()
        all_f_offsets = flex.double()
        all_s_offsets = flex.double()
        all_z_offsets = flex.double()
        pg_f_offset_sigmas = flex.double()
        pg_s_offset_sigmas = flex.double()
        pg_z_offset_sigmas = flex.double()
        pg_offset_sigmas = flex.double()
        all_weights = flex.double()

        congruence_table_data = []
        detector_table_data = []
        rmsds_table_data = []
        root1 = detectors[0].hierarchy()
        root2 = detectors[1].hierarchy()

        s0 = col(
            flex.vec3_double([col(b.get_s0())
                              for b in experiments.beams()]).mean())

        # Compute a set of radial and transverse displacements for each reflection
        print "Setting up stats..."
        tmp_refls = []
        for refls, expts in zip(
                reflections,
            [wrapper.data for wrapper in params.input.experiments]):
            tmp = flex.reflection_table()
            assert len(expts.detectors()) == 1
            dect = expts.detectors()[0]
            # Need to construct a variety of vectors
            for panel_id, panel in enumerate(dect):
                panel_refls = refls.select(refls['panel'] == panel_id)
                bcl = flex.vec3_double()
                # Compute the beam center in lab space (a vector pointing from the origin to where the beam would intersect
                # the panel, if it did intersect the panel)
                for expt_id in set(panel_refls['id']):
                    beam = expts[expt_id].beam
                    s0_ = beam.get_s0()  # local name so the mean s0 computed above is not clobbered
                    expt_refls = panel_refls.select(
                        panel_refls['id'] == expt_id)
                    beam_centre = panel.get_beam_centre_lab(s0_)
                    bcl.extend(flex.vec3_double(len(expt_refls), beam_centre))
                panel_refls['beam_centre_lab'] = bcl

                # Compute obs in lab space
                x, y, _ = panel_refls['xyzobs.mm.value'].parts()
                c = flex.vec2_double(x, y)
                panel_refls['obs_lab_coords'] = panel.get_lab_coord(c)
                # Compute deltaXY in panel space. This vector is relative to the panel origin
                x, y, _ = (panel_refls['xyzcal.mm'] -
                           panel_refls['xyzobs.mm.value']).parts()
                # Convert deltaXY to lab space, subtracting off of the panel origin
                panel_refls['delta_lab_coords'] = panel.get_lab_coord(
                    flex.vec2_double(x, y)) - panel.get_origin()
                tmp.extend(panel_refls)
            refls = tmp
            # The radial vector points from the center of the reflection to the beam center
            radial_vectors = (refls['obs_lab_coords'] -
                              refls['beam_centre_lab']).each_normalize()
            # The transverse vector is orthogonal to the radial vector and the beam vector
            transverse_vectors = radial_vectors.cross(
                refls['beam_centre_lab']).each_normalize()
            # Compute the radial and transverse components of each deltaXY
            refls['radial_displacements'] = refls['delta_lab_coords'].dot(
                radial_vectors)
            refls['transverse_displacements'] = refls['delta_lab_coords'].dot(
                transverse_vectors)

            tmp_refls.append(refls)
        reflections = tmp_refls

        for pg_id, (pg1, pg2) in enumerate(
                zip(
                    iterate_detector_at_level(root1, 0,
                                              params.hierarchy_level),
                    iterate_detector_at_level(root2, 0,
                                              params.hierarchy_level))):
            """ First compute statistics for detector congruence """
            # Count up the number of reflections in this panel group pair for use as a weighting scheme
            total_refls = 0
            pg1_refls = 0
            pg2_refls = 0
            for p1, p2 in zip(iterate_panels(pg1), iterate_panels(pg2)):
                r1 = len(reflections[0].select(
                    reflections[0]['panel'] == id_from_name(
                        detectors[0], p1.get_name())))
                r2 = len(reflections[1].select(
                    reflections[1]['panel'] == id_from_name(
                        detectors[1], p2.get_name())))
                total_refls += r1 + r2
                pg1_refls += r1
                pg2_refls += r2
            if pg1_refls == 0 and pg2_refls == 0:
                print "No reflections on panel group", pg_id
                continue

            assert pg1.get_name() == pg2.get_name()
            refl_counts[pg1.get_name()] = total_refls

            row = ["%d" % pg_id]
            for pg, refls, det in zip([pg1, pg2], reflections, detectors):
                pg_refls = flex.reflection_table()
                for p in iterate_panels(pg):
                    pg_refls.extend(
                        refls.select(
                            refls['panel'] == id_from_name(det, p.get_name())))
                if len(pg_refls) == 0:
                    rmsd = r_rmsd = t_rmsd = 0
                else:
                    rmsd = math.sqrt(
                        flex.sum_sq(pg_refls['difference_vector_norms']) /
                        len(pg_refls)) * 1000
                    r_rmsd = math.sqrt(
                        flex.sum_sq(pg_refls['radial_displacements']) /
                        len(pg_refls)) * 1000
                    t_rmsd = math.sqrt(
                        flex.sum_sq(pg_refls['transverse_displacements']) /
                        len(pg_refls)) * 1000

                row.extend([
                    "%6.1f" % rmsd,
                    "%6.1f" % r_rmsd,
                    "%6.1f" % t_rmsd,
                    "%8d" % len(pg_refls)
                ])
            rmsds_table_data.append(row)

            # Angle between normals of pg1 and pg2
            delta_norm_angle = col(pg1.get_normal()).angle(col(
                pg2.get_normal()),
                                                           deg=True)
            all_delta_normals.append(delta_norm_angle)

            # compute radial and transverse components of the delta between normal angles
            pgo = (get_center(pg1) + get_center(pg2)) / 2
            ro = (get_center(root1) + get_center(root2)) / 2
            rn = (col(root1.get_normal()) + col(root2.get_normal())) / 2
            rf = (col(root1.get_fast_axis()) + col(root2.get_fast_axis())) / 2
            rs = (col(root1.get_slow_axis()) + col(root2.get_slow_axis())) / 2

            ro_pgo = pgo - ro  # vector from the detector origin to the average panel group origin
            if ro_pgo.length() == 0:
                radial = col((0, 0, 0))
                transverse = col((0, 0, 0))
            else:
                radial = ((rf.dot(ro_pgo) * rf) + (rs.dot(ro_pgo) * rs)
                          ).normalize()  # component of ro_pgo in rf rs plane
                transverse = rn.cross(radial).normalize()
            # now radial and transverse are vectors orthogonal to each other and the detector normal, such that
            # radial points at the panel group origin
            # v1 and v2 are the components of pg 1 and 2 normals in the rn radial plane
            v1 = (radial.dot(col(pg1.get_normal())) *
                  radial) + (rn.dot(col(pg1.get_normal())) * rn)
            v2 = (radial.dot(col(pg2.get_normal())) *
                  radial) + (rn.dot(col(pg2.get_normal())) * rn)
            rdelta_norm_angle = v1.angle(v2, deg=True)
            if v1.cross(v2).dot(transverse) < 0:
                rdelta_norm_angle = -rdelta_norm_angle
            all_rdelta_normals.append(rdelta_norm_angle)
            # v1 and v2 are the components of pg 1 and 2 normals in the rn transverse plane
            v1 = (transverse.dot(col(pg1.get_normal())) *
                  transverse) + (rn.dot(col(pg1.get_normal())) * rn)
            v2 = (transverse.dot(col(pg2.get_normal())) *
                  transverse) + (rn.dot(col(pg2.get_normal())) * rn)
            tdelta_norm_angle = v1.angle(v2, deg=True)
            if v1.cross(v2).dot(radial) < 0:
                tdelta_norm_angle = -tdelta_norm_angle
            all_tdelta_normals.append(tdelta_norm_angle)

            # compute the angle between fast axes of these panel groups
            z_angle = col(pg1.get_fast_axis()[0:2]).angle(col(
                pg2.get_fast_axis()[0:2]),
                                                          deg=True)
            all_z_angles.append(z_angle)
            z_angles[pg1.get_name()] = z_angle

            all_refls_count.append(total_refls)
            all_weights.append(pg1_refls)
            all_weights.append(pg2_refls)
            """ Now compute statistics measuring the reality of the detector. For example, instead of the distance between two things,
      we are concerned with the location of those things relative to laboratory space """
            # Compute distances between panel groups and beam center
            # Also compute offset along Z axis
            dists = flex.double()
            f_offsets = flex.double()
            s_offsets = flex.double()
            z_offsets = flex.double()
            for pg, r in zip([pg1, pg2], [root1, root2]):
                bc = col(pg.get_beam_centre_lab(s0))
                ori = get_center(pg)

                dists.append((ori - bc).length())

                rori = col(r.get_origin())
                delta_ori = ori - rori
                r_norm = col(r.get_normal())
                r_fast = col(r.get_fast_axis())
                r_slow = col(r.get_slow_axis())
                f_offsets.append(r_fast.dot(delta_ori) * 1000)
                s_offsets.append(r_slow.dot(delta_ori) * 1000)
                z_offsets.append(r_norm.dot(delta_ori) * 1000)

            fd = abs(f_offsets[0] - f_offsets[1])
            sd = abs(s_offsets[0] - s_offsets[1])
            zd = abs(z_offsets[0] - z_offsets[1])
            od = math.sqrt(fd**2 + sd**2 + zd**2)
            f_deltas[pg1.get_name()] = fd
            s_deltas[pg1.get_name()] = sd
            z_deltas[pg1.get_name()] = zd
            o_deltas[pg1.get_name()] = od
            all_f_deltas.append(fd)
            all_s_deltas.append(sd)
            all_z_deltas.append(zd)
            all_deltas.append(od)

            all_f_offsets.extend(f_offsets)
            all_s_offsets.extend(s_offsets)
            all_z_offsets.extend(z_offsets)

            # Compute angle between detector normal and panel group normal
            # Compute rotation of panel group around detector normal
            pg_rotz = flex.double()
            norm_angles = flex.double()
            rnorm_angles = flex.double()
            tnorm_angles = flex.double()
            for pg, r in zip([pg1, pg2], [root1, root2]):

                pgo = get_center(pg)
                pgn = col(pg.get_normal())
                pgf = col(pg.get_fast_axis())

                ro = get_center(r)
                rn = col(r.get_normal())
                rf = col(r.get_fast_axis())
                rs = col(r.get_slow_axis())

                norm_angle = rn.angle(pgn, deg=True)
                norm_angles.append(norm_angle)
                all_normal_angles.append(norm_angle)

                ro_pgo = pgo - ro  # vector from the detector origin to the panel group origin
                if ro_pgo.length() == 0:
                    radial = col((0, 0, 0))
                    transverse = col((0, 0, 0))
                else:
                    radial = (
                        (rf.dot(ro_pgo) * rf) + (rs.dot(ro_pgo) * rs)
                    ).normalize()  # component of ro_pgo in rf rs plane
                    transverse = rn.cross(radial).normalize()
                # now radial and transverse are vectors orthogonal to each other and the detector normal, such that
                # radial points at the panel group origin
                # v is the component of pgn in the rn radial plane
                v = (radial.dot(pgn) * radial) + (rn.dot(pgn) * rn)
                angle = rn.angle(v, deg=True)
                if rn.cross(v).dot(transverse) < 0:
                    angle = -angle
                rnorm_angles.append(angle)
                all_rnormal_angles.append(angle)
                # v is the component of pgn in the rn transverse plane
                v = (transverse.dot(pgn) * transverse) + (rn.dot(pgn) * rn)
                angle = rn.angle(v, deg=True)
                if rn.cross(v).dot(radial) < 0:
                    angle = -angle
                tnorm_angles.append(angle)
                all_tnormal_angles.append(angle)

                # v is the component of pgf in the rf rs plane
                v = (rf.dot(pgf) * rf) + (rs.dot(pgf) * rs)
                angle = rf.angle(v, deg=True)
                angle = angle - (round(angle / 90) * 90
                                 )  # deviation from 90 degrees
                pg_rotz.append(angle)
                all_rot_z.append(angle)

            # Set up table rows using stats aggregated from above
            pg_weights = flex.double([pg1_refls, pg2_refls])
            if 0 in pg_weights:
                dist_m = dist_s = norm_angle_m = norm_angle_s = rnorm_angle_m = rnorm_angle_s = 0
                tnorm_angle_m = tnorm_angle_s = rotz_m = rotz_s = 0
                fo_m = fo_s = so_m = so_s = zo_m = zo_s = o_s = 0

            else:
                stats = flex.mean_and_variance(dists, pg_weights)
                dist_m = stats.mean()
                dist_s = stats.gsl_stats_wsd()

                stats = flex.mean_and_variance(norm_angles, pg_weights)
                norm_angle_m = stats.mean()
                norm_angle_s = stats.gsl_stats_wsd()

                stats = flex.mean_and_variance(rnorm_angles, pg_weights)
                rnorm_angle_m = stats.mean()
                rnorm_angle_s = stats.gsl_stats_wsd()

                stats = flex.mean_and_variance(tnorm_angles, pg_weights)
                tnorm_angle_m = stats.mean()
                tnorm_angle_s = stats.gsl_stats_wsd()

                stats = flex.mean_and_variance(pg_rotz, pg_weights)
                rotz_m = stats.mean()
                rotz_s = stats.gsl_stats_wsd()

                stats = flex.mean_and_variance(f_offsets, pg_weights)
                fo_m = stats.mean()
                fo_s = stats.gsl_stats_wsd()
                stats = flex.mean_and_variance(s_offsets, pg_weights)
                so_m = stats.mean()
                so_s = stats.gsl_stats_wsd()
                stats = flex.mean_and_variance(z_offsets, pg_weights)
                zo_m = stats.mean()
                zo_s = stats.gsl_stats_wsd()

                o_s = math.sqrt(fo_s**2 + so_s**2 + zo_s**2)

            pg_bc_dists.append(dist_m)
            all_bc_dist.extend(dists)
            pg_normal_angle_sigmas.append(norm_angle_s)
            pg_rnormal_angle_sigmas.append(rnorm_angle_s)
            pg_tnormal_angle_sigmas.append(tnorm_angle_s)
            pg_rot_z_sigmas.append(rotz_s)
            pg_f_offset_sigmas.append(fo_s)
            pg_s_offset_sigmas.append(so_s)
            pg_z_offset_sigmas.append(zo_s)
            pg_offset_sigmas.append(o_s)
            z_offsets_d[pg1.get_name()] = zo_m

            congruence_table_data.append([
                "%d" % pg_id,
                "%5.1f" % dist_m,  #"%.4f"%dist_s,
                "%.4f" % delta_norm_angle,
                "%.4f" % rdelta_norm_angle,
                "%.4f" % tdelta_norm_angle,
                "%.4f" % z_angle,
                "%4.1f" % fd,
                "%4.1f" % sd,
                "%4.1f" % zd,
                "%4.1f" % od,
                "%6d" % total_refls
            ])
            detector_table_data.append([
                "%d" % pg_id,
                "%5.1f" % dist_m,  #"%.4f"%dist_s,
                "%.4f" % norm_angle_m,
                "%.4f" % norm_angle_s,
                "%.4f" % rnorm_angle_m,
                "%.4f" % rnorm_angle_s,
                "%.4f" % tnorm_angle_m,
                "%.4f" % tnorm_angle_s,
                "%10.6f" % rotz_m,
                "%.6f" % rotz_s,
                #"%9.1f"%fo_m, "%5.3f"%fo_s,
                #"%9.1f"%so_m, "%5.3f"%so_s,
                "%9.3f" % fo_s,
                "%9.3f" % so_s,
                "%9.1f" % zo_m,
                "%9.1f" % zo_s,
                "%9.3f" % o_s,
                "%6d" % total_refls
            ])

        # Set up table output
        table_d = {
            d: row
            for d, row in zip(pg_bc_dists, congruence_table_data)
        }
        table_header = [
            "PanelG", "Dist", "Normal", "RNormal", "TNormal", "Z rot", "Delta",
            "Delta", "Delta", "Delta", "N"
        ]
        table_header2 = [
            "Id", "", "Angle", "Angle", "Angle", "Angle", "F", "S", "Z", "O",
            "Refls"
        ]
        table_header3 = [
            "", "(mm)", "(mm)", "(deg)", "(deg)", "(microns)", "(microns)",
            "(microns)", "(microns)", "(microns)", ""
        ]
        congruence_table_data = [table_header, table_header2, table_header3]
        congruence_table_data.extend([table_d[key] for key in sorted(table_d)])

        table_d = {d: row for d, row in zip(pg_bc_dists, detector_table_data)}
        table_header = [
            "PanelG", "Dist", "Normal", "Normal", "RNormal", "RNormal",
            "TNormal", "TNormal", "RotZ", "RotZ", "F Offset", "S Offset",
            "Z Offset", "Z Offset", "Offset", "N"
        ]
        table_header2 = [
            "Id", "", "", "Sigma", "", "Sigma", "", "Sigma", "", "Sigma",
            "Sigma", "Sigma", "", "Sigma", "Sigma", "Refls"
        ]
        table_header3 = [
            "", "(mm)", "(deg)", "(deg)", "(deg)", "(deg)", "(deg)", "(deg)",
            "(deg)", "(deg)", "(microns)", "(microns)", "(microns)",
            "(microns)", "(microns)", ""
        ]
        detector_table_data = [table_header, table_header2, table_header3]
        detector_table_data.extend([table_d[key] for key in sorted(table_d)])

        table_d = {d: row for d, row in zip(pg_bc_dists, rmsds_table_data)}
        table_header = ["PanelG"]
        table_header2 = ["Id"]
        table_header3 = [""]
        for i in range(len(detectors)):
            table_header.extend(["D%d" % i] * 4)
            table_header2.extend(["RMSD", "rRMSD", "tRMSD", "N refls"])
            table_header3.extend(["(microns)"] * 3)
            table_header3.append("")
        rmsds_table_data = [table_header, table_header2, table_header3]
        rmsds_table_data.extend([table_d[key] for key in sorted(table_d)])

        if len(all_refls_count) > 1:
            r1 = ["Weighted mean"]
            r2 = ["Weighted stddev"]
            r1.append("")
            r2.append("")
            #r1.append("")
            #r2.append("")
            stats = flex.mean_and_variance(all_delta_normals,
                                           all_refls_count.as_double())
            r1.append("%.4f" % stats.mean())
            r2.append("%.4f" % stats.gsl_stats_wsd())
            stats = flex.mean_and_variance(all_rdelta_normals,
                                           all_refls_count.as_double())
            r1.append("%.4f" % stats.mean())
            r2.append("%.4f" % stats.gsl_stats_wsd())
            stats = flex.mean_and_variance(all_tdelta_normals,
                                           all_refls_count.as_double())
            r1.append("%.4f" % stats.mean())
            r2.append("%.4f" % stats.gsl_stats_wsd())
            stats = flex.mean_and_variance(all_z_angles,
                                           all_refls_count.as_double())
            r1.append("%.4f" % stats.mean())
            r2.append("%.4f" % stats.gsl_stats_wsd())
            stats = flex.mean_and_variance(all_f_deltas,
                                           all_refls_count.as_double())
            r1.append("%4.1f" % stats.mean())
            r2.append("%4.1f" % stats.gsl_stats_wsd())
            stats = flex.mean_and_variance(all_s_deltas,
                                           all_refls_count.as_double())
            r1.append("%4.1f" % stats.mean())
            r2.append("%4.1f" % stats.gsl_stats_wsd())
            stats = flex.mean_and_variance(all_z_deltas,
                                           all_refls_count.as_double())
            r1.append("%4.1f" % stats.mean())
            r2.append("%4.1f" % stats.gsl_stats_wsd())
            stats = flex.mean_and_variance(all_deltas,
                                           all_refls_count.as_double())
            r1.append("%4.1f" % stats.mean())
            r2.append("%4.1f" % stats.gsl_stats_wsd())
            r1.append("")
            r2.append("")
            congruence_table_data.append(r1)
            congruence_table_data.append(r2)
            congruence_table_data.append([
                "Mean", "", "", "", "", "", "", "", "", "", "",
                "%6.1f" % flex.mean(all_refls_count.as_double())
            ])

        from libtbx import table_utils
        print "Congruence statistics, I.E. the differences between the input detectors:"
        print table_utils.format(congruence_table_data,
                                 has_header=3,
                                 justify='center',
                                 delim=" ")

        print "PanelG Id: panel group id or panel id, depending on hierarchy_level. For each panel group, statistics are computed between the matching panel groups between the two input experiments."
        print "Dist: distance from center of panel group to the beam center"
        print "Dist Sigma: weighted standard deviation of the measurements used to compute Dist"
        print "Normal angle: angle between the normal vectors of matching panel groups."
        print "RNormal angle: radial component of the angle between the normal vectors of matching panel groups"
        print "TNormal angle: transverse component of the angle between the normal vectors of matching panel groups"
        print "Z rot: angle between the XY components of the fast axes of the panel groups."
        print "Delta F: shift between matching panel groups along the detector fast axis."
        print "Delta S: shift between matching panel groups along the detector slow axis."
        print "Delta Z: Z shift between matching panel groups along the detector normal."
        print "Delta O: Overall shift between matching panel groups along the detector normal."
        print "N refls: number of reflections summed between both matching panel groups. This number is used as a weight when computing means and standard deviations."
        print
        print

        if len(all_weights) > 1:
            r1 = ["All"]
            r2 = ["Mean"]
            for data, weights, fmt in [
                [None, None, None],
                    #[None,None,None],
                [all_normal_angles,
                 all_weights.as_double(), "%.4f"],
                [pg_normal_angle_sigmas,
                 all_refls_count.as_double(), "%.4f"],
                [all_rnormal_angles,
                 all_weights.as_double(), "%.4f"],
                [pg_rnormal_angle_sigmas,
                 all_refls_count.as_double(), "%.4f"],
                [all_tnormal_angles,
                 all_weights.as_double(), "%.4f"],
                [pg_tnormal_angle_sigmas,
                 all_refls_count.as_double(), "%.4f"],
                [all_rot_z, all_weights.as_double(), "%10.6f"],
                [pg_rot_z_sigmas,
                 all_refls_count.as_double(), "%.6f"],
                    #[all_f_offsets,           all_weights.as_double(),     "%9.1f"],
                [pg_f_offset_sigmas,
                 all_refls_count.as_double(), "%9.3f"],
                    #[all_s_offsets,           all_weights.as_double(),     "%9.1f"],
                [pg_s_offset_sigmas,
                 all_refls_count.as_double(), "%9.3f"],
                [all_z_offsets,
                 all_weights.as_double(), "%9.1f"],
                [pg_z_offset_sigmas,
                 all_refls_count.as_double(), "%9.1f"],
                [pg_offset_sigmas,
                 all_refls_count.as_double(), "%9.1f"]
            ]:

                r2.append("")
                if data is None and weights is None:
                    r1.append("")
                    continue
                stats = flex.mean_and_variance(data, weights)
                r1.append(fmt % stats.mean())

            r1.append("")
            r2.append("%6.1f" % flex.mean(all_refls_count.as_double()))
            detector_table_data.append(r1)
            detector_table_data.append(r2)

        print "Detector statistics, I.E. measurements of parameters relative to the detector plane:"
        print table_utils.format(detector_table_data,
                                 has_header=3,
                                 justify='center',
                                 delim=" ")

        print "PanelG Id: panel group id or panel id, depending on hierarchy_level. For each panel group, weighted means and weighted standard deviations (Sigmas) for the properties listed below are computed using the matching panel groups between the input experiments."
        print "Dist: distance from center of panel group to the beam center"
        print "Dist Sigma: weighted standard deviation of the measurements used to compute Dist"
        print "Normal Angle: angle between the normal vector of the detector at its root hierarchy level and the normal of the panel group"
        print "RNormal Angle: radial component of Normal Angle"
        print "TNormal Angle: transverse component of Normal Angle"
        print "RotZ: deviation from 90 degrees of the rotation of each panel group around the detector normal"
        print "F Offset: offset of panel group along the detector's fast axis"
        print "S Offset: offset of panel group along the detector's slow axis"
        print "Z Offset: offset of panel group along the detector normal"
        print "Offset: offset of panel group in F,S,Z space. Sigma is F, S, Z offset sigmas summed in quadrature."
        print "N refls: number of reflections summed between both matching panel groups. This number is used as a weight when computing means and standard deviations."
        print "All: weighted mean of the values shown"
        print
        print "Sigmas in this table are computed using the standard deviation of 2 measurements (I.E. a panel's Z Offset is measured twice, once in each input dataset). This is related by a factor of sqrt(2)/2 to the mean of the Delta Z parameter in the congruence statistics table above, which is the difference between Z parameters."
        print

        row = ["Overall"]
        for refls in reflections:
            row.append("%6.1f" % (math.sqrt(
                flex.sum_sq(refls['difference_vector_norms']) / len(refls)) *
                                  1000))
            row.append("%6.1f" % (math.sqrt(
                flex.sum_sq(refls['radial_displacements']) / len(refls)) *
                                  1000))
            row.append("%6.1f" % (math.sqrt(
                flex.sum_sq(refls['transverse_displacements']) / len(refls)) *
                                  1000))
            row.append("%8d" % len(refls))
        rmsds_table_data.append(row)

        print "RMSDs by detector number"
        print table_utils.format(rmsds_table_data,
                                 has_header=3,
                                 justify='center',
                                 delim=" ")
        print "PanelG Id: panel group id or panel id, depending on hierarchy_level"
        print "RMSD: root mean squared deviation between observed and predicted spot locations"
        print "rRMSD: RMSD of radial components of the observed-predicted vectors"
        print "tRMSD: RMSD of transverse components of the observed-predicted vectors"
        print "N refls: number of reflections"

        # Show stats for detector hierarchy root
        def _print_vector(v):
            for i in v:
                print("%10.5f" % i, end=' ')
            print()

        for d_id, d in enumerate(detectors):
            ori = d.hierarchy().get_origin()
            norm = d.hierarchy().get_normal()
            fast = d.hierarchy().get_fast_axis()
            slow = d.hierarchy().get_slow_axis()
            print "Detector", d_id, "origin:   ",
            _print_vector(ori)
            print "Detector", d_id, "normal:   ",
            _print_vector(norm)
            print "Detector", d_id, "fast axis:",
            _print_vector(fast)
            print "Detector", d_id, "slow axis:",
            _print_vector(slow)

        # Unit cell statistics
        lengths = flex.vec3_double()
        angles = flex.vec3_double()
        weights = flex.double()
        for refls, expts in zip(reflections,
                                [d.data for d in params.input.experiments]):
            for crystal_id, crystal in enumerate(expts.crystals()):
                lengths.append(crystal.get_unit_cell().parameters()[0:3])
                angles.append(crystal.get_unit_cell().parameters()[3:6])
                weights.append(len(refls.select(refls['id'] == crystal_id)))

        print "Unit cell stats (angstroms and degrees), weighted means and standard deviations"
        for subset, tags in zip([lengths, angles],
                                [["Cell a", "Cell b", "Cell c"],
                                 ["Cell alpha", "Cell beta", "Cell gamma"]]):
            for data, tag in zip(subset.parts(), tags):
                stats = flex.mean_and_variance(data, weights)
                print "%s %5.1f +/- %6.3f" % (tag, stats.mean(),
                                              stats.gsl_stats_wsd())

        if params.tag is None:
            tag = ""
        else:
            tag = "%s " % params.tag

        if params.show_plots:
            # Plot the results
            detector_plot_dict(self.params,
                               detectors[0],
                               refl_counts,
                               u"%sN reflections" % tag,
                               u"%6d",
                               show=False)
            #detector_plot_dict(self.params, detectors[0], delta_normals, u"%sAngle between normal vectors (\N{DEGREE SIGN})"%tag, u"%.2f\N{DEGREE SIGN}", show=False)
            detector_plot_dict(
                self.params,
                detectors[0],
                z_angles,
                u"%sZ rotation angle between panels (\N{DEGREE SIGN})" % tag,
                u"%.2f\N{DEGREE SIGN}",
                show=False)
            detector_plot_dict(
                self.params,
                detectors[0],
                f_deltas,
                u"%sFast displacements between panels (microns)" % tag,
                u"%4.1f",
                show=False)
            detector_plot_dict(
                self.params,
                detectors[0],
                s_deltas,
                u"%sSlow displacements between panels (microns)" % tag,
                u"%4.1f",
                show=False)
            detector_plot_dict(self.params,
                               detectors[0],
                               z_offsets_d,
                               u"%sZ offsets along detector normal (microns)" %
                               tag,
                               u"%4.1f",
                               show=False)
            detector_plot_dict(self.params,
                               detectors[0],
                               z_deltas,
                               u"%sZ displacements between panels (microns)" %
                               tag,
                               u"%4.1f",
                               show=False)
            detector_plot_dict(
                self.params,
                detectors[0],
                o_deltas,
                u"%sOverall displacements between panels (microns)" % tag,
                u"%4.1f",
                show=False)
            plt.show()
Example #8
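This is a variant of the previous comparison command. It performs the same radial/transverse set-up for two experiment lists and then accumulates lab-space, local (hierarchy-level), and RMSD statistics for matching panel groups; the snippet is truncated partway through the panel-group loop.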
    def run(self):
        ''' Parse the options. '''
        from dials.util.options import flatten_experiments, flatten_reflections
        # Parse the command line arguments
        params, options = self.parser.parse_args(show_diff_phil=True)
        self.params = params
        experiments = flatten_experiments(params.input.experiments)
        reflections = flatten_reflections(params.input.reflections)

        # Find all detector objects
        detectors = []
        detectors.extend(experiments.detectors())

        # Verify inputs
        if len(detectors) != 2:
            print "Please provide two experiments for comparison"
            return

        # These lines exercise the iterate_detector_at_level and iterate_panels functions
        # for a detector with 4 hierarchy levels
        """
    print "Testing iterate_detector_at_level"
    for level in range(4):
      print "iterating at level", level
      for panelg in iterate_detector_at_level(detectors[0].hierarchy(), 0, level):
        print panelg.get_name()

    print "Testing iterate_panels"
    for level in range(4):
      print "iterating at level", level
      for panelg in iterate_detector_at_level(detectors[0].hierarchy(), 0, level):
        for panel in iterate_panels(panelg):
          print panel.get_name()
    """
        tmp = []
        for refls in reflections:
            print "N reflections total:", len(refls)
            sel = refls.get_flags(refls.flags.used_in_refinement)
            if sel.count(True) > 0:
                refls = refls.select(sel)
                print "N reflections used in refinement", len(refls)
                print "Reporting only on those reflections used in refinement"

            refls['difference_vector_norms'] = (
                refls['xyzcal.mm'] - refls['xyzobs.mm.value']).norms()
            tmp.append(refls)
        reflections = tmp

        s0 = col(
            flex.vec3_double([col(b.get_s0())
                              for b in experiments.beams()]).mean())

        # Compute a set of radial and transverse displacements for each reflection
        print "Setting up stats..."
        tmp_refls = []
        for refls, expts in zip(
                reflections,
            [wrapper.data for wrapper in params.input.experiments]):
            tmp = flex.reflection_table()
            assert len(expts.detectors()) == 1
            dect = expts.detectors()[0]
            # Need to construct a variety of vectors
            for panel_id, panel in enumerate(dect):
                panel_refls = refls.select(refls['panel'] == panel_id)
                bcl = flex.vec3_double()
                # Compute the beam center in lab space (a vector pointing from the origin to where the beam would intersect
                # the panel, if it did intersect the panel)
                for expt_id in set(panel_refls['id']):
                    beam = expts[expt_id].beam
                    s0_ = beam.get_s0()
                    expt_refls = panel_refls.select(
                        panel_refls['id'] == expt_id)
                    beam_centre = panel.get_beam_centre_lab(s0_)
                    bcl.extend(flex.vec3_double(len(expt_refls), beam_centre))
                panel_refls['beam_centre_lab'] = bcl

                # Compute obs in lab space
                x, y, _ = panel_refls['xyzobs.mm.value'].parts()
                c = flex.vec2_double(x, y)
                panel_refls['obs_lab_coords'] = panel.get_lab_coord(c)
                # Compute deltaXY in panel space. This vector is relative to the panel origin
                x, y, _ = (panel_refls['xyzcal.mm'] -
                           panel_refls['xyzobs.mm.value']).parts()
                # Convert deltaXY to lab space, subtracting off of the panel origin
                panel_refls['delta_lab_coords'] = panel.get_lab_coord(
                    flex.vec2_double(x, y)) - panel.get_origin()
                tmp.extend(panel_refls)
            refls = tmp
            # The radial vector points from the center of the reflection to the beam center
            radial_vectors = (refls['obs_lab_coords'] -
                              refls['beam_centre_lab']).each_normalize()
            # The transverse vector is orthogonal to the radial vector and the beam vector
            transverse_vectors = radial_vectors.cross(
                refls['beam_centre_lab']).each_normalize()
            # Compute the radial and transverse components of each deltaXY
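            # Projecting delta_lab_coords onto these unit vectors splits each deltaXY
            # into a component along the beam-centre-to-spot direction (radial) and a
            # component roughly perpendicular to it in the detector plane (transverse)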
            refls['radial_displacements'] = refls['delta_lab_coords'].dot(
                radial_vectors)
            refls['transverse_displacements'] = refls['delta_lab_coords'].dot(
                transverse_vectors)

            tmp_refls.append(refls)
        reflections = tmp_refls

        # storage for plots
        refl_counts = {}

        # Data for all tables
        pg_bc_dists = flex.double()
        root1 = detectors[0].hierarchy()
        root2 = detectors[1].hierarchy()
        all_weights = flex.double()
        all_refls_count = flex.int()

        # Data for lab space table
        lab_table_data = []
        lab_delta_table_data = []
        all_lab_x = flex.double()
        all_lab_y = flex.double()
        all_lab_z = flex.double()
        pg_lab_x_sigmas = flex.double()
        pg_lab_y_sigmas = flex.double()
        pg_lab_z_sigmas = flex.double()
        all_rotX = flex.double()
        all_rotY = flex.double()
        all_rotZ = flex.double()
        pg_rotX_sigmas = flex.double()
        pg_rotY_sigmas = flex.double()
        pg_rotZ_sigmas = flex.double()
        all_delta_x = flex.double()
        all_delta_y = flex.double()
        all_delta_z = flex.double()
        all_delta_xy = flex.double()
        all_delta_xyz = flex.double()
        all_delta_r = flex.double()
        all_delta_t = flex.double()
        all_delta_norm = flex.double()

        if params.hierarchy_level > 0:
            # Data for local table
            local_table_data = []
            local_delta_table_data = []
            all_local_x = flex.double()
            all_local_y = flex.double()
            all_local_z = flex.double()
            pg_local_x_sigmas = flex.double()
            pg_local_y_sigmas = flex.double()
            pg_local_z_sigmas = flex.double()
            all_local_rotX = flex.double()
            all_local_rotY = flex.double()
            all_local_rotZ = flex.double()
            pg_local_rotX_sigmas = flex.double()
            pg_local_rotY_sigmas = flex.double()
            pg_local_rotZ_sigmas = flex.double()
            all_local_delta_x = flex.double()
            all_local_delta_y = flex.double()
            all_local_delta_z = flex.double()
            all_local_delta_xy = flex.double()
            all_local_delta_xyz = flex.double()

        # Data for RMSD table
        rmsds_table_data = []

        for pg_id, (pg1, pg2) in enumerate(
                zip(
                    iterate_detector_at_level(root1, 0,
                                              params.hierarchy_level),
                    iterate_detector_at_level(root2, 0,
                                              params.hierarchy_level))):
            # Count up the number of reflections in this panel group pair for use as a weighting scheme
            total_refls = 0
            pg1_refls = 0
            pg2_refls = 0
            for p1, p2 in zip(iterate_panels(pg1), iterate_panels(pg2)):
                r1 = len(reflections[0].select(
                    reflections[0]['panel'] == id_from_name(
                        detectors[0], p1.get_name())))
                r2 = len(reflections[1].select(
                    reflections[1]['panel'] == id_from_name(
                        detectors[1], p2.get_name())))
                total_refls += r1 + r2
                pg1_refls += r1
                pg2_refls += r2
            if pg1_refls == 0 and pg2_refls == 0:
                print "No reflections on panel group", pg_id
                continue
            all_refls_count.append(total_refls)
            all_weights.append(pg1_refls)
            all_weights.append(pg2_refls)

            assert pg1.get_name() == pg2.get_name()
            refl_counts[pg1.get_name()] = total_refls

            # Compute RMSDs
            row = ["%d" % pg_id]
            for pg, refls, det in zip([pg1, pg2], reflections, detectors):
                pg_refls = flex.reflection_table()
                for p in iterate_panels(pg):
                    pg_refls.extend(
                        refls.select(
                            refls['panel'] == id_from_name(det, p.get_name())))
                if len(pg_refls) == 0:
                    rmsd = r_rmsd = t_rmsd = 0
                else:
                    rmsd = math.sqrt(
                        flex.sum_sq(pg_refls['difference_vector_norms']) /
                        len(pg_refls)) * 1000
                    r_rmsd = math.sqrt(
                        flex.sum_sq(pg_refls['radial_displacements']) /
                        len(pg_refls)) * 1000
                    t_rmsd = math.sqrt(
                        flex.sum_sq(pg_refls['transverse_displacements']) /
                        len(pg_refls)) * 1000

                row.extend([
                    "%6.1f" % rmsd,
                    "%6.1f" % r_rmsd,
                    "%6.1f" % t_rmsd,
                    "%8d" % len(pg_refls)
                ])
            rmsds_table_data.append(row)

            dists = flex.double()
            lab_x = flex.double()
            lab_y = flex.double()
            lab_z = flex.double()
            rot_X = flex.double()
            rot_Y = flex.double()
            rot_Z = flex.double()

            for pg in [pg1, pg2]:
                bc = col(pg.get_beam_centre_lab(s0))
                ori = get_center(pg)

                dists.append((ori - bc).length())

                ori_lab = pg.get_origin()
                lab_x.append(ori_lab[0])
                lab_y.append(ori_lab[1])
                lab_z.append(ori_lab[2])

                f = col(pg.get_fast_axis())
                s = col(pg.get_slow_axis())
                n = col(pg.get_normal())
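                # The matrix with columns (fast, slow, normal) is the panel-group
                # orientation; decomposing it into x-y-z rotation angles gives the
                # Rot X/Y/Z values reported in the tables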
                basis = sqr(
                    [f[0], s[0], n[0], f[1], s[1], n[1], f[2], s[2], n[2]])
                rotX, rotY, rotZ = basis.r3_rotation_matrix_as_x_y_z_angles(
                    deg=True)
                rot_X.append(rotX)
                rot_Y.append(rotY)
                rot_Z.append(rotZ)

            all_lab_x.extend(lab_x)
            all_lab_y.extend(lab_y)
            all_lab_z.extend(lab_z)
            all_rotX.extend(rot_X)
            all_rotY.extend(rot_Y)
            all_rotZ.extend(rot_Z)

            pg_weights = flex.double([pg1_refls, pg2_refls])
            if 0 in pg_weights:
                dist_m = dist_s = 0
                lx_m = lx_s = ly_m = ly_s = lz_m = lz_s = 0
                lrx_m = lrx_s = lry_m = lry_s = lrz_m = lrz_s = 0
                dx = dy = dz = dxy = dxyz = dr = dt = dnorm = 0
            else:
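                # Weighted statistics: each of the two values is weighted by its panel
                # group's reflection count; gsl_stats_wsd() is the weighted standard deviation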
                stats = flex.mean_and_variance(dists, pg_weights)
                dist_m = stats.mean()
                dist_s = stats.gsl_stats_wsd()

                stats = flex.mean_and_variance(lab_x, pg_weights)
                lx_m = stats.mean()
                lx_s = stats.gsl_stats_wsd()

                stats = flex.mean_and_variance(lab_y, pg_weights)
                ly_m = stats.mean()
                ly_s = stats.gsl_stats_wsd()

                stats = flex.mean_and_variance(lab_z, pg_weights)
                lz_m = stats.mean()
                lz_s = stats.gsl_stats_wsd()

                stats = flex.mean_and_variance(rot_X, pg_weights)
                lrx_m = stats.mean()
                lrx_s = stats.gsl_stats_wsd()

                stats = flex.mean_and_variance(rot_Y, pg_weights)
                lry_m = stats.mean()
                lry_s = stats.gsl_stats_wsd()

                stats = flex.mean_and_variance(rot_Z, pg_weights)
                lrz_m = stats.mean()
                lrz_s = stats.gsl_stats_wsd()

                dx = lab_x[0] - lab_x[1]
                dy = lab_y[0] - lab_y[1]
                dz = lab_z[0] - lab_z[1]
                dxy = math.sqrt(dx**2 + dy**2)
                dxyz = math.sqrt(dx**2 + dy**2 + dz**2)

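                # Decompose the lab-space offset between the two matching panel-group
                # origins into radial and transverse components relative to the beam and
                # the panel-group centre direction; dNorm is the angle between the two
                # panel normals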
                delta = col([lab_x[0], lab_y[0], lab_z[0]]) - col(
                    [lab_x[1], lab_y[1], lab_z[1]])
                pg1_center = get_center_lab(pg1).normalize()
                transverse = s0.cross(pg1_center).normalize()
                radial = transverse.cross(s0).normalize()
                dr = delta.dot(radial)
                dt = delta.dot(transverse)
                dnorm = col(pg1.get_normal()).angle(col(pg2.get_normal()),
                                                    deg=True)

            pg_bc_dists.append(dist_m)
            pg_lab_x_sigmas.append(lx_s)
            pg_lab_y_sigmas.append(ly_s)
            pg_lab_z_sigmas.append(lz_s)
            pg_rotX_sigmas.append(lrx_s)
            pg_rotY_sigmas.append(lry_s)
            pg_rotZ_sigmas.append(lrz_s)
            all_delta_x.append(dx)
            all_delta_y.append(dy)
            all_delta_z.append(dz)
            all_delta_xy.append(dxy)
            all_delta_xyz.append(dxyz)
            all_delta_r.append(dr)
            all_delta_t.append(dt)
            all_delta_norm.append(dnorm)

            lab_table_data.append([
                "%d" % pg_id,
                "%5.1f" % dist_m,
                "%9.3f" % lx_m,
                "%9.3f" % lx_s,
                "%9.3f" % ly_m,
                "%9.3f" % ly_s,
                "%9.3f" % lz_m,
                "%9.3f" % lz_s,
                "%9.3f" % lrx_m,
                "%9.3f" % lrx_s,
                "%9.3f" % lry_m,
                "%9.3f" % lry_s,
                "%9.3f" % lrz_m,
                "%9.3f" % lrz_s,
                "%6d" % total_refls
            ])

            lab_delta_table_data.append([
                "%d" % pg_id,
                "%5.1f" % dist_m,
                "%9.1f" % (dx * 1000),
                "%9.1f" % (dy * 1000),
                "%9.3f" % dz,
                "%9.1f" % (dxy * 1000),
                "%9.3f" % dxyz,
                "%9.1f" % (dr * 1000),
                "%9.1f" % (dt * 1000),
                "%9.3f" % dnorm,
                "%6d" % total_refls
            ])

            if params.hierarchy_level > 0:
                local_x = flex.double()
                local_y = flex.double()
                local_z = flex.double()
                l_rot_X = flex.double()
                l_rot_Y = flex.double()
                l_rot_Z = flex.double()
                l_dx = flex.double()
                l_dy = flex.double()
                l_dz = flex.double()
                l_dxy = flex.double()
                l_dxyz = flex.double()

                for pg in [pg1, pg2]:

                    l_ori = pg.get_local_origin()
                    local_x.append(l_ori[0])
                    local_y.append(l_ori[1])
                    local_z.append(l_ori[2])

                    f = col(pg.get_local_fast_axis())
                    s = col(pg.get_local_slow_axis())
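                    # The local normal is reconstructed as fast x slow so that local
                    # rotations can be derived the same way as the lab-frame ones above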
                    n = f.cross(s)
                    basis = sqr(
                        [f[0], s[0], n[0], f[1], s[1], n[1], f[2], s[2], n[2]])
                    rotX, rotY, rotZ = basis.r3_rotation_matrix_as_x_y_z_angles(
                        deg=True)
                    l_rot_X.append(rotX)
                    l_rot_Y.append(rotY)
                    l_rot_Z.append(rotZ)

                all_local_x.extend(local_x)
                all_local_y.extend(local_y)
                all_local_z.extend(local_z)
                all_local_rotX.extend(l_rot_X)
                all_local_rotY.extend(l_rot_Y)
                all_local_rotZ.extend(l_rot_Z)

                pg_weights = flex.double([pg1_refls, pg2_refls])
                if 0 in pg_weights:
                    lx_m = lx_s = ly_m = ly_s = lz_m = lz_s = 0
                    lrx_m = lrx_s = lry_m = lry_s = lrz_m = lrz_s = 0
                    ldx = ldy = ldz = ldxy = ldxyz = 0
                else:
                    stats = flex.mean_and_variance(local_x, pg_weights)
                    lx_m = stats.mean()
                    lx_s = stats.gsl_stats_wsd()

                    stats = flex.mean_and_variance(local_y, pg_weights)
                    ly_m = stats.mean()
                    ly_s = stats.gsl_stats_wsd()

                    stats = flex.mean_and_variance(local_z, pg_weights)
                    lz_m = stats.mean()
                    lz_s = stats.gsl_stats_wsd()

                    stats = flex.mean_and_variance(l_rot_X, pg_weights)
                    lrx_m = stats.mean()
                    lrx_s = stats.gsl_stats_wsd()

                    stats = flex.mean_and_variance(l_rot_Y, pg_weights)
                    lry_m = stats.mean()
                    lry_s = stats.gsl_stats_wsd()

                    stats = flex.mean_and_variance(l_rot_Z, pg_weights)
                    lrz_m = stats.mean()
                    lrz_s = stats.gsl_stats_wsd()

                    ldx = local_x[0] - local_x[1]
                    ldy = local_y[0] - local_y[1]
                    ldz = local_z[0] - local_z[1]
                    ldxy = math.sqrt(ldx**2 + ldy**2)
                    ldxyz = math.sqrt(ldx**2 + ldy**2 + ldz**2)

                pg_local_x_sigmas.append(lx_s)
                pg_local_y_sigmas.append(ly_s)
                pg_local_z_sigmas.append(lz_s)
                pg_local_rotX_sigmas.append(lrx_s)
                pg_local_rotY_sigmas.append(lry_s)
                pg_local_rotZ_sigmas.append(lrz_s)
                all_local_delta_x.append(ldx)
                all_local_delta_y.append(ldy)
                all_local_delta_z.append(ldz)
                all_local_delta_xy.append(ldxy)
                all_local_delta_xyz.append(ldxyz)

                local_table_data.append([
                    "%d" % pg_id,
                    "%5.1f" % dist_m,
                    "%9.3f" % lx_m,
                    "%9.3f" % lx_s,
                    "%9.3f" % ly_m,
                    "%9.3f" % ly_s,
                    "%9.3f" % lz_m,
                    "%9.3f" % lz_s,
                    "%9.3f" % lrx_m,
                    "%9.3f" % lrx_s,
                    "%9.3f" % lry_m,
                    "%9.3f" % lry_s,
                    "%9.3f" % lrz_m,
                    "%9.3f" % lrz_s,
                    "%6d" % total_refls
                ])

                local_delta_table_data.append([
                    "%d" % pg_id,
                    "%5.1f" % dist_m,
                    "%9.1f" % (ldx * 1000),
                    "%9.1f" % (ldy * 1000),
                    "%9.3f" % ldz,
                    "%9.1f" % (ldxy * 1000),
                    "%9.3f" % ldxyz,
                    "%6d" % total_refls
                ])

        # Set up table output, starting with lab table
        table_d = {d: row for d, row in zip(pg_bc_dists, lab_table_data)}
        table_header = [
            "PanelG", "Radial", "Lab X", "Lab X", "Lab Y", "Lab Y", "Lab Z",
            "Lab Z", "Rot X", "Rot X", "Rot Y", "Rot Y", "Rot Z", "Rot Z", "N"
        ]
        table_header2 = [
            "Id", "Dist", "", "Sigma", "", "Sigma", "", "Sigma", "", "Sigma",
            "", "Sigma", "", "Sigma", "Refls"
        ]
        table_header3 = [
            "", "(mm)", "(mm)", "(mm)", "(mm)", "(mm)", "(mm)", "(mm)",
            "(deg)", "(deg)", "(deg)", "(deg)", "(deg)", "(deg)", ""
        ]
        lab_table_data = [table_header, table_header2, table_header3]
        lab_table_data.extend([table_d[key] for key in sorted(table_d)])

        if len(all_weights) > 1:
            r1 = ["All"]
            r2 = ["Mean"]
            for data, weights, fmt in [
                [None, None, None],
                [all_lab_x, all_weights.as_double(), "%9.3f"],
                [pg_lab_x_sigmas,
                 all_refls_count.as_double(), "%9.3f"],
                [all_lab_y, all_weights.as_double(), "%9.3f"],
                [pg_lab_y_sigmas,
                 all_refls_count.as_double(), "%9.3f"],
                [all_lab_z, all_weights.as_double(), "%9.3f"],
                [pg_lab_z_sigmas,
                 all_refls_count.as_double(), "%9.3f"],
                [all_rotX, all_weights.as_double(), "%9.3f"],
                [pg_rotX_sigmas,
                 all_refls_count.as_double(), "%9.3f"],
                [all_rotY, all_weights.as_double(), "%9.3f"],
                [pg_rotY_sigmas,
                 all_refls_count.as_double(), "%9.3f"],
                [all_rotZ, all_weights.as_double(), "%9.3f"],
                [pg_rotZ_sigmas,
                 all_refls_count.as_double(), "%9.3f"]
            ]:
                r2.append("")
                if data is None and weights is None:
                    r1.append("")
                    continue
                stats = flex.mean_and_variance(data, weights)
                r1.append(fmt % stats.mean())

            r1.append("")
            r2.append("%6.1f" % flex.mean(all_refls_count.as_double()))
            lab_table_data.append(r1)
            lab_table_data.append(r2)

        from libtbx import table_utils
        print "Detector statistics relative to lab origin"
        print table_utils.format(lab_table_data,
                                 has_header=3,
                                 justify='center',
                                 delim=" ")
        print "PanelG Id: panel group id or panel id, depending on hierarchy_level. For each panel group, weighted means and weighted standard deviations (Sigmas) for the properties listed below are computed using the matching panel groups between the input experiments."
        print "Radial dist: distance from center of panel group to the beam center"
        print "Lab X, Y and Z: mean coordinate in lab space"
        print "Rot X, Y and Z: rotation of panel group around lab X, Y and Z axes"
        print "N refls: number of reflections summed between both matching panel groups. This number is used as a weight when computing means and standard deviations."
        print "All: weighted mean of the values shown"
        print

        # Next, deltas in lab space
        table_d = {d: row for d, row in zip(pg_bc_dists, lab_delta_table_data)}
        table_header = [
            "PanelG", "Radial", "Lab dX", "Lab dY", "Lab dZ", "Lab dXY",
            "Lab dXYZ", "Lab dR", "Lab dT", "Lab dNorm", "N"
        ]
        table_header2 = ["Id", "Dist", "", "", "", "", "", "", "", "", "Refls"]
        table_header3 = [
            "", "(mm)", "(microns)", "(microns)", "(mm)", "(microns)", "(mm)",
            "(microns)", "(microns)", "(deg)", ""
        ]
        lab_delta_table_data = [table_header, table_header2, table_header3]
        lab_delta_table_data.extend([table_d[key] for key in sorted(table_d)])

        if len(all_weights) > 1:
            r1 = ["WMean"]
            r2 = ["WStddev"]
            r3 = ["Mean"]
            for data, weights, fmt in [
                [None, None, None],
                [all_delta_x * 1000,
                 all_refls_count.as_double(), "%9.1f"],
                [all_delta_y * 1000,
                 all_refls_count.as_double(), "%9.1f"],
                [all_delta_z,
                 all_refls_count.as_double(), "%9.3f"],
                [all_delta_xy * 1000,
                 all_refls_count.as_double(), "%9.1f"],
                [all_delta_xyz,
                 all_refls_count.as_double(), "%9.3f"],
                [all_delta_r * 1000,
                 all_refls_count.as_double(), "%9.1f"],
                [all_delta_t * 1000,
                 all_refls_count.as_double(), "%9.1f"],
                [all_delta_norm,
                 all_refls_count.as_double(), "%9.3f"]
            ]:
                r3.append("")
                if data is None and weights is None:
                    r1.append("")
                    r2.append("")
                    continue
                stats = flex.mean_and_variance(data, weights)
                r1.append(fmt % stats.mean())
                if len(data) > 1:
                    r2.append(fmt % stats.gsl_stats_wsd())
                else:
                    r2.append("-")

            r1.append("")
            r2.append("")
            r3.append("%6.1f" % flex.mean(all_refls_count.as_double()))
            lab_delta_table_data.append(r1)
            lab_delta_table_data.append(r2)
            lab_delta_table_data.append(r3)

        print "Detector deltas in lab space"
        print table_utils.format(lab_delta_table_data,
                                 has_header=3,
                                 justify='center',
                                 delim=" ")
        print "PanelG Id: panel group id or panel id, depending on hierarchy_level. For each panel group, weighted means and weighted standard deviations (Sigmas) for the properties listed below are computed using the matching panel groups between the input experiments."
        print "Radial dist: distance from center of panel group to the beam center"
        print "Lab dX, dY and dZ: delta between X, Y and Z coordinates in lab space"
        print "Lab dR and dT: radial and transverse components of dXY in lab space"
        print "Lab dNorm: angle between normal vectors in lab space"
        print "N refls: number of reflections summed between both matching panel groups. This number is used as a weight when computing means and standard deviations."
        print "WMean: weighted mean of the values shown"
        print "WStddev: weighted standard deviation of the values shown"
        print "Mean: mean of the values shown"
        print

        if params.hierarchy_level > 0:
            # Local table
            table_d = {d: row for d, row in zip(pg_bc_dists, local_table_data)}
            table_header = [
                "PanelG", "Radial", "Local X", "Local X", "Local Y", "Local Y",
                "Local Z", "Local Z", "Rot X", "Rot X", "Rot Y", "Rot Y",
                "Rot Z", "Rot Z", "N"
            ]
            table_header2 = [
                "Id", "Dist", "", "Sigma", "", "Sigma", "", "Sigma", "",
                "Sigma", "", "Sigma", "", "Sigma", "Refls"
            ]
            table_header3 = [
                "", "(mm)", "(mm)", "(mm)", "(mm)", "(mm)", "(mm)", "(mm)",
                "(deg)", "(deg)", "(deg)", "(deg)", "(deg)", "(deg)", ""
            ]
            local_table_data = [table_header, table_header2, table_header3]
            local_table_data.extend([table_d[key] for key in sorted(table_d)])

            if len(all_weights) > 1:
                r1 = ["All"]
                r2 = ["Mean"]
                for data, weights, fmt in [
                    [None, None, None],
                    [all_local_x,
                     all_weights.as_double(), "%9.3f"],
                    [pg_local_x_sigmas,
                     all_refls_count.as_double(), "%9.3f"],
                    [all_local_y,
                     all_weights.as_double(), "%9.3f"],
                    [pg_local_y_sigmas,
                     all_refls_count.as_double(), "%9.3f"],
                    [all_local_z,
                     all_weights.as_double(), "%9.3f"],
                    [pg_local_z_sigmas,
                     all_refls_count.as_double(), "%9.3f"],
                    [all_local_rotX,
                     all_weights.as_double(), "%9.3f"],
                    [
                        pg_local_rotX_sigmas,
                        all_refls_count.as_double(), "%9.3f"
                    ], [all_local_rotY,
                        all_weights.as_double(), "%9.3f"],
                    [
                        pg_local_rotY_sigmas,
                        all_refls_count.as_double(), "%9.3f"
                    ], [all_local_rotZ,
                        all_weights.as_double(), "%9.3f"],
                    [
                        pg_local_rotZ_sigmas,
                        all_refls_count.as_double(), "%9.3f"
                    ]
                ]:
                    r2.append("")
                    if data is None and weights is None:
                        r1.append("")
                        continue
                    stats = flex.mean_and_variance(data, weights)
                    r1.append(fmt % stats.mean())

                r1.append("")
                r2.append("%6.1f" % flex.mean(all_refls_count.as_double()))
                local_table_data.append(r1)
                local_table_data.append(r2)

            print "Detector statistics in local frame of each panel group"
            print table_utils.format(local_table_data,
                                     has_header=3,
                                     justify='center',
                                     delim=" ")
            print "PanelG Id: panel group id or panel id, depending on hierarchy_level. For each panel group, weighted means and weighted standard deviations (Sigmas) for the properties listed below are computed using the matching panel groups between the input experiments."
            print "Radial dist: distance from center of panel group to the beam center"
            print "Local X, Y and Z: mean coordinate relative to the parent panel group"
            print "Rot X, Y and Z: rotation of panel group around parent panel group X, Y and Z axes"
            print "N refls: number of reflections summed between both matching panel groups. This number is used as a weight when computing means and standard deviations."
            print "All: weighted mean of the values shown"
            print

            # Next, deltas in local space
            table_d = {
                d: row
                for d, row in zip(pg_bc_dists, local_delta_table_data)
            }
            table_header = [
                "PanelG", "Radial", "Local dX", "Local dY", "Local dZ",
                "Local dXY", "Local dXYZ", "N"
            ]
            table_header2 = ["Id", "Dist", "", "", "", "", "", "Refls"]
            table_header3 = [
                "", "(mm)", "(microns)", "(microns)", "(mm)", "(microns)",
                "(mm)", ""
            ]
            local_delta_table_data = [
                table_header, table_header2, table_header3
            ]
            local_delta_table_data.extend(
                [table_d[key] for key in sorted(table_d)])

            if len(all_weights) > 1:
                r1 = ["WMean"]
                r2 = ["WStddev"]
                r3 = ["Mean"]
                for data, weights, fmt in [
                    [None, None, None],
                    [
                        all_local_delta_x * 1000,
                        all_refls_count.as_double(), "%9.1f"
                    ],
                    [
                        all_local_delta_y * 1000,
                        all_refls_count.as_double(), "%9.1f"
                    ],
                    [all_local_delta_z,
                     all_refls_count.as_double(), "%9.3f"],
                    [
                        all_local_delta_xy * 1000,
                        all_refls_count.as_double(), "%9.1f"
                    ],
                    [
                        all_local_delta_xyz,
                        all_refls_count.as_double(), "%9.3f"
                    ]
                ]:
                    r3.append("")
                    if data is None and weights is None:
                        r1.append("")
                        r2.append("")
                        continue
                    stats = flex.mean_and_variance(data, weights)
                    r1.append(fmt % stats.mean())
                    r2.append(fmt % stats.gsl_stats_wsd())

                r1.append("")
                r2.append("")
                r3.append("%6.1f" % flex.mean(all_refls_count.as_double()))
                local_delta_table_data.append(r1)
                local_delta_table_data.append(r2)
                local_delta_table_data.append(r3)

            print "Detector deltas relative to panel group origin"
            print table_utils.format(local_delta_table_data,
                                     has_header=3,
                                     justify='center',
                                     delim=" ")
            print "PanelG Id: panel group id or panel id, depending on hierarchy_level. For each panel group, weighted means and weighted standard deviations (Sigmas) for the properties listed below are computed using the matching panel groups between the input experiments."
            print "Radial dist: distance from center of panel group to the beam center"
            print "Local dX, dY and dZ: delta between X, Y and Z coordinates in the local frame of the panel group"
            print "N refls: number of reflections summed between both matching panel groups. This number is used as a weight when computing means and standard deviations."
            print "WMean: weighted mean of the values shown"
            print "WStddev: weighted standard deviation of the values shown"
            print "Mean: mean of the values shown"
            print

        #RMSD table
        table_d = {d: row for d, row in zip(pg_bc_dists, rmsds_table_data)}
        table_header = ["PanelG"]
        table_header2 = ["Id"]
        table_header3 = [""]
        for i in range(len(detectors)):
            table_header.extend(["D%d" % i] * 4)
            table_header2.extend(["RMSD", "rRMSD", "tRMSD", "N refls"])
            table_header3.extend(["(microns)"] * 3)
            table_header3.append("")
        rmsds_table_data = [table_header, table_header2, table_header3]
        rmsds_table_data.extend([table_d[key] for key in sorted(table_d)])

        row = ["Overall"]
        for refls in reflections:
            row.append("%6.1f" % (math.sqrt(
                flex.sum_sq(refls['difference_vector_norms']) / len(refls)) *
                                  1000))
            row.append("%6.1f" % (math.sqrt(
                flex.sum_sq(refls['radial_displacements']) / len(refls)) *
                                  1000))
            row.append("%6.1f" % (math.sqrt(
                flex.sum_sq(refls['transverse_displacements']) / len(refls)) *
                                  1000))
            row.append("%8d" % len(refls))
        rmsds_table_data.append(row)

        print "RMSDs by detector number"
        print table_utils.format(rmsds_table_data,
                                 has_header=3,
                                 justify='center',
                                 delim=" ")
        print "PanelG Id: panel group id or panel id, depending on hierarchy_level"
        print "RMSD: root mean squared deviation between observed and predicted spot locations"
        print "rRMSD: RMSD of radial components of the observed-predicted vectors"
        print "tRMSD: RMSD of transverse components of the observed-predicted vectors"
        print "N refls: number of reflections"

        if params.tag is None:
            tag = ""
        else:
            tag = "%s " % params.tag

        if params.show_plots:
            # Plot the results
            self.detector_plot_dict(detectors[0],
                                    refl_counts,
                                    u"%sN reflections" % tag,
                                    u"%6d",
                                    show=False)
Example #9
0
  def run(self):
    ''' Parse the options. '''
    from dials.util.options import flatten_experiments, flatten_reflections
    # Parse the command line arguments
    params, options = self.parser.parse_args(show_diff_phil=True)
    self.params = params
    experiments = flatten_experiments(params.input.experiments)

    # Find all detector objects
    detectors = experiments.detectors()

    # Verify inputs
    if len(params.input.reflections) == len(detectors) and len(detectors) > 1:
      # case for passing in multiple images on the command line
      assert len(params.input.reflections) == len(detectors)
      reflections = flex.reflection_table()
      for expt_id in xrange(len(detectors)):
        subset = params.input.reflections[expt_id].data
        subset['id'] = flex.int(len(subset), expt_id)
        reflections.extend(subset)
    else:
      # case for passing in combined experiments and reflections
      reflections = flatten_reflections(params.input.reflections)[0]

    detector = detectors[0]

    #from dials.algorithms.refinement.prediction import ExperimentsPredictor
    #ref_predictor = ExperimentsPredictor(experiments, force_stills=experiments.all_stills())

    print "N reflections total:", len(reflections)
    if params.residuals.exclude_outliers:
      reflections = reflections.select(reflections.get_flags(reflections.flags.used_in_refinement))
      print "N reflections used in refinement:", len(reflections)
      print "Reporting only on those reflections used in refinement"

    if self.params.residuals.i_sigi_cutoff is not None:
      sel = (reflections['intensity.sum.value']/flex.sqrt(reflections['intensity.sum.variance'])) >= self.params.residuals.i_sigi_cutoff
      reflections = reflections.select(sel)
      print "After filtering by I/sigi cutoff of %f, there are %d reflections left"%(self.params.residuals.i_sigi_cutoff,len(reflections))

    reflections['difference_vector_norms'] = (reflections['xyzcal.mm']-reflections['xyzobs.mm.value']).norms()

    n = len(reflections)
    rmsd = self.get_weighted_rmsd(reflections)
    print "Dataset RMSD (microns)", rmsd * 1000

    if params.tag is None:
      tag = ''
    else:
      tag = '%s '%params.tag

    # set up delta-psi ratio heatmap
    p = flex.int() # positive
    n = flex.int() # negative
    for i in set(reflections['id']):
      exprefls = reflections.select(reflections['id']==i)
      p.append(len(exprefls.select(exprefls['delpsical.rad']>0)))
      n.append(len(exprefls.select(exprefls['delpsical.rad']<0)))
    plt.hist2d(p, n, bins=30)
    cb = plt.colorbar()
    cb.set_label("N images")
    plt.title(r"%s2D histogram of pos vs. neg $\Delta\Psi$ per image"%tag)
    plt.xlabel(r"N reflections with $\Delta\Psi$ > 0")
    plt.ylabel(r"N reflections with $\Delta\Psi$ < 0")

    self.delta_scalar = 50
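    # Scale factor presumably used by the plotting callbacks to magnify the small
    # deltaXY displacements so they are visible at detector scale (see the deltaXY
    # plot title below)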

    # Iterate through the detectors, computing detector statistics at the per-panel level (IE one statistic per panel)
    # Per panel dictionaries
    rmsds = {}
    refl_counts = {}
    transverse_rmsds = {}
    radial_rmsds = {}
    ttdpcorr = {}
    pg_bc_dists = {}
    mean_delta_two_theta = {}
    # per panelgroup flex arrays
    pg_rmsds = flex.double()
    pg_r_rmsds = flex.double()
    pg_t_rmsds = flex.double()
    pg_refls_count = flex.int()
    pg_refls_count_d = {}
    table_header = ["PG id", "RMSD","Radial", "Transverse", "N refls"]
    table_header2 = ["","(um)","RMSD (um)","RMSD (um)",""]
    table_data = []
    table_data.append(table_header)
    table_data.append(table_header2)

    # Compute a set of radial and transverse displacements for each reflection
    print "Setting up stats..."
    tmp = flex.reflection_table()
    # Need to construct a variety of vectors
    for panel_id, panel in enumerate(detector):
      panel_refls = reflections.select(reflections['panel'] == panel_id)
      bcl = flex.vec3_double()
      tto = flex.double()
      ttc = flex.double()
      # Compute the beam center in lab space (a vector pointing from the origin to where the beam would intersect
      # the panel, if it did intersect the panel)
      for expt_id in set(panel_refls['id']):
        beam = experiments[expt_id].beam
        s0 = beam.get_s0()
        expt_refls = panel_refls.select(panel_refls['id'] == expt_id)
        beam_centre = panel.get_beam_centre_lab(s0)
        bcl.extend(flex.vec3_double(len(expt_refls), beam_centre))
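        # Record observed and predicted two-theta (scattering angle) at each spot's
        # pixel position; their difference feeds the delta two theta diagnostics and
        # the correlation with delta psi further below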
        obs_x, obs_y, _ = expt_refls['xyzobs.px.value'].parts()
        cal_x, cal_y, _ = expt_refls['xyzcal.px'].parts()
        tto.extend(flex.double([panel.get_two_theta_at_pixel(s0, (obs_x[i], obs_y[i])) for i in xrange(len(expt_refls))]))
        ttc.extend(flex.double([panel.get_two_theta_at_pixel(s0, (cal_x[i], cal_y[i])) for i in xrange(len(expt_refls))]))
      panel_refls['beam_centre_lab'] = bcl
      panel_refls['two_theta_obs'] = tto * (180/math.pi)
      panel_refls['two_theta_cal'] = ttc * (180/math.pi) #+ (0.5*panel_refls['delpsical.rad']*panel_refls['two_theta_obs'])
      # Compute obs in lab space
      x, y, _ = panel_refls['xyzobs.mm.value'].parts()
      c = flex.vec2_double(x, y)
      panel_refls['obs_lab_coords'] = panel.get_lab_coord(c)
      # Compute deltaXY in panel space. This vector is relative to the panel origin
      x, y, _ = (panel_refls['xyzcal.mm'] - panel_refls['xyzobs.mm.value']).parts()
      # Convert deltaXY to lab space, subtracting off the panel origin
      panel_refls['delta_lab_coords'] = panel.get_lab_coord(flex.vec2_double(x,y)) - panel.get_origin()
      tmp.extend(panel_refls)
    reflections = tmp
    # The radial vector points from the beam center to the center of the reflection
    radial_vectors = (reflections['obs_lab_coords'] - reflections['beam_centre_lab']).each_normalize()
    # The transverse vector is orthogonal to the radial vector and the beam vector
    transverse_vectors = radial_vectors.cross(reflections['beam_centre_lab']).each_normalize()
    # Compute the radial and transverse components of each deltaXY
    reflections['radial_displacements']     = reflections['delta_lab_coords'].dot(radial_vectors)
    reflections['transverse_displacements'] = reflections['delta_lab_coords'].dot(transverse_vectors)

    # Iterate through the detector at the specified hierarchy level
    for pg_id, pg in enumerate(iterate_detector_at_level(detector.hierarchy(), 0, params.hierarchy_level)):
      pg_msd_sum = 0
      pg_r_msd_sum = 0
      pg_t_msd_sum = 0
      pg_refls = 0
      pg_delpsi = flex.double()
      pg_deltwotheta = flex.double()
      for p in iterate_panels(pg):
        panel_id = id_from_name(detector, p.get_name())
        panel_refls = reflections.select(reflections['panel'] == panel_id)
        n = len(panel_refls)
        pg_refls += n

        delta_x = panel_refls['xyzcal.mm'].parts()[0] - panel_refls['xyzobs.mm.value'].parts()[0]
        delta_y = panel_refls['xyzcal.mm'].parts()[1] - panel_refls['xyzobs.mm.value'].parts()[1]

        tmp = flex.sum((delta_x**2)+(delta_y**2))
        pg_msd_sum += tmp

        r = panel_refls['radial_displacements']
        t = panel_refls['transverse_displacements']
        pg_r_msd_sum += flex.sum_sq(r)
        pg_t_msd_sum += flex.sum_sq(t)

        pg_delpsi.extend(panel_refls['delpsical.rad']*180/math.pi)
        pg_deltwotheta.extend(panel_refls['two_theta_obs'] - panel_refls['two_theta_cal'])

      bc = col(pg.get_beam_centre_lab(s0))
      ori = get_center(pg)
      pg_bc_dists[pg.get_name()] = (ori-bc).length()
      if len(pg_deltwotheta) > 0:
        mean_delta_two_theta[pg.get_name()] = flex.mean(pg_deltwotheta)
      else:
        mean_delta_two_theta[pg.get_name()] = 0

      if pg_refls == 0:
        pg_rmsd = pg_r_rmsd = pg_t_rmsd = 0
      else:
        pg_rmsd = math.sqrt(pg_msd_sum/pg_refls) * 1000
        pg_r_rmsd = math.sqrt(pg_r_msd_sum/pg_refls) * 1000
        pg_t_rmsd = math.sqrt(pg_t_msd_sum/pg_refls) * 1000
      pg_rmsds.append(pg_rmsd)
      pg_r_rmsds.append(pg_r_rmsd)
      pg_t_rmsds.append(pg_t_rmsd)
      pg_refls_count.append(pg_refls)
      pg_refls_count_d[pg.get_name()] = pg_refls
      table_data.append(["%d"%pg_id, "%.1f"%pg_rmsd, "%.1f"%pg_r_rmsd, "%.1f"%pg_t_rmsd, "%6d"%pg_refls])

      refl_counts[pg.get_name()] = pg_refls
      if pg_refls == 0:
        rmsds[pg.get_name()] = -1
        radial_rmsds[pg.get_name()] = -1
        transverse_rmsds[pg.get_name()] = -1
        ttdpcorr[pg.get_name()] = -1
      else:
        rmsds[pg.get_name()] = pg_rmsd
        radial_rmsds[pg.get_name()]     = pg_r_rmsd
        transverse_rmsds[pg.get_name()] = pg_t_rmsd

        lc = flex.linear_correlation(pg_delpsi, pg_deltwotheta)
        ttdpcorr[pg.get_name()] = lc.coefficient()


    r1 = ["Weighted mean"]
    r2 = ["Weighted stddev"]
    if len(pg_rmsds) > 1:
      stats = flex.mean_and_variance(pg_rmsds, pg_refls_count.as_double())
      r1.append("%.1f"%stats.mean())
      r2.append("%.1f"%stats.gsl_stats_wsd())
      stats = flex.mean_and_variance(pg_r_rmsds, pg_refls_count.as_double())
      r1.append("%.1f"%stats.mean())
      r2.append("%.1f"%stats.gsl_stats_wsd())
      stats = flex.mean_and_variance(pg_t_rmsds, pg_refls_count.as_double())
      r1.append("%.1f"%stats.mean())
      r2.append("%.1f"%stats.gsl_stats_wsd())
    else:
      r1.extend([""]*3)
      r2.extend([""]*3)
    r1.append("")
    r2.append("")
    table_data.append(r1)
    table_data.append(r2)
    table_data.append(["Mean", "", "", "", "%8.1f"%flex.mean(pg_refls_count.as_double())])

    from libtbx import table_utils
    print "Detector statistics.  Angles in degrees, RMSDs in microns"
    print table_utils.format(table_data,has_header=2,justify='center',delim=" ")

    self.histogram(reflections, '%sDifference vector norms (mm)'%tag)

    if params.show_plots:
      if self.params.tag is None:
        t = ""
      else:
        t = "%s "%self.params.tag
      self.image_rmsd_histogram(reflections, tag)

      # Plots! these are plots with callbacks to draw on individual panels
      self.detector_plot_refls(detector, reflections, '%sOverall positional displacements (mm)'%tag, show=False, plot_callback=self.plot_obs_colored_by_deltas)
      self.detector_plot_refls(detector, reflections, '%sRadial positional displacements (mm)'%tag, show=False, plot_callback=self.plot_obs_colored_by_radial_deltas)
      self.detector_plot_refls(detector, reflections, '%sTransverse positional displacements (mm)'%tag, show=False, plot_callback=self.plot_obs_colored_by_transverse_deltas)
      self.detector_plot_refls(detector, reflections, r'%s$\Delta\Psi$'%tag, show=False, plot_callback=self.plot_obs_colored_by_deltapsi, colorbar_units=r"$\circ$")
      self.detector_plot_refls(detector, reflections, r'%s$\Delta$XY*%s'%(tag, self.delta_scalar), show=False, plot_callback=self.plot_deltas)
      self.detector_plot_refls(detector, reflections, '%sSP Manual CDF'%tag, show=False, plot_callback=self.plot_cdf_manually)
      self.detector_plot_refls(detector, reflections, r'%s$\Delta$XY Histograms'%tag, show=False, plot_callback=self.plot_histograms)
      self.detector_plot_refls(detector, reflections, r'%sRadial displacements vs. $\Delta\Psi$, colored by $\Delta$XY'%tag, show=False, plot_callback=self.plot_radial_displacements_vs_deltapsi)
      self.detector_plot_refls(detector, reflections, r'%sDistance vector norms'%tag, show=False, plot_callback=self.plot_difference_vector_norms_histograms)

      # Plot intensity vs. radial_displacement
      fig = plt.figure()
      panel_id = 15
      panel_refls = reflections.select(reflections['panel'] == panel_id)
      a = panel_refls['radial_displacements']
      b = panel_refls['intensity.sum.value']
      sel = (a > -0.2) & (a < 0.2) & (b < 50000)
      plt.hist2d(a.select(sel), b.select(sel), bins=100)
      plt.title("%s2D histogram of intensity vs. radial displacement for panel %d"%(tag, panel_id))
      plt.xlabel("Radial displacement (mm)")
      plt.ylabel("Intensity")
      ax = plt.colorbar()
      ax.set_label("Counts")

      # Plot delta 2theta vs. deltapsi
      n_bins = 10
      bin_size = len(reflections)//n_bins
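      # Split reflections into n_bins equal-count bins of observed two-theta so each
      # bin carries comparable statistics (quantile binning rather than equal-width bins)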
      bin_low = []
      bin_high = []
      data = flex.sorted(reflections['two_theta_obs'])
      for i in xrange(n_bins):
        bin_low = data[i*bin_size]
        if (i+1)*bin_size >= len(reflections):
          bin_high = data[-1]
        else:
          bin_high = data[(i+1)*bin_size]
        refls = reflections.select((reflections['two_theta_obs'] >= bin_low) &
                                   (reflections['two_theta_obs'] <= bin_high))
        a = refls['delpsical.rad']*180/math.pi
        b = refls['two_theta_obs'] - refls['two_theta_cal']
        fig = plt.figure()
        sel = (a > -0.2) & (a < 0.2) & (b > -0.05) & (b < 0.05)
        plt.hist2d(a.select(sel), b.select(sel), bins=50, range = [[-0.2, 0.2], [-0.05, 0.05]])
        cb = plt.colorbar()
        cb.set_label("N reflections")
        plt.title(r'%sBin %d (%.02f, %.02f 2$\Theta$) $\Delta2\Theta$ vs. $\Delta\Psi$. Showing %d of %d refls'%(tag,i,bin_low,bin_high,len(a.select(sel)),len(a)))
        plt.xlabel(r'$\Delta\Psi \circ$')
        plt.ylabel(r'$\Delta2\Theta \circ$')

      # Plot delta 2theta vs. 2theta
      a = reflections['two_theta_obs']#[:71610]
      b = reflections['two_theta_obs'] - reflections['two_theta_cal']
      fig = plt.figure()
      limits = -0.05, 0.05
      sel = (b > limits[0]) & (b < limits[1])
      plt.hist2d(a.select(sel), b.select(sel), bins=100, range=((0,50), limits))
      plt.clim((0,100))
      cb = plt.colorbar()
      cb.set_label("N reflections")
      plt.title(r'%s$\Delta2\Theta$ vs. 2$\Theta$. Showing %d of %d refls'%(tag,len(a.select(sel)),len(a)))
      plt.xlabel(r'2$\Theta \circ$')
      plt.ylabel(r'$\Delta2\Theta \circ$')

      # calc the trendline
      z = np.polyfit(a.select(sel), b.select(sel), 1)
      print 'y=%.7fx+(%.7f)'%(z[0],z[1])

      # Plots with single values per panel
      self.detector_plot_dict(detector, refl_counts, u"%s N reflections"%t, u"%6d", show=False)
      self.detector_plot_dict(detector, rmsds, "%s Positional RMSDs (microns)"%t, u"%4.1f", show=False)
      self.detector_plot_dict(detector, radial_rmsds, "%s Radial RMSDs (microns)"%t, u"%4.1f", show=False)
      self.detector_plot_dict(detector, transverse_rmsds, "%s Transverse RMSDs (microns)"%t, u"%4.1f", show=False)
      self.detector_plot_dict(detector, ttdpcorr, r"%s $\Delta2\Theta$ vs. $\Delta\Psi$ CC"%t, u"%5.3f", show=False)

      self.plot_unitcells(experiments)
      self.plot_data_by_two_theta(reflections, tag)

      # Plot data by panel group
      sorted_values = sorted(pg_bc_dists.values())
      vdict = {}
      for k in pg_bc_dists:
        vdict[pg_bc_dists[k]] = k
      sorted_keys = [vdict[v] for v in sorted_values if vdict[v] in rmsds]
      x = [v for v in sorted_values if vdict[v] in rmsds]

      self.plot_multi_data(x,
                           [[pg_refls_count_d[k] for k in sorted_keys],
                            ([rmsds[k] for k in sorted_keys],
                             [radial_rmsds[k] for k in sorted_keys],
                             [transverse_rmsds[k] for k in sorted_keys]),
                            [radial_rmsds[k]/transverse_rmsds[k] for k in sorted_keys],
                            [mean_delta_two_theta[k] for k in sorted_keys]],
                           "Panel group distance from beam center (mm)",
                           ["N reflections",
                            ("Overall RMSD",
                             "Radial RMSD",
                             "Transverse RMSD"),
                            "R/T RMSD ratio",
                            "Delta two theta"],
                           ["N reflections",
                            "RMSD (microns)",
                            "R/T RMSD ratio",
                            "Delta two theta (degrees)"],
                           "%sData by panelgroup"%tag)

      if self.params.save_pdf:
        pp = PdfPages('residuals_%s.pdf'%(tag.strip()))
        for i in plt.get_fignums():
          pp.savefig(plt.figure(i))
        pp.close()
      else:
        plt.show()
    def run(self):
        ''' Parse the options. '''
        from dials.util.options import flatten_experiments, flatten_reflections
        from dxtbx.model.experiment.experiment_list import ExperimentList
        from scitbx.math import five_number_summary
        # Parse the command line arguments
        params, options = self.parser.parse_args(show_diff_phil=True)
        self.params = params
        experiments = flatten_experiments(params.input.experiments)
        reflections = flatten_reflections(params.input.reflections)

        assert len(reflections) == 1
        reflections = reflections[0]
        print "Found", len(reflections), "reflections", "and", len(
            experiments), "experiments"

        difference_vector_norms = (reflections['xyzcal.mm'] -
                                   reflections['xyzobs.mm.value']).norms()

        data = flex.double()
        counts = flex.double()
        for i in xrange(len(experiments)):
            dvns = difference_vector_norms.select(reflections['id'] == i)
            counts.append(len(dvns))
            if len(dvns) == 0:
                data.append(0)
                continue
            rmsd = math.sqrt(flex.sum_sq(dvns) / len(dvns))
            data.append(rmsd)
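        # Convert the per-image RMSDs from mm to microns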
        data *= 1000
        subset = data.select(counts > 0)
        print len(subset), "experiments with > 0 reflections"

        if params.show_plots:
            h = flex.histogram(subset, n_slots=40)
            fig = plt.figure()
            ax = fig.add_subplot(111)
            ax.plot(h.slot_centers().as_numpy_array(),
                    h.slots().as_numpy_array(), '-')
            plt.title("Histogram of %d image RMSDs" % len(subset))

            fig = plt.figure()
            plt.boxplot(subset, vert=False)
            plt.title("Boxplot of %d image RMSDs" % len(subset))
            plt.show()

        outliers = counts == 0
        min_x, q1_x, med_x, q3_x, max_x = five_number_summary(subset)
        print "Five number summary of RMSDs (microns): min %.1f, q1 %.1f, med %.1f, q3 %.1f, max %.1f" % (
            min_x, q1_x, med_x, q3_x, max_x)
        iqr_x = q3_x - q1_x
        cut_x = params.iqr_multiplier * iqr_x
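        # Tukey-style cut: flag images whose RMSD exceeds Q3 + iqr_multiplier * IQR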
        outliers.set_selected(data > q3_x + cut_x, True)
        #outliers.set_selected(col < q1_x - cut_x, True) # Don't throw away the images that are outliers in the 'good' direction!

        filtered_reflections = flex.reflection_table()
        filtered_experiments = ExperimentList()
        for i in xrange(len(experiments)):
            if outliers[i]:
                continue
            refls = reflections.select(reflections['id'] == i)
            refls['id'] = flex.int(len(refls), len(filtered_experiments))
            filtered_reflections.extend(refls)
            filtered_experiments.append(experiments[i])

        zeroes = counts == 0
        n_zero = len(counts.select(zeroes))
        print "Removed %d bad experiments and %d experiments with zero reflections, out of %d (%%%.1f)" % (
            len(experiments) - len(filtered_experiments) - n_zero, n_zero,
            len(experiments),
            100. * (len(experiments) - len(filtered_experiments)) /
            len(experiments))
        from dxtbx.model.experiment.experiment_list import ExperimentListDumper
        dump = ExperimentListDumper(filtered_experiments)
        dump.as_json(params.output.filtered_experiments)

        filtered_reflections.as_pickle(params.output.filtered_reflections)