def integration_concept(self,
                            image_number=0,
                            cb_op_to_primitive=None,
                            verbose=False,
                            **kwargs):
        """Match predicted spots to observed spotfinder spots and build masks.

        Pairs each prediction with a nearby observed spot via an approximate
        nearest-neighbour search, rejects correction-vector outliers by
        fitting a Rayleigh distribution to the best-modeled third of the
        match distances (Sauter & Poon 2010), then configures the optimized
        integration section and collects background/signal masks.

        :param image_number: index into self.frame_numbers / self.mask_focus
        :param cb_op_to_primitive: change-of-basis operator (or None)
            forwarded to get_predictions_accounting_for_centering
        :param verbose: if True, print idx_cutoff and correction statistics
        :param kwargs: keys consumed here include "domain_size_ang",
            "verbose_cv", "user-reentrant" and "background_factor"; the rest
            are forwarded to the prediction step.

        Side effects: sets self.image_number, self.cell,
        self.reserve_hkllist_for_signal_search, self.correction_vectors,
        self.r_residual, self.BSmasks, self.detector_xy_draft, and (for the
        "user_supplied" model on first pass) self.indexed_pairs / self.spots
        before an early return.  Raises Sorry when too few spots or matches
        are available.
        """
        self.image_number = image_number
        NEAR = 10  # number of nearest neighbours requested per ANN query
        pxlsz = self.pixel_size  # scalar pixel size; coordinates are divided by it to get pixels
        self.get_predictions_accounting_for_centering(cb_op_to_primitive,
                                                      **kwargs)
        FWMOSAICITY = self.inputai.getMosaicity()
        DOMAIN_SZ_ANG = kwargs.get("domain_size_ang",
                                   self.__dict__.get("actual", 0))
        # refineflag is 0 when no (or zero) domain_size_ang was supplied, else 1
        refineflag = {True: 0, False: 1}[kwargs.get("domain_size_ang", 0) == 0]
        self.inputpd["symmetry"].show_summary(
            prefix="EXCURSION%1d REPORT FWMOS= %6.4f DOMAIN= %6.1f " %
            (refineflag, FWMOSAICITY, DOMAIN_SZ_ANG))
        from annlib_ext import AnnAdaptor
        self.cell = self.inputai.getOrientation().unit_cell()
        # ANN query: flat (x, y) pixel coordinates of every prediction.
        query = flex.double()
        for pred in self.predicted:  # predicted spot coord in pixels
            query.append(pred[0] / pxlsz)
            query.append(pred[1] / pxlsz)
        self.reserve_hkllist_for_signal_search = self.hkllist

        # ANN reference: flat (x, y) centre-of-mass coordinates of observed spots.
        reference = flex.double()
        spots = self.get_observations_with_outlier_removal()

        assert len(
            spots) > NEAR  # Can't do spot/pred matching with too few spots
        for spot in spots:
            reference.append(spot.ctr_mass_x())
            reference.append(spot.ctr_mass_y())

        # For each prediction, find its NEAR nearest observed spots.
        IS_adapt = AnnAdaptor(data=reference, dim=2, k=NEAR)
        IS_adapt.query(query)
        print "Calculate correction vectors for %d observations & %d predictions" % (
            len(spots), len(self.predicted))
        indexed_pairs_provisional = []
        correction_vectors_provisional = []
        c_v_p_flex = flex.vec3_double()
        # Maximum obs/pred distance for a match; presumably the smallest
        # dimension of this image's spotfinder mask -- TODO confirm.
        idx_cutoff = float(min(self.mask_focus[image_number]))
        if verbose:
            print "idx_cutoff distance in pixels", idx_cutoff
        if not self.horizons_phil.integration.enable_one_to_one_safeguard:
            # legacy code, no safeguard against many-to-one predicted-to-observation mapping
            for i in range(len(self.predicted)):  # loop over predicteds
                #for n in range(NEAR): # loop over near spotfinder spots
                for n in range(1):  # only consider the nearest spotfinder spot
                    Match = dict(spot=IS_adapt.nn[i * NEAR + n], pred=i)
                    if n == 0 and math.sqrt(
                            IS_adapt.distances[i * NEAR + n]) < idx_cutoff:
                        indexed_pairs_provisional.append(Match)

                        # observed-minus-predicted position, in pixels
                        vector = matrix.col([
                            spots[Match["spot"]].ctr_mass_x() -
                            self.predicted[Match["pred"]][0] / pxlsz,
                            spots[Match["spot"]].ctr_mass_y() -
                            self.predicted[Match["pred"]][1] / pxlsz
                        ])
                        correction_vectors_provisional.append(vector)
                        c_v_p_flex.append((vector[0], vector[1], 0.))
        else:
            # Safeguarded path: each observed spot keeps only its closest prediction.
            one_to_one = {}
            for i in range(len(self.predicted)):  # loop over predicteds
                annresultidx = i * NEAR
                obsidx = IS_adapt.nn[annresultidx]
                this_distancesq = IS_adapt.distances[annresultidx]
                if obsidx not in one_to_one or \
                   this_distancesq < one_to_one[obsidx]["distancesq"]:
                    if math.sqrt(this_distancesq) < idx_cutoff:
                        one_to_one[obsidx] = dict(spot=obsidx,
                                                  pred=i,
                                                  distancesq=this_distancesq)
            for key, value in one_to_one.items():
                indexed_pairs_provisional.append(value)
                vector = matrix.col([
                    spots[value["spot"]].ctr_mass_x() -
                    self.predicted[value["pred"]][0] / pxlsz,
                    spots[value["spot"]].ctr_mass_y() -
                    self.predicted[value["pred"]][1] / pxlsz
                ])
                correction_vectors_provisional.append(vector)
                c_v_p_flex.append((vector[0], vector[1], 0.))

        print "... %d provisional matches" % len(
            correction_vectors_provisional),
        print "r.m.s.d. in pixels: %5.2f" % (math.sqrt(
            flex.mean(c_v_p_flex.dot(c_v_p_flex))))

        if self.horizons_phil.integration.enable_residual_scatter:
            from matplotlib import pyplot as plt
            fig = plt.figure()
            for cv in correction_vectors_provisional:
                plt.plot([cv[1]], [-cv[0]], "b.")
            plt.title(" %d matches, r.m.s.d. %5.2f pixels" %
                      (len(correction_vectors_provisional),
                       math.sqrt(flex.mean(c_v_p_flex.dot(c_v_p_flex)))))
            plt.axes().set_aspect("equal")
            self.show_figure(plt, fig, "res")
            plt.close()

        if self.horizons_phil.integration.enable_residual_map:
            from matplotlib import pyplot as plt
            fig = plt.figure()
            for match, cv in zip(indexed_pairs_provisional,
                                 correction_vectors_provisional):
                # observed (red), predicted (green), and a 10x-exaggerated
                # correction vector (blue) plotted in (y, -x) coordinates
                plt.plot([spots[match["spot"]].ctr_mass_y()],
                         [-spots[match["spot"]].ctr_mass_x()], "r.")
                plt.plot([self.predicted[match["pred"]][1] / pxlsz],
                         [-self.predicted[match["pred"]][0] / pxlsz], "g.")
                plt.plot([
                    spots[match["spot"]].ctr_mass_y(),
                    spots[match["spot"]].ctr_mass_y() + 10. * cv[1]
                ], [
                    -spots[match["spot"]].ctr_mass_x(),
                    -spots[match["spot"]].ctr_mass_x() - 10. * cv[0]
                ], 'b-')
            plt.xlim([0, float(self.inputpd["size2"])])
            plt.ylim([-float(self.inputpd["size1"]), 0])
            plt.title(" %d matches, r.m.s.d. %5.2f pixels" %
                      (len(correction_vectors_provisional),
                       math.sqrt(flex.mean(c_v_p_flex.dot(c_v_p_flex)))))
            plt.axes().set_aspect("equal")
            self.show_figure(plt, fig, "map")
            plt.close()
        # insert code here to remove correction length outliers...
        # they are causing terrible
        # problems for finding legitimate correction vectors (print out the list)
        # also remove outliers for the purpose of reporting RMS
        outlier_rejection = True  # hard-wired on; the else branch below is kept for reference
        # NOTE(review): slip_callbacks is not imported inside this method;
        # assumed bound at module scope -- confirm.
        cache_refinement_spots = getattr(slip_callbacks.slip_callback,
                                         "requires_refinement_spots", False)
        if outlier_rejection:
            correction_lengths = flex.double(
                [v.length() for v in correction_vectors_provisional])
            clorder = flex.sort_permutation(correction_lengths)
            sorted_cl = correction_lengths.select(clorder)

            ACCEPTABLE_LIMIT = 2
            limit = int(
                0.33 * len(sorted_cl)
            )  # best 1/3 of data are assumed to be correctly modeled.
            if (limit <= ACCEPTABLE_LIMIT):
                raise Sorry(
                    "Not enough indexed spots to reject outliers; have %d need >%d"
                    % (limit, ACCEPTABLE_LIMIT))

            # Empirical CDF ordinates (i/N) for the sorted correction lengths.
            y_data = flex.double(len(sorted_cl))
            for i in range(len(y_data)):
                y_data[i] = float(i) / float(len(y_data))

            # ideas are explained in Sauter & Poon (2010) J Appl Cryst 43, 611-616.
            from rstbx.outlier_spots.fit_distribution import fit_cdf, rayleigh
            fitted_rayleigh = fit_cdf(x_data=sorted_cl[0:limit],
                                      y_data=y_data[0:limit],
                                      distribution=rayleigh)

            # Expected correction length at each CDF ordinate under the fit.
            inv_cdf = [
                fitted_rayleigh.distribution.inv_cdf(cdf) for cdf in y_data
            ]

            #print "SORTED LIST OF ",len(sorted_cl), "with sigma",fitted_rayleigh.distribution.sigma
            indexed_pairs = []
            correction_vectors = []
            self.correction_vectors = []
            # Accept candidates in ascending correction-length order until the
            # first one exceeds the fitted expectation by > 1 sigma.
            for icand in range(len(sorted_cl)):
                # somewhat arbitrary sigma = 1.0 cutoff for outliers
                if (sorted_cl[icand] - inv_cdf[icand]
                    ) / fitted_rayleigh.distribution.sigma > 1.0:
                    break
                indexed_pairs.append(indexed_pairs_provisional[clorder[icand]])
                correction_vectors.append(
                    correction_vectors_provisional[clorder[icand]])
                if cache_refinement_spots:
                    self.spotfinder.images[self.frame_numbers[
                        self.image_number]]["refinement_spots"].append(
                            spots[indexed_pairs[-1]["spot"]])
                if kwargs.get("verbose_cv") == True:
                    print "CV OBSCENTER %7.2f %7.2f REFINEDCENTER %7.2f %7.2f" % (
                        float(self.inputpd["size1"]) / 2.,
                        float(self.inputpd["size2"]) / 2.,
                        self.inputai.xbeam() / pxlsz,
                        self.inputai.ybeam() / pxlsz),
                    print "OBSSPOT %7.2f %7.2f PREDSPOT %7.2f %7.2f" % (
                        spots[indexed_pairs[-1]["spot"]].ctr_mass_x(),
                        spots[indexed_pairs[-1]["spot"]].ctr_mass_y(),
                        self.predicted[indexed_pairs[-1]["pred"]][0] / pxlsz,
                        self.predicted[indexed_pairs[-1]["pred"]][1] / pxlsz),
                    the_hkl = self.hkllist[indexed_pairs[-1]["pred"]]
                    print "HKL %4d %4d %4d" % the_hkl, "%2d" % self.setting_id,
                    radial, azimuthal = spots[indexed_pairs[-1][
                        "spot"]].get_radial_and_azimuthal_size(
                            self.inputai.xbeam() / pxlsz,
                            self.inputai.ybeam() / pxlsz)
                    print "RADIALpx %5.3f AZIMUTpx %5.3f" % (radial, azimuthal)

                # Store a list of correction vectors in self.
                radial, azimuthal = spots[
                    indexed_pairs[-1]['spot']].get_radial_and_azimuthal_size(
                        self.inputai.xbeam() / pxlsz,
                        self.inputai.ybeam() / pxlsz)
                self.correction_vectors.append(
                    dict(obscenter=(float(self.inputpd['size1']) / 2,
                                    float(self.inputpd['size2']) / 2),
                         refinedcenter=(self.inputai.xbeam() / pxlsz,
                                        self.inputai.ybeam() / pxlsz),
                         obsspot=(
                             spots[indexed_pairs[-1]['spot']].ctr_mass_x(),
                             spots[indexed_pairs[-1]['spot']].ctr_mass_y()),
                         predspot=(
                             self.predicted[indexed_pairs[-1]['pred']][0] /
                             pxlsz,
                             self.predicted[indexed_pairs[-1]['pred']][1] /
                             pxlsz),
                         hkl=(self.hkllist[indexed_pairs[-1]['pred']][0],
                              self.hkllist[indexed_pairs[-1]['pred']][1],
                              self.hkllist[indexed_pairs[-1]['pred']][2]),
                         setting_id=self.setting_id,
                         radial=radial,
                         azimuthal=azimuthal))

            print "After outlier rejection %d indexed spotfinder spots remain." % len(
                indexed_pairs)
            if False:  # diagnostic plot of the CDF fit; disabled
                rayleigh_cdf = [
                    fitted_rayleigh.distribution.cdf(x=sorted_cl[c])
                    for c in range(len(sorted_cl))
                ]
                from matplotlib import pyplot as plt
                plt.plot(sorted_cl, y_data, "r+")
                #plt.plot(sorted_cl,rayleigh_cdf,"g.")
                plt.plot(inv_cdf, y_data, "b.")
                plt.show()
        else:
            indexed_pairs = indexed_pairs_provisional
            correction_vectors = correction_vectors_provisional
        ########### finished with outlier rejection

        self.inputpd["symmetry"].show_summary(prefix="SETTING ")

        is_triclinic = (self.setting_id == 1)
        if is_triclinic:
            self.triclinic_pairs = [
                dict(pred=self.hkllist[a["pred"]], spot=a["spot"])
                for a in indexed_pairs
            ]

        if self.horizons_phil.integration.model == "user_supplied":
            # First pass only (no "user-reentrant" marker): hand off to user code.
            if kwargs.get("user-reentrant", None) == None:
                from cxi_user import post_outlier_rejection
                self.indexed_pairs = indexed_pairs
                self.spots = spots
                post_outlier_rejection(self, image_number, cb_op_to_primitive,
                                       self.horizons_phil, kwargs)
                return

        ########### finished with user-supplied code

        if self.horizons_phil.integration.spot_shape_verbose:
            from rstbx.new_horizons.spot_shape import spot_shape_verbose
            spot_shape_verbose(rawdata=self.imagefiles.images[
                self.image_number].linearintdata,
                               beam_center_pix=matrix.col(
                                   (self.inputai.xbeam() / pxlsz,
                                    self.inputai.ybeam() / pxlsz)),
                               indexed_pairs=indexed_pairs,
                               spotfinder_observations=spots,
                               distance_mm=self.inputai.distance(),
                               mm_per_pixel=pxlsz,
                               hkllist=self.hkllist,
                               unit_cell=self.cell,
                               wavelength_ang=self.inputai.wavelength)

        #Other checks to be implemented (future):
        # spot is within active area of detector on a circular detector such as the Mar IP
        # integration masks do not overlap; or deconvolute

        correction_lengths = flex.double(
            [v.length() for v in correction_vectors])
        if verbose:
            print "average correction %5.2f over %d vectors" % (
                flex.mean(correction_lengths), len(correction_lengths)),
            print "or %5.2f mm." % (pxlsz * flex.mean(correction_lengths))
        self.r_residual = pxlsz * flex.mean(correction_lengths)

        #assert len(indexed_pairs)>NEAR # must have enough indexed spots
        if (len(indexed_pairs) <= NEAR):
            raise Sorry("Not enough indexed spots, only found %d, need %d" %
                        (len(indexed_pairs), NEAR))

        # Rebuild the ANN reference from the surviving (indexed) spots only.
        reference = flex.double()
        for item in indexed_pairs:
            reference.append(spots[item["spot"]].ctr_mass_x())
            reference.append(spots[item["spot"]].ctr_mass_y())

        PS_adapt = AnnAdaptor(data=reference, dim=2, k=NEAR)
        PS_adapt.query(query)

        self.BSmasks = []
        #self.null_correction_mapping( predicted=self.predicted,
        #                                    correction_vectors = correction_vectors,
        #                                    IS_adapt = IS_adapt,
        #                                    spots = spots)
        self.positional_correction_mapping(
            predicted=self.predicted,
            correction_vectors=correction_vectors,
            PS_adapt=PS_adapt,
            IS_adapt=IS_adapt,
            spots=spots)

        # which spots are close enough to interfere with background?
        MAXOVER = 6
        OS_adapt = AnnAdaptor(data=query, dim=2, k=MAXOVER)  #six near nbrs
        OS_adapt.query(query)
        if self.mask_focus[image_number] is None:
            raise Sorry(
                "No observed/predicted spot agreement; no Spotfinder masks; skip integration"
            )
        # Interference radius: twice the largest mask dimension for this image.
        nbr_cutoff = 2.0 * max(self.mask_focus[image_number])
        FRAME = int(nbr_cutoff / 2)
        #print "The overlap cutoff is %d pixels"%nbr_cutoff
        nbr_cutoff_sq = nbr_cutoff * nbr_cutoff

        #print "Optimized C++ section...",
        # Pass cutoffs/gain down to the optimized section noted above.
        self.set_frame(FRAME)
        self.set_background_factor(kwargs["background_factor"])
        self.set_nbr_cutoff_sq(nbr_cutoff_sq)
        self.set_guard_width_sq(self.horizons_phil.integration.guard_width_sq)
        self.set_detector_gain(self.horizons_phil.integration.detector_gain)
        # Flatten self.sorted (pairs) into one interleaved int array.
        flex_sorted = flex.int()
        for item in self.sorted:
            flex_sorted.append(item[0])
            flex_sorted.append(item[1])

        if self.horizons_phil.integration.mask_pixel_value is not None:
            self.set_mask_pixel_val(
                self.horizons_phil.integration.mask_pixel_value)

        image_obj = self.imagefiles.imageindex(
            self.frame_numbers[self.image_number])
        image_obj.read()
        rawdata = image_obj.linearintdata  # assume image #1

        if self.inputai.active_areas != None:
            self.detector_xy_draft = self.safe_background(
                rawdata=rawdata,
                predicted=self.predicted,
                OS_adapt=OS_adapt,
                sorted=flex_sorted,
                tiles=self.inputai.active_areas.IT,
                tile_id=self.inputai.active_areas.tile_id)
        else:
            self.detector_xy_draft = self.safe_background(
                rawdata=rawdata,
                predicted=self.predicted,
                OS_adapt=OS_adapt,
                sorted=flex_sorted)
        # One background/signal mask dict per prediction, built from the flat
        # key pairs returned by get_bsmask: (keys[k], keys[k+1]) -> True.
        for i in range(len(self.predicted)):  # loop over predicteds
            B_S_mask = {}
            keys = self.get_bsmask(i)
            for k in range(0, len(keys), 2):
                B_S_mask[(keys[k], keys[k + 1])] = True
            self.BSmasks.append(B_S_mask)
        #print "Done"
        return
  def integration_concept_detail(self, experiments, reflections, spots,image_number,cb_op_to_primitive,**kwargs):
    """Variant of integration_concept driven by dxtbx/DIALS-style inputs.

    Key differences from integration_concept: detector geometry and symmetry
    come from `experiments`; the pixel size is per-axis (pxlsz[0], pxlsz[1]);
    observed/predicted pairing is by Miller-index lookup in self.hkllist
    rather than by nearest-neighbour distance; and distance-based outlier
    rejection is skipped -- all provisional matches are retained (reordered
    by correction length) while self.correction_vectors is still populated.

    Side effects: sets self.image_number, self.cell, self.DOMAIN_SZ_ANG,
    self.N_correction_vectors, self.rmsd_px, self.correction_vectors,
    self.r_residual, self.BSmasks, self.detector_xy_draft, and replaces
    self.inputpd["symmetry"]; may return early for the "user_supplied"
    model.  Raises Sorry when too few indexed spots or no masks exist.
    """
    detector = experiments[0].detector
    crystal = experiments[0].crystal
    from cctbx.crystal import symmetry
    c_symmetry = symmetry(space_group = crystal.get_space_group(), unit_cell = crystal.get_unit_cell())

    self.image_number = image_number
    NEAR = 10  # nearest-neighbour count for the ANN searches below
    pxlsz = detector[0].get_pixel_size()  # two-element pixel size, indexed [0]/[1] below

    Predicted = self.get_predictions_accounting_for_centering(experiments,reflections,cb_op_to_primitive,**kwargs)

    FWMOSAICITY = self.inputai.getMosaicity()
    self.DOMAIN_SZ_ANG = kwargs.get("domain_size_ang",  self.__dict__.get("actual",0)  )
    # refineflag: 0 when no (or zero) domain_size_ang was supplied, else 1
    refineflag = {True:0,False:1}[kwargs.get("domain_size_ang",0)==0]
    c_symmetry.show_summary(prefix="EXCURSION%1d REPORT FWMOS= %6.4f DOMAIN= %6.1f "%(refineflag,FWMOSAICITY,self.DOMAIN_SZ_ANG))
    from annlib_ext import AnnAdaptor
    self.cell = c_symmetry.unit_cell()

    # ANN query: flat (x, y) pixel coordinates of every prediction.
    query = flex.double()
    print len(self.predicted)  # NOTE(review): bare debug print

    for pred in self.predicted: # predicted spot coord in pixels
      query.append(pred[0]/pxlsz[0])
      query.append(pred[1]/pxlsz[1])

    self.reserve_hkllist_for_signal_search = self.hkllist

    reference = flex.double()

    assert self.length>NEAR# Can't do spot/pred matching with too few spots
    for spot in spots:
      reference.append(spot.ctr_mass_x())
      reference.append(spot.ctr_mass_y())

    IS_adapt = AnnAdaptor(data=reference,dim=2,k=NEAR)
    IS_adapt.query(query)
    idx_cutoff = float(min(self.mask_focus[image_number]))

    from rstbx.apps.slip_helpers import slip_callbacks
    cache_refinement_spots = getattr(slip_callbacks.slip_callback,"requires_refinement_spots",False)

    # Pair observations with predictions by Miller index rather than distance.
    indexed_pairs_provisional = []
    correction_vectors_provisional = []
    c_v_p_flex = flex.vec3_double()
    this_setting_matched_indices = reflections["miller_index"]
    for j,item in enumerate(this_setting_matched_indices):
      this_setting_index = self.hkllist.first_index(item)
      # NOTE(review): a valid match at hkllist position 0 is falsy and would
      # be dropped by this truth test -- confirm first_index's contract.
      if this_setting_index:
        Match = dict(spot=j,pred=this_setting_index)
        indexed_pairs_provisional.append(Match)
        # observed-minus-predicted position, in pixels
        vector = matrix.col(
            [reflections["xyzobs.px.value"][j][0] - self.predicted[Match["pred"]][0]/pxlsz[0],
             reflections["xyzobs.px.value"][j][1] - self.predicted[Match["pred"]][1]/pxlsz[1]])
        correction_vectors_provisional.append(vector)
        c_v_p_flex.append((vector[0],vector[1],0.))
    self.N_correction_vectors = len(correction_vectors_provisional)
    self.rmsd_px = math.sqrt(flex.mean(c_v_p_flex.dot(c_v_p_flex)))
    print "... %d provisional matches"%self.N_correction_vectors,
    print "r.m.s.d. in pixels: %6.3f"%(self.rmsd_px)

    if self.horizons_phil.integration.enable_residual_scatter:
      from matplotlib import pyplot as plt
      fig = plt.figure()
      for cv in correction_vectors_provisional:
        plt.plot([cv[1]],[-cv[0]],"r.")
      plt.title(" %d matches, r.m.s.d. %5.2f pixels"%(len(correction_vectors_provisional),math.sqrt(flex.mean(c_v_p_flex.dot(c_v_p_flex)))))
      plt.axes().set_aspect("equal")
      self.show_figure(plt,fig,"res")
      plt.close()

    if self.horizons_phil.integration.enable_residual_map:
      from matplotlib import pyplot as plt
      PX = reflections["xyzobs.px.value"]
      fig = plt.figure()
      for match,cv in zip(indexed_pairs_provisional,correction_vectors_provisional):
        # observed (red), predicted (green), 10x-exaggerated correction (red line)
        plt.plot([PX[match["spot"]][1]],[-PX[match["spot"]][0]],"r.")
        plt.plot([self.predicted[match["pred"]][1]/pxlsz[1]],[-self.predicted[match["pred"]][0]/pxlsz[0]],"g.")
        plt.plot([PX[match["spot"]][1], PX[match["spot"]][1] + 10.*cv[1]],
                 [-PX[match["spot"]][0], -PX[match["spot"]][0] - 10.*cv[0]],'r-')
      # Optional delta-psi overlay, only on re-entrant DIALS-prediction passes.
      if kwargs.get("user-reentrant") != None and self.horizons_phil.integration.spot_prediction == "dials" \
             and self.horizons_phil.integration.enable_residual_map_deltapsi:
        from rstbx.apps.stills.util import residual_map_special_deltapsi_add_on
        residual_map_special_deltapsi_add_on(
          reflections = self.dials_spot_prediction,
          matches = indexed_pairs_provisional, experiments=experiments,
          hkllist = self.hkllist,
          predicted = self.predicted, plot=plt, eta_deg=FWMOSAICITY, deff=self.DOMAIN_SZ_ANG
          )
      plt.xlim([0,detector[0].get_image_size()[1]])
      plt.ylim([-detector[0].get_image_size()[0],0])
      plt.title(" %d matches, r.m.s.d. %5.2f pixels"%(len(correction_vectors_provisional),math.sqrt(flex.mean(c_v_p_flex.dot(c_v_p_flex)))))
      plt.axes().set_aspect("equal")
      self.show_figure(plt,fig,"map")
      plt.close()

    indexed_pairs = indexed_pairs_provisional
    correction_vectors = correction_vectors_provisional
    ########### skip outlier rejection for this derived class

    ### However must retain the ability to write out correction vectors.
    if True: # at Aaron's request; test later
      correction_lengths = flex.double([v.length() for v in correction_vectors_provisional])
      clorder = flex.sort_permutation(correction_lengths)
      sorted_cl = correction_lengths.select(clorder)
      indexed_pairs = []
      correction_vectors = []
      self.correction_vectors = []
      for icand in xrange(len(sorted_cl)):
        # somewhat arbitrary sigma = 1.0 cutoff for outliers
        # (no break here: unlike integration_concept, nothing is actually rejected)
        indexed_pairs.append(indexed_pairs_provisional[clorder[icand]])
        correction_vectors.append(correction_vectors_provisional[clorder[icand]])
        if cache_refinement_spots:
          self.spotfinder.images[self.frame_numbers[self.image_number]]["refinement_spots"].append(
          spots[reflections[indexed_pairs[-1]["spot"]]['spotfinder_lookup']])
        if kwargs.get("verbose_cv")==True:
            print "CV OBSCENTER %7.2f %7.2f REFINEDCENTER %7.2f %7.2f"%(
              float(self.inputpd["size1"])/2.,float(self.inputpd["size2"])/2.,
              self.inputai.xbeam()/pxlsz[0], self.inputai.ybeam()/pxlsz[1]),
            print "OBSSPOT %7.2f %7.2f PREDSPOT %7.2f %7.2f"%(
              reflections[indexed_pairs[-1]["spot"]]['xyzobs.px.value'][0],
              reflections[indexed_pairs[-1]["spot"]]['xyzobs.px.value'][1],
              self.predicted[indexed_pairs[-1]["pred"]][0]/pxlsz[0],
              self.predicted[indexed_pairs[-1]["pred"]][1]/pxlsz[1]),
            the_hkl = self.hkllist[indexed_pairs[-1]["pred"]]
            print "HKL %4d %4d %4d"%the_hkl,"%2d"%self.setting_id,
            radial, azimuthal = spots[indexed_pairs[-1]["spot"]].get_radial_and_azimuthal_size(
              self.inputai.xbeam()/pxlsz[0], self.inputai.ybeam()/pxlsz[1])
            print "RADIALpx %5.3f AZIMUTpx %5.3f"%(radial,azimuthal)

        # Store a list of correction vectors in self.
        radial, azimuthal = spots[indexed_pairs[-1]['spot']].get_radial_and_azimuthal_size(
          self.inputai.xbeam()/pxlsz[0], self.inputai.ybeam()/pxlsz[1])
        self.correction_vectors.append(
          dict(obscenter=(float(self.inputpd['size1']) / 2,
                          float(self.inputpd['size2']) / 2),
               refinedcenter=(self.inputai.xbeam() / pxlsz[0],
                              self.inputai.ybeam() / pxlsz[1]),
               obsspot=(reflections[indexed_pairs[-1]['spot']]['xyzobs.px.value'][0],
                        reflections[indexed_pairs[-1]['spot']]['xyzobs.px.value'][1]),
               predspot=(self.predicted[indexed_pairs[-1]['pred']][0] / pxlsz[0],
                         self.predicted[indexed_pairs[-1]['pred']][1] / pxlsz[1]),
               hkl=(self.hkllist[indexed_pairs[-1]['pred']][0],
                    self.hkllist[indexed_pairs[-1]['pred']][1],
                    self.hkllist[indexed_pairs[-1]['pred']][2]),
               setting_id=self.setting_id,
               radial=radial,
               azimuthal=azimuthal))


    self.inputpd["symmetry"] = c_symmetry
    self.inputpd["symmetry"].show_summary(prefix="SETTING ")


    if self.horizons_phil.integration.model == "user_supplied":
      # Not certain of whether the reentrant_* dictionary keys create a memory leak
      if kwargs.get("user-reentrant",None)==None:
        kwargs["reentrant_experiments"] = experiments
        kwargs["reentrant_reflections"] = reflections
        from cxi_user import post_outlier_rejection
        self.indexed_pairs = indexed_pairs
        self.spots = spots
        post_outlier_rejection(self,image_number,cb_op_to_primitive,self.horizons_phil,kwargs)
        return
    ########### finished with user-supplied code


    correction_lengths=flex.double([v.length() for v in correction_vectors])

    self.r_residual = pxlsz[0]*flex.mean(correction_lengths)

    #assert len(indexed_pairs)>NEAR # must have enough indexed spots
    if (len(indexed_pairs) <= NEAR):
      raise Sorry("Not enough indexed spots, only found %d, need %d" % (len(indexed_pairs), NEAR))

    # ANN reference rebuilt from the indexed spots only.
    reference = flex.double()
    for item in indexed_pairs:
      reference.append(spots[item["spot"]].ctr_mass_x())
      reference.append(spots[item["spot"]].ctr_mass_y())

    PS_adapt = AnnAdaptor(data=reference,dim=2,k=NEAR)
    PS_adapt.query(query)

    self.BSmasks = []
    # do not use null: self.null_correction_mapping( predicted=self.predicted,
    self.positional_correction_mapping( predicted=self.predicted,
                                        correction_vectors = correction_vectors,
                                        PS_adapt = PS_adapt,
                                        IS_adapt = IS_adapt,
                                        spots = spots)

    # which spots are close enough to interfere with background?
    MAXOVER=6
    OS_adapt = AnnAdaptor(data=query,dim=2,k=MAXOVER) #six near nbrs
    OS_adapt.query(query)
    if self.mask_focus[image_number] is None:
      raise Sorry("No observed/predicted spot agreement; no Spotfinder masks; skip integration")
    # Interference radius: twice the largest mask dimension for this image.
    nbr_cutoff = 2.0* max(self.mask_focus[image_number])
    FRAME = int(nbr_cutoff/2)
    #print "The overlap cutoff is %d pixels"%nbr_cutoff
    nbr_cutoff_sq = nbr_cutoff * nbr_cutoff

    #print "Optimized C++ section...",
    # Pass cutoffs/gain down to the optimized section noted above.
    self.set_frame(FRAME)
    self.set_background_factor(kwargs["background_factor"])
    self.set_nbr_cutoff_sq(nbr_cutoff_sq)
    self.set_guard_width_sq(self.horizons_phil.integration.guard_width_sq)
    self.set_detector_gain(self.horizons_phil.integration.detector_gain)
    # Flatten self.sorted (pairs) into one interleaved int array.
    flex_sorted = flex.int()
    for item in self.sorted:
      flex_sorted.append(item[0]);flex_sorted.append(item[1]);

    if self.horizons_phil.integration.mask_pixel_value is not None:
      self.set_mask_pixel_val(self.horizons_phil.integration.mask_pixel_value)

    image_obj = self.imagefiles.imageindex(self.frame_numbers[self.image_number])
    image_obj.read()
    rawdata = image_obj.linearintdata # assume image #1

    if self.inputai.active_areas != None:
      self.detector_xy_draft = self.safe_background( rawdata=rawdata,
                          predicted=self.predicted,
                          OS_adapt=OS_adapt,
                          sorted=flex_sorted,
                          tiles=self.inputai.active_areas.IT,
                          tile_id=self.inputai.active_areas.tile_id);
    else:
      self.detector_xy_draft = self.safe_background( rawdata=rawdata,
                          predicted=self.predicted,
                          OS_adapt=OS_adapt,
                          sorted=flex_sorted);
    # One background/signal mask dict per prediction, built from the flat
    # key pairs returned by get_bsmask: (keys[k], keys[k+1]) -> True.
    for i in xrange(len(self.predicted)): # loop over predicteds
      B_S_mask = {}
      keys = self.get_bsmask(i)
      for k in xrange(0,len(keys),2):
        B_S_mask[(keys[k],keys[k+1])]=True
      self.BSmasks.append(B_S_mask)
    #print "Done"
    return
    def integration_concept_detail(self, experiments, reflections, spots,
                                   image_number, cb_op_to_primitive, **kwargs):
        detector = experiments[0].detector
        crystal = experiments[0].crystal
        from cctbx.crystal import symmetry
        c_symmetry = symmetry(space_group=crystal.get_space_group(),
                              unit_cell=crystal.get_unit_cell())

        self.image_number = image_number
        NEAR = 10
        pxlsz = detector[0].get_pixel_size()

        Predicted = self.get_predictions_accounting_for_centering(
            experiments, reflections, cb_op_to_primitive, **kwargs)

        FWMOSAICITY = self.inputai.getMosaicity()
        self.DOMAIN_SZ_ANG = kwargs.get("domain_size_ang",
                                        self.__dict__.get("actual", 0))
        refineflag = {True: 0, False: 1}[kwargs.get("domain_size_ang", 0) == 0]
        c_symmetry.show_summary(
            prefix="EXCURSION%1d REPORT FWMOS= %6.4f DOMAIN= %6.1f " %
            (refineflag, FWMOSAICITY, self.DOMAIN_SZ_ANG))
        from annlib_ext import AnnAdaptor
        self.cell = c_symmetry.unit_cell()

        query = flex.double()
        print len(self.predicted)

        for pred in self.predicted:  # predicted spot coord in pixels
            query.append(pred[0] / pxlsz[0])
            query.append(pred[1] / pxlsz[1])

        self.reserve_hkllist_for_signal_search = self.hkllist

        reference = flex.double()

        assert self.length > NEAR  # Can't do spot/pred matching with too few spots
        for spot in spots:
            reference.append(spot.ctr_mass_x())
            reference.append(spot.ctr_mass_y())

        IS_adapt = AnnAdaptor(data=reference, dim=2, k=NEAR)
        IS_adapt.query(query)
        idx_cutoff = float(min(self.mask_focus[image_number]))

        from rstbx.apps.slip_helpers import slip_callbacks
        cache_refinement_spots = getattr(slip_callbacks.slip_callback,
                                         "requires_refinement_spots", False)

        indexed_pairs_provisional = []
        correction_vectors_provisional = []
        c_v_p_flex = flex.vec3_double()
        this_setting_matched_indices = reflections["miller_index"]
        for j, item in enumerate(this_setting_matched_indices):
            this_setting_index = self.hkllist.first_index(item)
            if this_setting_index:
                Match = dict(spot=j, pred=this_setting_index)
                indexed_pairs_provisional.append(Match)
                vector = matrix.col([
                    reflections["xyzobs.px.value"][j][0] -
                    self.predicted[Match["pred"]][0] / pxlsz[0],
                    reflections["xyzobs.px.value"][j][1] -
                    self.predicted[Match["pred"]][1] / pxlsz[1]
                ])
                correction_vectors_provisional.append(vector)
                c_v_p_flex.append((vector[0], vector[1], 0.))
        self.N_correction_vectors = len(correction_vectors_provisional)
        self.rmsd_px = math.sqrt(flex.mean(c_v_p_flex.dot(c_v_p_flex)))
        print "... %d provisional matches" % self.N_correction_vectors,
        print "r.m.s.d. in pixels: %6.3f" % (self.rmsd_px)

        if self.horizons_phil.integration.enable_residual_scatter:
            from matplotlib import pyplot as plt
            fig = plt.figure()
            for cv in correction_vectors_provisional:
                plt.plot([cv[1]], [-cv[0]], "r.")
            plt.title(" %d matches, r.m.s.d. %5.2f pixels" %
                      (len(correction_vectors_provisional),
                       math.sqrt(flex.mean(c_v_p_flex.dot(c_v_p_flex)))))
            plt.axes().set_aspect("equal")
            self.show_figure(plt, fig, "res")
            plt.close()

        if self.horizons_phil.integration.enable_residual_map:
            from matplotlib import pyplot as plt
            PX = reflections["xyzobs.px.value"]
            fig = plt.figure()
            for match, cv in zip(indexed_pairs_provisional,
                                 correction_vectors_provisional):
                plt.plot([PX[match["spot"]][1]], [-PX[match["spot"]][0]], "r.")
                plt.plot([self.predicted[match["pred"]][1] / pxlsz[1]],
                         [-self.predicted[match["pred"]][0] / pxlsz[0]], "g.")
                plt.plot(
                    [PX[match["spot"]][1], PX[match["spot"]][1] + 10. * cv[1]],
                    [
                        -PX[match["spot"]][0],
                        -PX[match["spot"]][0] - 10. * cv[0]
                    ], 'r-')
            if kwargs.get("user-reentrant") != None and self.horizons_phil.integration.spot_prediction == "dials" \
                   and self.horizons_phil.integration.enable_residual_map_deltapsi:
                from rstbx.apps.stills.util import residual_map_special_deltapsi_add_on
                residual_map_special_deltapsi_add_on(
                    reflections=self.dials_spot_prediction,
                    matches=indexed_pairs_provisional,
                    experiments=experiments,
                    hkllist=self.hkllist,
                    predicted=self.predicted,
                    plot=plt,
                    eta_deg=FWMOSAICITY,
                    deff=self.DOMAIN_SZ_ANG)
            plt.xlim([0, detector[0].get_image_size()[1]])
            plt.ylim([-detector[0].get_image_size()[0], 0])
            plt.title(" %d matches, r.m.s.d. %5.2f pixels" %
                      (len(correction_vectors_provisional),
                       math.sqrt(flex.mean(c_v_p_flex.dot(c_v_p_flex)))))
            plt.axes().set_aspect("equal")
            self.show_figure(plt, fig, "map")
            plt.close()

        indexed_pairs = indexed_pairs_provisional
        correction_vectors = correction_vectors_provisional
        ########### skip outlier rejection for this derived class

        ### However we must retain the ability to write out correction vectors.
        if True:  # at Aaron's request; test later
            correction_lengths = flex.double(
                [v.length() for v in correction_vectors_provisional])
            clorder = flex.sort_permutation(correction_lengths)
            sorted_cl = correction_lengths.select(clorder)
            indexed_pairs = []
            correction_vectors = []
            self.correction_vectors = []
            for icand in xrange(len(sorted_cl)):
                # somewhat arbitrary sigma = 1.0 cutoff for outliers
                indexed_pairs.append(indexed_pairs_provisional[clorder[icand]])
                correction_vectors.append(
                    correction_vectors_provisional[clorder[icand]])
                if cache_refinement_spots:
                    self.spotfinder.images[self.frame_numbers[
                        self.image_number]]["refinement_spots"].append(
                            spots[reflections[indexed_pairs[-1]["spot"]]
                                  ['spotfinder_lookup']])
                if kwargs.get("verbose_cv") == True:
                    print "CV OBSCENTER %7.2f %7.2f REFINEDCENTER %7.2f %7.2f" % (
                        float(self.inputpd["size1"]) / 2.,
                        float(self.inputpd["size2"]) / 2.,
                        self.inputai.xbeam() / pxlsz[0],
                        self.inputai.ybeam() / pxlsz[1]),
                    print "OBSSPOT %7.2f %7.2f PREDSPOT %7.2f %7.2f" % (
                        reflections[indexed_pairs[-1]["spot"]]
                        ['xyzobs.px.value'][0], reflections[
                            indexed_pairs[-1]["spot"]]['xyzobs.px.value'][1],
                        self.predicted[indexed_pairs[-1]["pred"]][0] /
                        pxlsz[0], self.predicted[indexed_pairs[-1]["pred"]][1]
                        / pxlsz[1]),
                    the_hkl = self.hkllist[indexed_pairs[-1]["pred"]]
                    print "HKL %4d %4d %4d" % the_hkl, "%2d" % self.setting_id,
                    radial, azimuthal = spots[indexed_pairs[-1][
                        "spot"]].get_radial_and_azimuthal_size(
                            self.inputai.xbeam() / pxlsz[0],
                            self.inputai.ybeam() / pxlsz[1])
                    print "RADIALpx %5.3f AZIMUTpx %5.3f" % (radial, azimuthal)

                # Store a list of correction vectors in self.
                radial, azimuthal = spots[
                    indexed_pairs[-1]['spot']].get_radial_and_azimuthal_size(
                        self.inputai.xbeam() / pxlsz[0],
                        self.inputai.ybeam() / pxlsz[1])
                self.correction_vectors.append(
                    dict(obscenter=(float(self.inputpd['size1']) / 2,
                                    float(self.inputpd['size2']) / 2),
                         refinedcenter=(self.inputai.xbeam() / pxlsz[0],
                                        self.inputai.ybeam() / pxlsz[1]),
                         obsspot=(reflections[indexed_pairs[-1]
                                              ['spot']]['xyzobs.px.value'][0],
                                  reflections[indexed_pairs[-1]
                                              ['spot']]['xyzobs.px.value'][1]),
                         predspot=(
                             self.predicted[indexed_pairs[-1]['pred']][0] /
                             pxlsz[0],
                             self.predicted[indexed_pairs[-1]['pred']][1] /
                             pxlsz[1]),
                         hkl=(self.hkllist[indexed_pairs[-1]['pred']][0],
                              self.hkllist[indexed_pairs[-1]['pred']][1],
                              self.hkllist[indexed_pairs[-1]['pred']][2]),
                         setting_id=self.setting_id,
                         radial=radial,
                         azimuthal=azimuthal))

        self.inputpd["symmetry"] = c_symmetry
        self.inputpd["symmetry"].show_summary(prefix="SETTING ")

        if self.horizons_phil.integration.model == "user_supplied":
            # Not certain of whether the reentrant_* dictionary keys create a memory leak
            if kwargs.get("user-reentrant", None) == None:
                kwargs["reentrant_experiments"] = experiments
                kwargs["reentrant_reflections"] = reflections
                from cxi_user import post_outlier_rejection
                self.indexed_pairs = indexed_pairs
                self.spots = spots
                post_outlier_rejection(self, image_number, cb_op_to_primitive,
                                       self.horizons_phil, kwargs)
                return
        ########### finished with user-supplied code

        correction_lengths = flex.double(
            [v.length() for v in correction_vectors])

        self.r_residual = pxlsz[0] * flex.mean(correction_lengths)

        #assert len(indexed_pairs)>NEAR # must have enough indexed spots
        if (len(indexed_pairs) <= NEAR):
            raise Sorry("Not enough indexed spots, only found %d, need %d" %
                        (len(indexed_pairs), NEAR))

        reference = flex.double()
        for item in indexed_pairs:
            reference.append(spots[item["spot"]].ctr_mass_x())
            reference.append(spots[item["spot"]].ctr_mass_y())

        PS_adapt = AnnAdaptor(data=reference, dim=2, k=NEAR)
        PS_adapt.query(query)

        self.BSmasks = []
        # do not use null: self.null_correction_mapping( predicted=self.predicted,
        self.positional_correction_mapping(
            predicted=self.predicted,
            correction_vectors=correction_vectors,
            PS_adapt=PS_adapt,
            IS_adapt=IS_adapt,
            spots=spots)

        # which spots are close enough to interfere with background?
        MAXOVER = 6
        OS_adapt = AnnAdaptor(data=query, dim=2, k=MAXOVER)  #six near nbrs
        OS_adapt.query(query)
        if self.mask_focus[image_number] is None:
            raise Sorry(
                "No observed/predicted spot agreement; no Spotfinder masks; skip integration"
            )
        nbr_cutoff = 2.0 * max(self.mask_focus[image_number])
        FRAME = int(nbr_cutoff / 2)
        #print "The overlap cutoff is %d pixels"%nbr_cutoff
        nbr_cutoff_sq = nbr_cutoff * nbr_cutoff

        #print "Optimized C++ section...",
        self.set_frame(FRAME)
        self.set_background_factor(kwargs["background_factor"])
        self.set_nbr_cutoff_sq(nbr_cutoff_sq)
        self.set_guard_width_sq(self.horizons_phil.integration.guard_width_sq)
        self.set_detector_gain(self.horizons_phil.integration.detector_gain)
        flex_sorted = flex.int()
        for item in self.sorted:
            flex_sorted.append(item[0])
            flex_sorted.append(item[1])

        if self.horizons_phil.integration.mask_pixel_value is not None:
            self.set_mask_pixel_val(
                self.horizons_phil.integration.mask_pixel_value)

        image_obj = self.imagefiles.imageindex(
            self.frame_numbers[self.image_number])
        image_obj.read()
        rawdata = image_obj.linearintdata  # assume image #1

        if self.inputai.active_areas != None:
            self.detector_xy_draft = self.safe_background(
                rawdata=rawdata,
                predicted=self.predicted,
                OS_adapt=OS_adapt,
                sorted=flex_sorted,
                tiles=self.inputai.active_areas.IT,
                tile_id=self.inputai.active_areas.tile_id)
        else:
            self.detector_xy_draft = self.safe_background(
                rawdata=rawdata,
                predicted=self.predicted,
                OS_adapt=OS_adapt,
                sorted=flex_sorted)
        for i in xrange(len(self.predicted)):  # loop over predicteds
            B_S_mask = {}
            keys = self.get_bsmask(i)
            for k in xrange(0, len(keys), 2):
                B_S_mask[(keys[k], keys[k + 1])] = True
            self.BSmasks.append(B_S_mask)
        #print "Done"
        return
  def integration_concept(self,image_number=0,cb_op_to_primitive=None,verbose=False,**kwargs):
    """Match predicted spots to spotfinder observations; build integration masks.

    Workflow: (1) predict spot centers for this image; (2) pair each
    prediction with its nearest observed spot (annlib AnnAdaptor
    nearest-neighbour search); (3) reject outlier obs-pred correction
    vectors by fitting a Rayleigh distribution to the best-modeled third
    of the correction lengths (Sauter & Poon (2010) J Appl Cryst 43,
    611-616); (4) map the positional corrections onto all predictions and
    compute per-prediction background/signal masks in the C++ layer.

    image_number -- index into self.frame_numbers for the frame to process
    cb_op_to_primitive -- change-of-basis operator forwarded to the
        prediction step (None means no change of basis)
    verbose -- if True, print extra diagnostics
    kwargs -- "background_factor" is required; "domain_size_ang",
        "verbose_cv" and "user-reentrant" are consulted if present

    Side effects: sets self.r_residual, self.correction_vectors,
    self.BSmasks, self.detector_xy_draft and others; may return early
    after handing control to user-supplied code
    (cxi_user.post_outlier_rejection) when integration.model is
    "user_supplied".
    """
    self.image_number = image_number
    NEAR = 10  # number of nearest neighbours requested per ANN query
    pxlsz = self.pixel_size  # mm per pixel (predictions are divided by this to get pixels)
    self.get_predictions_accounting_for_centering(cb_op_to_primitive,**kwargs)
    FWMOSAICITY = self.inputai.getMosaicity()
    DOMAIN_SZ_ANG = kwargs.get("domain_size_ang",  self.__dict__.get("actual",0)  )
    # refineflag is 0 when no domain size was supplied via kwargs, 1 when one was
    refineflag = {True:0,False:1}[kwargs.get("domain_size_ang",0)==0]
    self.inputpd["symmetry"].show_summary(prefix="EXCURSION%1d REPORT FWMOS= %6.4f DOMAIN= %6.1f "%(refineflag,FWMOSAICITY,DOMAIN_SZ_ANG))
    from annlib_ext import AnnAdaptor
    self.cell = self.inputai.getOrientation().unit_cell()
    # flat [x0,y0,x1,y1,...] list of predicted positions, in pixels
    query = flex.double()
    for pred in self.predicted: # predicted spot coord in pixels
      query.append(pred[0]/pxlsz)
      query.append(pred[1]/pxlsz)
    self.reserve_hkllist_for_signal_search = self.hkllist

    # flat [x0,y0,x1,y1,...] list of observed spot centroids
    reference = flex.double()
    spots = self.get_observations_with_outlier_removal()

    assert len(spots)>NEAR# Can't do spot/pred matching with too few spots
    for spot in spots:
      reference.append(spot.ctr_mass_x())
      reference.append(spot.ctr_mass_y())

    # IS_adapt: for each prediction (query point), the NEAR closest observed spots
    IS_adapt = AnnAdaptor(data=reference,dim=2,k=NEAR)
    IS_adapt.query(query)
    print "Calculate correction vectors for %d observations & %d predictions"%(len(spots),len(self.predicted))
    indexed_pairs_provisional = []
    correction_vectors_provisional = []
    c_v_p_flex = flex.vec3_double()
    # pairing distance cutoff in pixels; presumably the smallest extent of
    # this image's spotfinder mask -- TODO confirm semantics of mask_focus
    idx_cutoff = float(min(self.mask_focus[image_number]))
    if verbose:
      print "idx_cutoff distance in pixels",idx_cutoff
    if not self.horizons_phil.integration.enable_one_to_one_safeguard:
     # legacy code, no safeguard against many-to-one predicted-to-observation mapping
     for i in xrange(len(self.predicted)): # loop over predicteds
      #for n in xrange(NEAR): # loop over near spotfinder spots
      for n in xrange(1): # only consider the nearest spotfinder spot
        Match = dict(spot=IS_adapt.nn[i*NEAR+n],pred=i)
        if n==0 and math.sqrt(IS_adapt.distances[i*NEAR+n]) < idx_cutoff:
          indexed_pairs_provisional.append(Match)

          # correction vector: observed centroid minus predicted position (pixels)
          vector = matrix.col(
            [spots[Match["spot"]].ctr_mass_x() - self.predicted[Match["pred"]][0]/pxlsz,
             spots[Match["spot"]].ctr_mass_y() - self.predicted[Match["pred"]][1]/pxlsz])
          correction_vectors_provisional.append(vector)
          c_v_p_flex.append((vector[0],vector[1],0.))
    else:
      # safeguarded path: each observed spot may be claimed by at most one
      # prediction -- the one with the smallest squared distance wins
      one_to_one = {}
      for i in xrange(len(self.predicted)): # loop over predicteds
        annresultidx = i*NEAR
        obsidx = IS_adapt.nn[annresultidx]
        this_distancesq = IS_adapt.distances[annresultidx]
        if not one_to_one.has_key(obsidx) or \
           this_distancesq < one_to_one[obsidx]["distancesq"]:
           if math.sqrt(this_distancesq) < idx_cutoff:
             one_to_one[obsidx] = dict(spot=obsidx,pred=i,distancesq=this_distancesq)
      for key,value in one_to_one.items():
        indexed_pairs_provisional.append(value)
        vector = matrix.col(
            [spots[value["spot"]].ctr_mass_x() - self.predicted[value["pred"]][0]/pxlsz,
             spots[value["spot"]].ctr_mass_y() - self.predicted[value["pred"]][1]/pxlsz])
        correction_vectors_provisional.append(vector)
        c_v_p_flex.append((vector[0],vector[1],0.))

    # trailing comma keeps both figures on one output line (Python 2 print)
    print "... %d provisional matches"%len(correction_vectors_provisional),
    print "r.m.s.d. in pixels: %5.2f"%(math.sqrt(flex.mean(c_v_p_flex.dot(c_v_p_flex))))

    if self.horizons_phil.integration.enable_residual_scatter:
      # scatter plot of the provisional correction vectors
      from matplotlib import pyplot as plt
      fig = plt.figure()
      for cv in correction_vectors_provisional:
        plt.plot([cv[1]],[-cv[0]],"b.")
      plt.title(" %d matches, r.m.s.d. %5.2f pixels"%(len(correction_vectors_provisional),math.sqrt(flex.mean(c_v_p_flex.dot(c_v_p_flex)))))
      plt.axes().set_aspect("equal")
      self.show_figure(plt,fig,"res")
      plt.close()

    if self.horizons_phil.integration.enable_residual_map:
      # detector-space map: observed spots (red), predictions (green),
      # correction vectors exaggerated 10x (blue)
      from matplotlib import pyplot as plt
      fig = plt.figure()
      for match,cv in zip(indexed_pairs_provisional,correction_vectors_provisional):
        plt.plot([spots[match["spot"]].ctr_mass_y()],[-spots[match["spot"]].ctr_mass_x()],"r.")
        plt.plot([self.predicted[match["pred"]][1]/pxlsz],[-self.predicted[match["pred"]][0]/pxlsz],"g.")
        plt.plot([spots[match["spot"]].ctr_mass_y(), spots[match["spot"]].ctr_mass_y() + 10.*cv[1]],
                 [-spots[match["spot"]].ctr_mass_x(), -spots[match["spot"]].ctr_mass_x() - 10.*cv[0]],'b-')
      plt.xlim([0,float(self.inputpd["size2"])])
      plt.ylim([-float(self.inputpd["size1"]),0])
      plt.title(" %d matches, r.m.s.d. %5.2f pixels"%(len(correction_vectors_provisional),math.sqrt(flex.mean(c_v_p_flex.dot(c_v_p_flex)))))
      plt.axes().set_aspect("equal")
      self.show_figure(plt,fig,"map")
      plt.close()
    # Remove correction-length outliers before the final mapping: extreme
    # lengths would otherwise corrupt the positional-correction model and
    # the reported r.m.s.
    outlier_rejection = True  # else-branch below retained for reference only
    # NOTE(review): slip_callbacks assumed imported at module level -- confirm
    cache_refinement_spots = getattr(slip_callbacks.slip_callback,"requires_refinement_spots",False)
    if outlier_rejection:
      correction_lengths = flex.double([v.length() for v in correction_vectors_provisional])
      clorder = flex.sort_permutation(correction_lengths)
      sorted_cl = correction_lengths.select(clorder)

      ACCEPTABLE_LIMIT = 2
      limit = int(0.33 * len(sorted_cl)) # best 1/3 of data are assumed to be correctly modeled.
      if (limit <= ACCEPTABLE_LIMIT):
        raise Sorry("Not enough indexed spots to reject outliers; have %d need >%d" % (limit, ACCEPTABLE_LIMIT))

      # empirical CDF ordinates for the sorted correction lengths
      y_data = flex.double(len(sorted_cl))
      for i in xrange(len(y_data)):
        y_data[i] = float(i)/float(len(y_data))

      # ideas are explained in Sauter & Poon (2010) J Appl Cryst 43, 611-616.
      from rstbx.outlier_spots.fit_distribution import fit_cdf,rayleigh
      fitted_rayleigh = fit_cdf(x_data = sorted_cl[0:limit],
                                y_data = y_data[0:limit],
                                distribution=rayleigh)

      # correction length expected at each empirical quantile under the fit
      inv_cdf = [fitted_rayleigh.distribution.inv_cdf(cdf) for cdf in y_data]

      #print "SORTED LIST OF ",len(sorted_cl), "with sigma",fitted_rayleigh.distribution.sigma
      indexed_pairs = []
      correction_vectors = []
      self.correction_vectors = []
      for icand in xrange(len(sorted_cl)):
        # somewhat arbitrary sigma = 1.0 cutoff for outliers
        if (sorted_cl[icand]-inv_cdf[icand])/fitted_rayleigh.distribution.sigma > 1.0:
          break
        indexed_pairs.append(indexed_pairs_provisional[clorder[icand]])
        correction_vectors.append(correction_vectors_provisional[clorder[icand]])
        if cache_refinement_spots:
          self.spotfinder.images[self.frame_numbers[self.image_number]]["refinement_spots"].append(
          spots[indexed_pairs[-1]["spot"]])
        if kwargs.get("verbose_cv")==True:
            # trailing commas join these prints into a single CV report line
            print "CV OBSCENTER %7.2f %7.2f REFINEDCENTER %7.2f %7.2f"%(
              float(self.inputpd["size1"])/2.,float(self.inputpd["size2"])/2.,
              self.inputai.xbeam()/pxlsz, self.inputai.ybeam()/pxlsz),
            print "OBSSPOT %7.2f %7.2f PREDSPOT %7.2f %7.2f"%(
              spots[indexed_pairs[-1]["spot"]].ctr_mass_x(),
              spots[indexed_pairs[-1]["spot"]].ctr_mass_y(),
              self.predicted[indexed_pairs[-1]["pred"]][0]/pxlsz,
              self.predicted[indexed_pairs[-1]["pred"]][1]/pxlsz),
            the_hkl = self.hkllist[indexed_pairs[-1]["pred"]]
            print "HKL %4d %4d %4d"%the_hkl,"%2d"%self.setting_id,
            radial, azimuthal = spots[indexed_pairs[-1]["spot"]].get_radial_and_azimuthal_size(
              self.inputai.xbeam()/pxlsz, self.inputai.ybeam()/pxlsz)
            print "RADIALpx %5.3f AZIMUTpx %5.3f"%(radial,azimuthal)

        # Store a list of correction vectors in self.
        radial, azimuthal = spots[indexed_pairs[-1]['spot']].get_radial_and_azimuthal_size(
          self.inputai.xbeam()/pxlsz, self.inputai.ybeam()/pxlsz)
        self.correction_vectors.append(
          dict(obscenter=(float(self.inputpd['size1']) / 2,
                          float(self.inputpd['size2']) / 2),
               refinedcenter=(self.inputai.xbeam() / pxlsz,
                              self.inputai.ybeam() / pxlsz),
               obsspot=(spots[indexed_pairs[-1]['spot']].ctr_mass_x(),
                        spots[indexed_pairs[-1]['spot']].ctr_mass_y()),
               predspot=(self.predicted[indexed_pairs[-1]['pred']][0] / pxlsz,
                         self.predicted[indexed_pairs[-1]['pred']][1] / pxlsz),
               hkl=(self.hkllist[indexed_pairs[-1]['pred']][0],
                    self.hkllist[indexed_pairs[-1]['pred']][1],
                    self.hkllist[indexed_pairs[-1]['pred']][2]),
               setting_id=self.setting_id,
               radial=radial,
               azimuthal=azimuthal))

      print "After outlier rejection %d indexed spotfinder spots remain."%len(indexed_pairs)
      if False:
        # debugging plot: empirical CDF vs the fitted Rayleigh model
        rayleigh_cdf = [
          fitted_rayleigh.distribution.cdf(x=sorted_cl[c]) for c in xrange(len(sorted_cl))]
        from matplotlib import pyplot as plt
        plt.plot(sorted_cl,y_data,"r+")
        #plt.plot(sorted_cl,rayleigh_cdf,"g.")
        plt.plot(inv_cdf,y_data,"b.")
        plt.show()
    else:
      indexed_pairs = indexed_pairs_provisional
      correction_vectors = correction_vectors_provisional
    ########### finished with outlier rejection

    self.inputpd["symmetry"].show_summary(prefix="SETTING ")

    # setting_id 1 is treated as the triclinic setting; record its pairs
    is_triclinic = (self.setting_id==1)
    if is_triclinic:
      self.triclinic_pairs = [ dict(pred=self.hkllist[a["pred"]],spot=a["spot"])
        for a in indexed_pairs ]

    if self.horizons_phil.integration.model == "user_supplied":
      # first (non-reentrant) pass hands control to user code, which is
      # expected to re-enter with the "user-reentrant" kwarg set
      if kwargs.get("user-reentrant",None)==None:
        from cxi_user import post_outlier_rejection
        self.indexed_pairs = indexed_pairs
        self.spots = spots
        post_outlier_rejection(self,image_number,cb_op_to_primitive,self.horizons_phil,kwargs)
        return

    ########### finished with user-supplied code

    if self.horizons_phil.integration.spot_shape_verbose:
        from rstbx.new_horizons.spot_shape import spot_shape_verbose
        spot_shape_verbose(rawdata = self.imagefiles.images[self.image_number].linearintdata,
           beam_center_pix = matrix.col((self.inputai.xbeam()/pxlsz, self.inputai.ybeam()/pxlsz)),
           indexed_pairs = indexed_pairs,
           spotfinder_observations = spots,
           distance_mm = self.inputai.distance(),
           mm_per_pixel = pxlsz,
           hkllist = self.hkllist,
           unit_cell = self.cell,
           wavelength_ang = self.inputai.wavelength
        )

    #Other checks to be implemented (future):
    # spot is within active area of detector on a circular detector such as the Mar IP
    # integration masks do not overlap; or deconvolute

    correction_lengths=flex.double([v.length() for v in correction_vectors])
    if verbose:
      print "average correction %5.2f over %d vectors"%(flex.mean(correction_lengths),
      len(correction_lengths)),
      print "or %5.2f mm."%(pxlsz*flex.mean(correction_lengths))
    self.r_residual = pxlsz*flex.mean(correction_lengths)  # mean residual in mm

    #assert len(indexed_pairs)>NEAR # must have enough indexed spots
    if (len(indexed_pairs) <= NEAR):
      raise Sorry("Not enough indexed spots, only found %d, need %d" % (len(indexed_pairs), NEAR))

    # centroids of the surviving (indexed) spots, flat [x0,y0,x1,y1,...]
    reference = flex.double()
    for item in indexed_pairs:
      reference.append(spots[item["spot"]].ctr_mass_x())
      reference.append(spots[item["spot"]].ctr_mass_y())

    # PS_adapt: for each prediction, the nearest indexed spots -- used to
    # interpolate positional corrections below
    PS_adapt = AnnAdaptor(data=reference,dim=2,k=NEAR)
    PS_adapt.query(query)

    self.BSmasks = []
    #self.null_correction_mapping( predicted=self.predicted,
    #                                    correction_vectors = correction_vectors,
    #                                    IS_adapt = IS_adapt,
    #                                    spots = spots)
    self.positional_correction_mapping( predicted=self.predicted,
                                        correction_vectors = correction_vectors,
                                        PS_adapt = PS_adapt,
                                        IS_adapt = IS_adapt,
                                        spots = spots)

    # which spots are close enough to interfere with background?
    MAXOVER=6
    OS_adapt = AnnAdaptor(data=query,dim=2,k=MAXOVER) #six near nbrs
    OS_adapt.query(query)
    if self.mask_focus[image_number] is None:
      raise Sorry("No observed/predicted spot agreement; no Spotfinder masks; skip integration")
    nbr_cutoff = 2.0* max(self.mask_focus[image_number])
    FRAME = int(nbr_cutoff/2)
    #print "The overlap cutoff is %d pixels"%nbr_cutoff
    nbr_cutoff_sq = nbr_cutoff * nbr_cutoff

    #print "Optimized C++ section...",
    # hand parameters to the C++ integration core
    self.set_frame(FRAME)
    self.set_background_factor(kwargs["background_factor"])  # required kwarg
    self.set_nbr_cutoff_sq(nbr_cutoff_sq)
    self.set_guard_width_sq(self.horizons_phil.integration.guard_width_sq)
    self.set_detector_gain(self.horizons_phil.integration.detector_gain)
    # flatten the coordinate pairs in self.sorted into one int array for C++
    flex_sorted = flex.int()
    for item in self.sorted:
      flex_sorted.append(item[0]);flex_sorted.append(item[1]);

    if self.horizons_phil.integration.mask_pixel_value is not None:
      self.set_mask_pixel_val(self.horizons_phil.integration.mask_pixel_value)

    image_obj = self.imagefiles.imageindex(self.frame_numbers[self.image_number])
    image_obj.read()
    rawdata = image_obj.linearintdata # assume image #1

    # compute background-safe pixel sets; tile bounds are passed along when
    # the detector declares active areas
    if self.inputai.active_areas != None:
      self.detector_xy_draft = self.safe_background( rawdata=rawdata,
                          predicted=self.predicted,
                          OS_adapt=OS_adapt,
                          sorted=flex_sorted,
                          tiles=self.inputai.active_areas.IT,
                          tile_id=self.inputai.active_areas.tile_id);
    else:
      self.detector_xy_draft = self.safe_background( rawdata=rawdata,
                          predicted=self.predicted,
                          OS_adapt=OS_adapt,
                          sorted=flex_sorted);
    for i in xrange(len(self.predicted)): # loop over predicteds
      # collect the background/signal mask pixel coordinates for prediction i
      B_S_mask = {}
      keys = self.get_bsmask(i)
      for k in xrange(0,len(keys),2):
        B_S_mask[(keys[k],keys[k+1])]=True
      self.BSmasks.append(B_S_mask)
    #print "Done"
    return