def __init__ (self, i_obs, crystal_symmetry=None, n_bins=None, i_over_sigma_min=2.0,
              r_merge_max=0.5, r_meas_max=0.5, completeness_min_conservative=0.9,
              completeness_min_permissive=0.5, cc_one_half_min=0.5) :
    """Determine resolution cutoffs for several merging-quality criteria.

    Bins i_obs (presumably a cctbx miller array of unmerged intensities --
    TODO confirm against callers), computes per-shell merging statistics,
    then for each criterion latches the d_min of the shell just BEFORE the
    first shell that fails the criterion's threshold.

    :param i_obs: intensity data; must support complete_set(), setup_binner(),
                  binner() and select()
    :param crystal_symmetry: accepted but not used in this constructor
    :param n_bins: number of resolution shells; derived from data size if None
    :param i_over_sigma_min: threshold for the mean I/sigma cutoff
    :param r_merge_max: upper threshold for the R_merge cutoff
    :param r_meas_max: upper threshold for the R_meas cutoff
    :param completeness_min_conservative: strict completeness threshold
    :param completeness_min_permissive: relaxed completeness threshold
    :param cc_one_half_min: threshold for the CC1/2 cutoff
    """
    self.n_bins = n_bins
    self.i_over_sigma_min = i_over_sigma_min
    self.r_merge_max = r_merge_max
    self.r_meas_max = r_meas_max
    self.completeness_min_conservative = completeness_min_conservative
    self.completeness_min_permissive = completeness_min_permissive
    self.cc_one_half_min = cc_one_half_min
    # self.cutoffs_attr is a class-level attribute defined elsewhere in the
    # class (not visible here); every cutoff starts out undetermined (None).
    for attr in self.cutoffs_attr :
        setattr(self, attr, None)
    # Decide n_bins
    if n_bins is None:
        # Roughly 500 reflections of the complete set per shell, capped at 200.
        n_bins = min(200, int(i_obs.complete_set().indices().size()/500.+.5)) # not well tested.
        #print "n_bins=",n_bins
    i_obs.setup_binner(n_bins=n_bins)
    bins = []
    for bin in i_obs.binner().range_used():
        unmerged = i_obs.select(i_obs.binner().selection(bin))
        try:
            bin_stats = merging_statistics.merging_stats(unmerged, anomalous=False)
            bins.append(bin_stats)
        except RuntimeError: # complains that no reflections left after sigma-filtering.
            continue
    # Overall d_min comes from the last shell that survived sigma-filtering.
    self.d_min_overall = bins[-1].d_min
    # Walk shells from low to high resolution; when a shell first fails a
    # criterion, record the d_min of the PREVIOUS shell as that cutoff.
    # NOTE(review): a failure in the very first shell records float("inf").
    d_min_last = float("inf")
    for bin in bins :
        if (self.i_over_sigma_cut is None) :
            if (bin.i_over_sigma_mean < self.i_over_sigma_min) :
                self.i_over_sigma_cut = d_min_last
        if (self.cc_one_half_cut is None) :
            if (bin.cc_one_half < self.cc_one_half_min) :
                self.cc_one_half_cut = d_min_last
        if (self.r_merge_cut is None) :
            if (bin.r_merge > self.r_merge_max) :
                self.r_merge_cut = d_min_last
        if (self.r_meas_cut is None) :
            if (bin.r_meas > self.r_meas_max) :
                self.r_meas_cut = d_min_last
        if (self.completeness_cut_conservative is None) :
            if (bin.completeness < completeness_min_conservative) :
                self.completeness_cut_conservative = d_min_last
        if (self.completeness_cut_permissive is None) :
            if (bin.completeness < completeness_min_permissive) :
                self.completeness_cut_permissive = d_min_last
        d_min_last = bin.d_min
def initial_est_byfit(self):
    """Initial d_min estimate from a robust curve fit of CC1/2 vs 1/d^2.

    Bins self.i_obs, collects (1/d_min^2, CC1/2) per surviving shell, fits
    self.fun_ed_aimless with a soft-L1 robust least-squares, stores the
    shells and fit in self.shells_and_fit, and returns the resolution at
    which the fitted curve drops to self.cc_half_min.
    """
    import scipy.optimize
    # Aim for ~50 reflections per shell, clamped to the range [9, 200].
    shell_count = max(min(int(self.i_obs.size() / 50. + .5), 200), 9)
    self.log_out.write("Using %d bins for initial estimate\n" % shell_count)
    self.i_obs.setup_binner(n_bins=shell_count)
    inv_d2, cc_vals = [], []
    binner = self.i_obs.binner()
    for shell in binner.range_used():
        subset = self.i_obs.select(binner.selection(shell))
        try:
            stats = merging_statistics.merging_stats(subset,
                                                     anomalous=self.anomalous_flag)
        except RuntimeError:
            # No reflections survive sigma-filtering in this shell; skip it.
            continue
        inv_d2.append(1. / stats.d_min**2)
        cc_vals.append(stats.cc_one_half)

    def residuals(params, s, cc):
        # Residual between the model curve and the observed CC1/2 values.
        d0, r = params
        return self.fun_ed_aimless(s, d0, r) - cc

    guess = [0.5 * min(inv_d2), 1.]  # starting d0, r
    self.log_out.write(" Initial d0, r = %s\n" % guess)
    fit = scipy.optimize.least_squares(residuals, guess, args=(inv_d2, cc_vals),
                                       loss="soft_l1", f_scale=.1)
    self.shells_and_fit = (inv_d2, cc_vals, fit.x)
    self.log_out.write("Least-square result:\n")
    self.log_out.write(str(fit))
    self.log_out.write("\n")
    d0, r = fit.x
    self.log_out.write(" Final d0, r = %s\n" % fit.x)
    # Invert the fitted curve at CC1/2 = cc_half_min to get d_min.
    return 1. / numpy.sqrt((numpy.arctanh(1. - 2. * self.cc_half_min) * r + d0))
def initial_est_byfit(self):
    """Initial d_min estimate from a robust curve fit of CC1/2 vs 1/d^2.

    Bins self.i_obs, collects (1/d_min^2, CC1/2) per surviving shell, fits
    self.fun_ed_aimless with soft-L1 robust least-squares, stores the data
    and solution in self.shells_and_fit, and returns the resolution where
    the fitted curve reaches self.cc_half_min.

    Fix: removed the unused local `bins = []` accumulator (dead code).
    """
    import scipy.optimize
    # Up to 200 bins. If few reflections, 50 reflections per bin. At least 9 shells.
    n_bins = max(min(int(self.i_obs.size()/50. + .5), 200), 9)
    self.log_out.write("Using %d bins for initial estimate\n" % n_bins)
    self.i_obs.setup_binner(n_bins=n_bins)
    s_list, cc_list = [], []  # 1/d^2 and CC1/2 per surviving shell
    for bin in self.i_obs.binner().range_used():
        unmerged = self.i_obs.select(self.i_obs.binner().selection(bin))
        try:
            bin_stats = merging_statistics.merging_stats(unmerged,
                                                         anomalous=self.anomalous_flag)
            s_list.append(1./bin_stats.d_min**2)
            cc_list.append(bin_stats.cc_one_half)
        except RuntimeError: # complains that no reflections left after sigma-filtering.
            continue
    # Fit curve: residual of the model against the observed CC1/2 values.
    def fun(x, s, cc):
        d0, r = x
        return self.fun_ed_aimless(s, d0, r) - cc
    # fun()

    x0 = [0.5*min(s_list), 1.] # Initial d0, r
    self.log_out.write(" Initial d0, r = %s\n" % x0)
    lsq = scipy.optimize.least_squares(fun, x0, args=(s_list, cc_list),
                                       loss="soft_l1", f_scale=.1)
    self.shells_and_fit = (s_list, cc_list, lsq.x)
    self.log_out.write("Least-square result:\n")
    self.log_out.write(str(lsq))
    self.log_out.write("\n")
    d0, r = lsq.x
    self.log_out.write(" Final d0, r = %s\n" % lsq.x)
    # Invert the fitted curve at CC1/2 = cc_half_min.
    d_min = 1./numpy.sqrt((numpy.arctanh(1.-2.*self.cc_half_min)*r + d0))
    return d_min
def initial_estimate_byfit_cchalf(i_obs, cc_half_min, anomalous_flag, log_out):
    """Initial d_min estimate by fitting a smooth curve to per-shell CC1/2.

    Bins i_obs (presumably a cctbx miller array -- TODO confirm), collects
    (1/d_min^2, CC1/2) per surviving shell, fits via fit_curve_for_cchalf()
    and inverts the fit at cc_half_min via resolution_fitted() (both defined
    elsewhere in this module).

    :param i_obs: intensity data supporting setup_binner()/binner()/select()
    :param cc_half_min: CC1/2 level at which to read off the resolution
    :param anomalous_flag: passed through to merging_stats
    :param log_out: file-like object for progress messages
    :return: (d_min, shells_and_fit) where shells_and_fit is
             (s_list, cc_list, (d0, r))

    Fix: removed the unused local `bins = []` accumulator (dead code).
    """
    # Up to 200 bins. If few reflections, 50 reflections per bin. At least 9 shells.
    n_bins = max(min(int(i_obs.size() / 50. + .5), 200), 9)
    log_out.write("Using %d bins for initial estimate\n" % n_bins)
    i_obs.setup_binner(n_bins=n_bins)
    s_list, cc_list = [], []  # 1/d^2 and CC1/2 per surviving shell
    for bin in i_obs.binner().range_used():
        unmerged = i_obs.select(i_obs.binner().selection(bin))
        try:
            bin_stats = merging_statistics.merging_stats(unmerged,
                                                         anomalous=anomalous_flag)
        except RuntimeError: # complains that no reflections left after sigma-filtering.
            continue
        s_list.append(1. / bin_stats.d_min**2)
        cc_list.append(bin_stats.cc_one_half)
    d0, r = fit_curve_for_cchalf(s_list, cc_list, log_out)
    shells_and_fit = (s_list, cc_list, (d0, r))
    d_min = resolution_fitted(d0, r, cc_half_min)
    return d_min, shells_and_fit
def cc_outer_shell(self, d_min):
    """Return CC1/2 of the highest-resolution shell when binning to d_min.

    Re-bins self.i_obs with the given d_min limit and self.n_bins shells,
    then computes merging statistics for the outermost shell only.
    """
    binner = self.i_obs.setup_binner(d_min=d_min, n_bins=self.n_bins)
    # The last entry of range_used() is the outermost (highest-resolution) shell.
    last_shell = binner.range_used()[-1]
    outer_data = self.i_obs.select(binner.selection(last_shell))
    stats = merging_statistics.merging_stats(outer_data, anomalous=False)
    return stats.cc_one_half
def __init__(self, i_obs, crystal_symmetry=None, n_bins=None, i_over_sigma_min=2.0,
             r_merge_max=0.5, r_meas_max=0.5, completeness_min_conservative=0.9,
             completeness_min_permissive=0.5, cc_one_half_min=0.5):
    """Determine resolution cutoffs for several merging-quality criteria.

    Bins i_obs (presumably a cctbx miller array of unmerged intensities --
    TODO confirm against callers), computes per-shell merging statistics,
    then for each criterion latches the d_min of the shell just BEFORE the
    first shell that fails the criterion's threshold.

    :param i_obs: intensity data; must support complete_set(), setup_binner(),
                  binner() and select()
    :param crystal_symmetry: accepted but not used in this constructor
    :param n_bins: number of resolution shells; derived from data size if None
    :param i_over_sigma_min: threshold for the mean I/sigma cutoff
    :param r_merge_max: upper threshold for the R_merge cutoff
    :param r_meas_max: upper threshold for the R_meas cutoff
    :param completeness_min_conservative: strict completeness threshold
    :param completeness_min_permissive: relaxed completeness threshold
    :param cc_one_half_min: threshold for the CC1/2 cutoff

    Fix: the Python-2-only statement `print "n_bins=", n_bins` is a syntax
    error under Python 3; replaced with a %-formatted print() call that
    produces the same output on both Python 2 and 3.
    """
    self.n_bins = n_bins
    self.i_over_sigma_min = i_over_sigma_min
    self.r_merge_max = r_merge_max
    self.r_meas_max = r_meas_max
    self.completeness_min_conservative = completeness_min_conservative
    self.completeness_min_permissive = completeness_min_permissive
    self.cc_one_half_min = cc_one_half_min
    # self.cutoffs_attr is a class-level attribute defined elsewhere in the
    # class (not visible here); every cutoff starts out undetermined (None).
    for attr in self.cutoffs_attr:
        setattr(self, attr, None)
    # Decide n_bins
    if n_bins is None:
        # Roughly 500 reflections of the complete set per shell, capped at 200.
        n_bins = min(200, int(i_obs.complete_set().indices().size() / 500. + .5)) # not well tested.
        print("n_bins= %d" % n_bins)
    i_obs.setup_binner(n_bins=n_bins)
    bins = []
    for bin in i_obs.binner().range_used():
        unmerged = i_obs.select(i_obs.binner().selection(bin))
        try:
            bin_stats = merging_statistics.merging_stats(unmerged, anomalous=False)
            bins.append(bin_stats)
        except RuntimeError: # complains that no reflections left after sigma-filtering.
            continue
    # Overall d_min comes from the last shell that survived sigma-filtering.
    self.d_min_overall = bins[-1].d_min
    # Walk shells from low to high resolution; when a shell first fails a
    # criterion, record the d_min of the PREVIOUS shell as that cutoff.
    # NOTE(review): a failure in the very first shell records float("inf").
    d_min_last = float("inf")
    for bin in bins:
        if (self.i_over_sigma_cut is None):
            if (bin.i_over_sigma_mean < self.i_over_sigma_min):
                self.i_over_sigma_cut = d_min_last
        if (self.cc_one_half_cut is None):
            if (bin.cc_one_half < self.cc_one_half_min):
                self.cc_one_half_cut = d_min_last
        if (self.r_merge_cut is None):
            if (bin.r_merge > self.r_merge_max):
                self.r_merge_cut = d_min_last
        if (self.r_meas_cut is None):
            if (bin.r_meas > self.r_meas_max):
                self.r_meas_cut = d_min_last
        if (self.completeness_cut_conservative is None):
            if (bin.completeness < completeness_min_conservative):
                self.completeness_cut_conservative = d_min_last
        if (self.completeness_cut_permissive is None):
            if (bin.completeness < completeness_min_permissive):
                self.completeness_cut_permissive = d_min_last
        d_min_last = bin.d_min
def cc_outer_shell(self, d_min):
    """Return CC1/2 of the highest-resolution shell when binning to d_min.

    Re-bins self.i_obs with the given d_min limit and self.n_bins shells,
    then computes merging statistics for the outermost shell only.
    """
    binner = self.i_obs.setup_binner(d_min=d_min, n_bins=self.n_bins)
    # range_used()[-1] is the outermost (highest-resolution) shell.
    bin_stats = merging_statistics.merging_stats(self.i_obs.select(
        binner.selection(binner.range_used()[-1])), anomalous=False)
    return bin_stats.cc_one_half