def resolution_cc_half(self, limit=None, log=None):
    '''Compute a resolution limit where cc_half < 0.5 (limit if set)
    or the full extent of the data.'''

    if limit is None:
        limit = self._params.cc_half

    cc_s = flex.double(
        [b.cc_one_half for b in self._merging_statistics.bins]).reversed()
    s_s = flex.double(
        [1 / b.d_min**2 for b in self._merging_statistics.bins]).reversed()

    p = self._params.cc_half_significance_level
    if p is not None:
        significance = flex.bool(
            [b.cc_one_half_significance
             for b in self._merging_statistics.bins]).reversed()
        cc_half_critical_value = flex.double(
            [b.cc_one_half_critical_value
             for b in self._merging_statistics.bins]).reversed()
        # index of last insignificant bin
        i = flex.last_index(significance, False)
        if i is None or i == len(significance) - 1:
            i = 0
        else:
            i += 1
    else:
        i = 0

    cc_f = fit(s_s[i:], cc_s[i:], 6)

    stamp("rch: fits")
    rlimit = limit * max(cc_s)

    if log:
        fout = open(log, 'w')
        for j, s in enumerate(s_s):
            d = 1.0 / math.sqrt(s)
            o = cc_s[j]
            m = cc_f[j]
            fout.write('%f %f %f %f\n' % (s, d, o, m))
        fout.close()

    try:
        r_cc = 1.0 / math.sqrt(interpolate_value(s_s[i:], cc_f, rlimit))
    except Exception:
        r_cc = 1.0 / math.sqrt(max(s_s[i:]))

    stamp("rch: done : %s" % r_cc)

    if self._params.plot:
        plot = resolution_plot('CC1/2')
        plot.plot(s_s[i:], cc_f, label='fit')
        plot.plot(s_s, cc_s, label='CC1/2')
        if p is not None:
            plot.plot(s_s, cc_half_critical_value,
                      label='Confidence limit (p=%g)' % p)
        plot.plot_resolution_limit(r_cc)
        plot.savefig('cc_half.png')

    return r_cc
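
# A minimal standalone sketch of the technique used above, assuming only numpy:
# smooth CC1/2 as a function of s = 1/d^2 with a polynomial, then interpolate the
# point where the smoothed curve drops below limit * max(CC1/2) and convert back
# to d = 1/sqrt(s).  The names below (sketch_resolution_cc_half, the synthetic
# bins) are illustrative stand-ins for the cctbx `fit` / `interpolate_value`
# helpers, not the actual implementation.
import math

import numpy as np


def sketch_resolution_cc_half(d_min, cc_half, limit=0.5, degree=6):
    s = 1.0 / np.asarray(d_min) ** 2                     # s = 1/d^2 increases with resolution
    cc = np.asarray(cc_half)
    cc_fit = np.polyval(np.polyfit(s, cc, degree), s)    # polynomial smoothing of CC1/2(s)
    rlimit = limit * cc.max()
    below = np.nonzero(cc_fit < rlimit)[0]               # bins where the fit falls below the cut
    if below.size == 0:
        return 1.0 / math.sqrt(s.max())                  # never crosses: return the data edge
    k = below[0]
    if k == 0:
        return 1.0 / math.sqrt(s[0])
    # linear interpolation between the two bracketing bins for the crossing point
    f = (rlimit - cc_fit[k - 1]) / (cc_fit[k] - cc_fit[k - 1])
    return 1.0 / math.sqrt(s[k - 1] + f * (s[k] - s[k - 1]))


# Synthetic example: CC1/2 decaying smoothly from ~0.9 towards ~0.2.
d_bins = np.linspace(3.5, 1.4, 20)
cc_bins = 1.0 / (1.0 + np.exp(8.0 * (1.0 / d_bins ** 2 - 0.35)))
print("estimated d_min ~ %.2f A" % sketch_resolution_cc_half(d_bins, cc_bins))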
def reduce_raw_data(raw_data, qmax, bandwidth, level=0.05, q_background=None,
                    outfile=''):
    log2 = sys.stdout
    with open(outfile, "a") as log:
        print >> log, " ==== Data reduction ==== "
        print >> log, " Preprocessing of data increases efficiency of shape retrieval procedure.\n"
        print >> log, " - Interpolation stepsize : %4.3e" % bandwidth
        print >> log, " - Uniform density criteria: level is set to : %4.3e" % level
        print >> log, " maximum q to consider : %4.3e" % qmax
        print >> log2, " ==== Data reduction ==== "
        print >> log2, " Preprocessing of data increases efficiency of shape retrieval procedure.\n"
        print >> log2, " - Interpolation stepsize : %4.3e" % bandwidth
        print >> log2, " - Uniform density criteria: level is set to : %4.3e" % level
        print >> log2, " maximum q to consider : %4.3e" % qmax

    # start the search range at the q of maximum intensity; clip the upper end to the data
    qmin_indx = flex.max_index(raw_data.i)
    qmin = raw_data.q[qmin_indx]
    if qmax > raw_data.q[-1]:
        qmax = raw_data.q[-1]
    with open(outfile, "a") as log:
        print >> log, " Resulting q range to use in search: q start : %4.3e" % qmin
        print >> log, " q stop : %4.3e" % qmax
        print >> log2, " Resulting q range to use in search: q start : %4.3e" % qmin
        print >> log2, " q stop : %4.3e" % qmax

    raw_q = raw_data.q[qmin_indx:]
    raw_i = raw_data.i[qmin_indx:]
    raw_s = raw_data.s[qmin_indx:]

    ### Take care of the background (set zero at very high q) ###
    if (q_background is not None):
        cutoff = flex.bool(raw_q > q_background)
        q_bk_indx = flex.last_index(cutoff, False)
        if (q_bk_indx < raw_q.size()):
            bkgrd = flex.mean(raw_i[q_bk_indx:])
            with open(outfile, "a") as log:
                print >> log, "Background correction: I=I-background, where background=", bkgrd
                print >> log2, "Background correction: I=I-background, where background=", bkgrd
            raw_i = flex.abs(raw_i - bkgrd)

    # regrid onto a uniform q grid with step equal to the requested bandwidth
    q = flex.double(range(int((qmax - qmin) / bandwidth) + 1)) * bandwidth + qmin
    raw_data.i = flex.linear_interpolation(raw_q, raw_i, q)
    raw_data.s = flex.linear_interpolation(raw_q, raw_s, q)
    raw_data.q = q

    return raw_data
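
# A minimal numpy sketch (assumptions noted below) of what reduce_raw_data does to
# the curve itself, logging aside: clip to qmax, optionally subtract a flat
# background estimated from the points beyond q_background, then regrid I(q) and
# sigma(q) onto a uniform grid of step `bandwidth` by linear interpolation.
# numpy.interp stands in for flex.linear_interpolation, and the simple clip to
# qmax is a simplification of the real function, which also starts the grid at
# the q of peak intensity; the data below are synthetic.
import numpy as np


def sketch_reduce_raw_data(q, i, s, qmax, bandwidth, q_background=None):
    sel = q <= qmax
    q, i, s = q[sel], i[sel], s[sel]
    if q_background is not None:
        tail = q > q_background
        if tail.any():
            background = i[tail].mean()          # flat background from the high-q tail
            i = np.abs(i - background)
    q_new = np.arange(q[0], qmax + bandwidth, bandwidth)   # uniform q grid
    return q_new, np.interp(q_new, q, i), np.interp(q_new, q, s)


# Synthetic example: a decaying curve sitting on a constant background of 0.05.
q = np.linspace(0.01, 0.5, 200)
i = np.exp(-30.0 * q) + 0.05
s = 0.01 * np.ones_like(q)
q_new, i_new, s_new = sketch_reduce_raw_data(q, i, s, qmax=0.45, bandwidth=0.01,
                                             q_background=0.4)
print("regridded to %d points, background-corrected I[-1] ~ %.3f" %
      (len(q_new), i_new[-1]))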
def had_phase_transition(self):
    if len(self.differences) < 5:
        return False
    i_max = flex.max_index(self.differences)
    noise_before = (self.differences
                    < self.noise_level_before * self.differences[i_max])
    before = flex.last_index(noise_before[:i_max], True)
    if before is None:
        before = -1
    before += 1
    if i_max - before < 4:
        return False
    negative_after = self.differences < 0
    after = flex.first_index(negative_after[i_max:], True)
    if after is None:
        return False
    after += i_max
    if after - before < 10:
        return False
    if len(self.values) - after < 10:
        return False
    tail_stats = scitbx.math.basic_statistics(self.differences[-5:])
    if (tail_stats.max_absolute
            > self.noise_level_after * self.differences[i_max]):
        return False
    return True
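
# A hedged numpy restatement of the heuristic above, to make the bookkeeping
# explicit: a phase transition is declared only if the peak in the successive
# differences rises well above the pre-peak noise over at least a few bins, is
# followed by a sign change, and is then followed by a quiet tail.  The default
# thresholds (0.05 / 0.01) are illustrative stand-ins for noise_level_before /
# noise_level_after, and len(differences) stands in for len(self.values); the
# data below are synthetic.
import numpy as np


def sketch_had_phase_transition(differences, noise_before=0.05, noise_after=0.01):
    d = np.asarray(differences, dtype=float)
    if len(d) < 5:
        return False
    i_max = int(np.argmax(d))
    quiet = np.nonzero(d[:i_max] < noise_before * d[i_max])[0]   # quiet bins before the peak
    before = (int(quiet[-1]) if quiet.size else -1) + 1
    if i_max - before < 4:                       # require a gradual rise to the peak
        return False
    negative = np.nonzero(d[i_max:] < 0)[0]      # first sign change after the peak
    if negative.size == 0:
        return False
    after = i_max + int(negative[0])
    if after - before < 10 or len(d) - after < 10:
        return False
    return bool(np.abs(d[-5:]).max() <= noise_after * d[i_max])  # quiet tail


# Synthetic example: flat noise, a broad peak with a slow decay, then noise again.
rng = np.random.default_rng(0)
rise = np.array([0.1, 0.2, 0.4, 0.7, 1.0])
fall = np.array([0.7, 0.5, 0.35, 0.25, 0.18, 0.12, -0.02])
diffs = np.concatenate([0.001 * rng.standard_normal(20), rise, fall,
                        0.001 * rng.standard_normal(20)])
print(sketch_had_phase_transition(diffs))        # expected: True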
def resolution_cc_half(self, limit=None, log=None):
    '''Compute a resolution limit where cc_half < 0.5 (limit if set)
    or the full extent of the data.'''

    if limit is None:
        limit = self._params.cc_half

    if self._params.cc_half_method == 'sigma_tau':
        cc_s = flex.double([
            b.cc_one_half_sigma_tau for b in self._merging_statistics.bins
        ]).reversed()
    else:
        cc_s = flex.double([
            b.cc_one_half for b in self._merging_statistics.bins
        ]).reversed()
    s_s = flex.double([
        1 / b.d_min**2 for b in self._merging_statistics.bins
    ]).reversed()

    p = self._params.cc_half_significance_level
    if p is not None:
        if self._params.cc_half_method == 'sigma_tau':
            significance = flex.bool([
                b.cc_one_half_sigma_tau_significance
                for b in self._merging_statistics.bins
            ]).reversed()
            cc_half_critical_value = flex.double([
                b.cc_one_half_sigma_tau_critical_value
                for b in self._merging_statistics.bins
            ]).reversed()
        else:
            significance = flex.bool([
                b.cc_one_half_significance
                for b in self._merging_statistics.bins
            ]).reversed()
            cc_half_critical_value = flex.double([
                b.cc_one_half_critical_value
                for b in self._merging_statistics.bins
            ]).reversed()
        # index of last insignificant bin
        i = flex.last_index(significance, False)
        if i is None or i == len(significance) - 1:
            i = 0
        else:
            i += 1
    else:
        i = 0

    if self._params.cc_half_fit == 'tanh':
        cc_f = tanh_fit(s_s[i:], cc_s[i:], iqr_multiplier=4)
    else:
        cc_f = fit(s_s[i:], cc_s[i:], 6)

    stamp("rch: fits")
    rlimit = limit * max(cc_s)

    if log:
        fout = open(log, 'w')
        for j, s in enumerate(s_s):
            d = 1.0 / math.sqrt(s)
            o = cc_s[j]
            m = cc_f[j]
            fout.write('%f %f %f %f\n' % (s, d, o, m))
        fout.close()

    try:
        r_cc = 1.0 / math.sqrt(interpolate_value(s_s[i:], cc_f, rlimit))
    except Exception:
        r_cc = 1.0 / math.sqrt(max(s_s[i:]))

    stamp("rch: done : %s" % r_cc)

    if self._params.plot:
        plot = resolution_plot('CC1/2')
        plot.plot(s_s[i:], cc_f, label='fit')
        plot.plot(s_s, cc_s, label='CC1/2')
        if p is not None:
            plot.plot(s_s, cc_half_critical_value,
                      label='Confidence limit (p=%g)' % p)
        plot.plot_resolution_limit(r_cc)
        plot.savefig('cc_half.png')

    return r_cc
def resolution_cc_half(self, limit=None): """Compute a resolution limit where cc_half < 0.5 (limit if set) or the full extent of the data.""" if limit is None: limit = self._params.cc_half if self._params.cc_half_method == "sigma_tau": cc_s = flex.double( [b.cc_one_half_sigma_tau for b in self._merging_statistics.bins] ).reversed() else: cc_s = flex.double( [b.cc_one_half for b in self._merging_statistics.bins] ).reversed() s_s = flex.double( [1 / b.d_min ** 2 for b in self._merging_statistics.bins] ).reversed() p = self._params.cc_half_significance_level if p is not None: if self._params.cc_half_method == "sigma_tau": significance = flex.bool( [ b.cc_one_half_sigma_tau_significance for b in self._merging_statistics.bins ] ).reversed() cc_half_critical_value = flex.double( [ b.cc_one_half_sigma_tau_critical_value for b in self._merging_statistics.bins ] ).reversed() else: significance = flex.bool( [b.cc_one_half_significance for b in self._merging_statistics.bins] ).reversed() cc_half_critical_value = flex.double( [ b.cc_one_half_critical_value for b in self._merging_statistics.bins ] ).reversed() # index of last insignificant bin i = flex.last_index(significance, False) if i is None or i == len(significance) - 1: i = 0 else: i += 1 else: i = 0 if self._params.cc_half_fit == "tanh": cc_f = tanh_fit(s_s[i:], cc_s[i:], iqr_multiplier=4) else: cc_f = fit(s_s[i:], cc_s[i:], 6) logger.debug("rch: fits") rlimit = limit * max(cc_s) for j, s in enumerate(s_s[i:]): logger.debug("%f %f %f %f\n", s, 1.0 / math.sqrt(s), cc_s[i + j], cc_f[j]) try: r_cc = 1.0 / math.sqrt(interpolate_value(s_s[i:], cc_f, rlimit)) except Exception: r_cc = 1.0 / math.sqrt(max(s_s[i:])) logger.debug("rch: done : %s", r_cc) if self._params.plot: plot = resolution_plot("CC1/2") plot.plot(s_s[i:], cc_f, label="fit") plot.plot(s_s, cc_s, label="CC1/2") if p is not None: plot.plot( s_s, cc_half_critical_value, label="Confidence limit (p=%g)" % p ) plot.plot_resolution_limit(r_cc) plot.savefig("cc_half.png") return r_cc