def set_fstats(self, fstats):
    self.all_spots = None
    self.spots = collections.OrderedDict()
    self.total_integrated_signal = {}
    self.mean_integrated_signal = {}
    self.median_integrated_signal = {}
    self.n_spots = {}
    for k in sorted(fstats.nodes.keys()):
        node = fstats.nodes[k]
        # XXX some data in node.data will be corrupted (e.g. resolution, wts),
        # but x and y coordinates look ok (it works later). why?
        if node.descriptor == "spots_total":
            self.all_spots = node.data
        else:
            self.spots[node.descriptor] = node.data
    # Pre-calculate stats
    for k in self.keys():
        summed_wts = [flex.sum(spot.wts) for spot in self.get_spots(k)]
        self.intensities[k] = summed_wts
        self.resolutions[k] = [spot.resolution for spot in self.get_spots(k)]
        total_summed = flex.sum(flex.double(summed_wts))
        if len(summed_wts) > 0:
            self.mean_integrated_signal[k] = total_summed / len(summed_wts)
            self.median_integrated_signal[k] = flex.median(flex.double(summed_wts))
        else:
            self.mean_integrated_signal[k] = 0.
            self.median_integrated_signal[k] = 0.
        self.total_integrated_signal[k] = total_summed
        self.n_spots[k] = len(summed_wts)
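
# --- Example (not part of the original code): why both mean and median are
# pre-calculated above. A minimal sketch using only flex primitives already
# used in set_fstats (flex.double, flex.sum, flex.median); the per-spot
# summed weights are made-up values.
def _demo_mean_vs_median_signal():
    from cctbx.array_family import flex
    summed_wts = [12.0, 15.5, 9.8, 240.0]  # hypothetical values; one strong outlier
    data = flex.double(summed_wts)
    mean_signal = flex.sum(data) / len(summed_wts)  # pulled up by the outlier
    median_signal = flex.median(data)               # robust to the outlier
    return mean_signal, median_signal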
def calc_mean_intensity(self, pickle_filename, iparams, avg_mode):
    observations_pickle = read_frame(pickle_filename)
    pickle_filepaths = pickle_filename.split('/')
    txt_exception = ' {0:40} ==> '.format(pickle_filepaths[len(pickle_filepaths) - 1])
    if observations_pickle is None:
        txt_exception += 'empty or bad input file\n'
        return None, txt_exception
    inputs, txt_organize_input = self.organize_input(
        observations_pickle, iparams, avg_mode, pickle_filename=pickle_filename)
    if inputs is not None:
        observations_original, alpha_angle_obs, spot_pred_x_mm, spot_pred_y_mm, \
            detector_distance_mm, wavelength, crystal_init_orientation = inputs
    else:
        txt_exception += txt_organize_input + '\n'
        return None, txt_exception
    # filter by resolution
    observations_sel = observations_original.resolution_filter(
        d_min=iparams.scale.d_min, d_max=iparams.scale.d_max)
    # filter by sigma
    i_sel = (observations_sel.data() / observations_sel.sigmas()) > iparams.scale.sigma_min
    if len(observations_sel.data().select(i_sel)) == 0:
        return None, txt_exception
    # note: the median of the filtered intensities is used as a robust
    # estimate of the frame's mean intensity
    mean_I = flex.median(observations_sel.data().select(i_sel))
    return mean_I, txt_exception + 'ok'
def calc_mean_intensity(self, pickle_filename, iparams, avg_mode='average'):
    observations_pickle = pickle.load(open(pickle_filename, "rb"))
    wavelength = observations_pickle["wavelength"]
    pickle_filepaths = pickle_filename.split('/')
    txt_exception = ' {0:40} ==> '.format(pickle_filepaths[len(pickle_filepaths) - 1])
    inputs, txt_organize_input = self.organize_input(
        observations_pickle, iparams, avg_mode, pickle_filename=pickle_filename)
    if inputs is not None:
        observations_original, alpha_angle, spot_pred_x_mm, spot_pred_y_mm, \
            detector_distance_mm, identified_isoform, mapped_predictions, \
            xbeam, ybeam = inputs
    else:
        txt_exception += txt_organize_input + '\n'
        return None, txt_exception
    # filter by resolution
    observations_sel = observations_original.resolution_filter(
        d_min=iparams.scale.d_min, d_max=iparams.scale.d_max)
    # filter by sigma
    i_sel = (observations_sel.data() / observations_sel.sigmas()) > iparams.scale.sigma_min
    if len(observations_sel.data().select(i_sel)) == 0:
        return None, txt_exception
    # note: the median of the filtered intensities is used as a robust
    # estimate of the frame's mean intensity
    mean_I = flex.median(observations_sel.data().select(i_sel))
    return mean_I, txt_exception + 'ok'
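
# --- Example (not part of the original code): the I/sigma(I) selection idiom
# used by calc_mean_intensity, on made-up numbers. Note that despite the name
# "mean_I", the function returns flex.median as a robust central value.
def _demo_sigma_filter():
    from cctbx.array_family import flex
    intensities = flex.double([120.0, 5.0, 300.0, 45.0])  # hypothetical I
    sigmas = flex.double([10.0, 8.0, 12.0, 9.0])          # hypothetical sigma(I)
    sigma_min = 1.5
    i_sel = (intensities / sigmas) > sigma_min  # element-wise boolean flex array
    if i_sel.count(True) == 0:
        return None
    return flex.median(intensities.select(i_sel))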
def report(O, plot=None, xy_prefix=None):
    import sys
    from cctbx.array_family import flex
    print("Number of shots:", O.completeness_history.size() - 1)
    print()
    print("Histogram of counts per reflection:")
    flex.histogram(O.counts.as_double(), n_slots=8).show(
        prefix=" ", format_cutoffs="%7.0f")
    print()
    print("Observations per reflection:")
    flex.show_count_stats(counts=O.counts, prefix=" ")
    print(" Median:", int(flex.median(O.counts.as_double()) + 0.5))
    print()
    sys.stdout.flush()
    if xy_prefix is None:
        xy_prefix = ""
    elif len(xy_prefix) != 0:
        xy_prefix = xy_prefix + "_"
    def dump_xy(name, array):
        with open(xy_prefix + "%s.xy" % name, "w") as f:
            for i, c in enumerate(array):
                print(i, c, file=f)
    dump_xy("completeness_history", O.completeness_history)
    dump_xy("min_count_history", O.min_count_history)
    if O.use_symmetry:
        _ = O.i_calc.asu
    else:
        _ = O.i_calc.p1_anom
    _ = _.customized_copy(data=O.counts).sort(by_value="resolution")
    sym_factors = _.space_group().order_p()
    if not O.i_calc.asu.anomalous_flag():
        sym_factors *= 2
    sym_factors /= _.multiplicities().data()
    counts_sorted_by_resolution = _.data().as_int() * sym_factors
    dump_xy("counts_sorted_by_resolution", counts_sorted_by_resolution)
    dump_xy("d_spacings_sorted_by_resolution", _.d_spacings().data())
    if plot == "completeness":
        from libtbx import pyplot
        fig = pyplot.figure()
        ax = fig.add_subplot(1, 1, 1)
        _ = O.completeness_history
        nx = _.size()
        ax.plot(range(nx), _, "r-")
        ax.axis([0, nx, 0, 1])
        pyplot.show()
    elif plot == "redundancy":
        from libtbx import pyplot
        fig = pyplot.figure()
        ax = fig.add_subplot(1, 1, 1)
        _ = counts_sorted_by_resolution
        ax.plot(range(len(_)), _, "r-")
        ax.axis([-_.size() * 0.05, _.size() * 1.05, 0, None])
        pyplot.show()
    elif plot is not None:
        raise RuntimeError('Unknown plot type: "%s"' % plot)
def get_median_integrated_signal(self, key, exclude_resolution_ranges=[]):
    key = self.find_nearest_key(key)
    if exclude_resolution_ranges == []:
        if hasattr(self, "median_integrated_signal"):  # backward compatibility
            return self.median_integrated_signal[key]
        else:
            print("NOTE: median is not available. Using mean value instead.")
            return self.mean_integrated_signal[key]
    else:
        intensities = filter_spots_with_ex_resolution_range(
            self.intensities[key], self.resolutions[key], exclude_resolution_ranges)
        if len(intensities) > 0:
            return flex.median(flex.double(intensities))
        else:
            return 0
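
# --- Example (not part of the original code): a guess at the contract of
# filter_spots_with_ex_resolution_range, inferred only from how it is called
# above; the real implementation may differ. Each excluded range is taken as
# a pair of d-spacings.
def _demo_filter_spots_with_ex_resolution_range(intensities, resolutions,
                                                exclude_resolution_ranges):
    kept = []
    for intensity, d in zip(intensities, resolutions):
        # keep the spot only if its d-spacing lies outside every excluded range
        if not any(min(r) <= d <= max(r) for r in exclude_resolution_ranges):
            kept.append(intensity)
    return kept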
def unit_cell_histograms(crystals):
    params = [flex.double() for i in range(6)]
    for cryst in crystals:
        unit_cell = cryst.get_unit_cell().parameters()
        for i in range(6):
            params[i].append(unit_cell[i])
    histograms = []
    for i in range(6):
        histograms.append(flex.histogram(params[i], n_slots=100))
    median_unit_cell = uctbx.unit_cell([flex.median(p) for p in params])
    modal_unit_cell = uctbx.unit_cell(
        [h.slot_centers()[flex.max_index(h.slots())] for h in histograms])
    print("Modal unit cell: %s" % str(modal_unit_cell))
    print("Median unit cell: %s" % str(median_unit_cell))
    return histograms
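
# --- Example (not part of the original code): median vs modal cell parameter
# as computed in unit_cell_histograms, on a synthetic distribution of one axis.
def _demo_median_vs_modal_axis():
    from cctbx.array_family import flex
    a_values = flex.double([78.1, 78.3, 78.2, 78.25, 95.0])  # made-up, one outlier (Angstrom)
    median_a = flex.median(a_values)  # robust central value
    hist = flex.histogram(a_values, n_slots=100)
    modal_a = hist.slot_centers()[flex.max_index(hist.slots())]  # most populated bin
    return median_a, modal_a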
def set_spots(self, spots):
    self.all_spots = []
    for x, y, d, intensity in spots:
        self.all_spots.append(DummySpot(x, y, d, intensity))
    self.spots["xds"] = range(len(self.all_spots))
    for k in self.keys():
        summed_wts = [spot.intensity for spot in self.get_spots(k)]
        self.intensities[k] = summed_wts
        self.resolutions[k] = [spot.resolution for spot in self.get_spots(k)]  # XXX calculate resolution!!
        total_summed = sum(summed_wts)
        if len(summed_wts) > 0:
            self.mean_integrated_signal[k] = total_summed / len(summed_wts)
            self.median_integrated_signal[k] = flex.median(flex.double(summed_wts))
        else:
            self.mean_integrated_signal[k] = 0.
            self.median_integrated_signal[k] = 0.
        self.total_integrated_signal[k] = total_summed
        self.n_spots[k] = len(summed_wts)
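
# --- Example (not part of the original code): the resolution calculation the
# XXX note above asks for, assuming a flat detector mounted normal to the beam;
# the helper name and beam-centre arguments are hypothetical.
def _demo_spot_resolution(x_mm, y_mm, beam_x_mm, beam_y_mm,
                          distance_mm, wavelength):
    import math
    r = math.hypot(x_mm - beam_x_mm, y_mm - beam_y_mm)  # radial offset on detector
    two_theta = math.atan2(r, distance_mm)              # scattering angle 2*theta
    return wavelength / (2.0 * math.sin(two_theta / 2.0))  # Bragg: d = lambda / (2 sin theta)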
def discover_better_experimental_model(imagesets, spot_lists, params, dps_params,
                                       nproc=1, wide_search_binning=1):
    assert len(imagesets) == len(spot_lists)
    assert len(imagesets) > 0
    # XXX should check that all the detector and beam objects are the same
    from dials.algorithms.indexing.indexer import indexer_base
    spot_lists_mm = []
    max_cell_list = []
    detector = imagesets[0].get_detector()
    beam = imagesets[0].get_beam()
    beam_panel = detector.get_panel_intersection(beam.get_s0())
    if beam_panel == -1:
        from libtbx.utils import Sorry
        raise Sorry('input beam does not intersect detector')
    for imageset, spots in zip(imagesets, spot_lists):
        if 'imageset_id' not in spots:
            spots['imageset_id'] = spots['id']
        spots_mm = indexer_base.map_spots_pixel_to_mm_rad(
            spots=spots, detector=imageset.get_detector(), scan=imageset.get_scan())
        indexer_base.map_centroids_to_reciprocal_space(
            spots_mm, detector=imageset.get_detector(), beam=imageset.get_beam(),
            goniometer=imageset.get_goniometer())
        if dps_params.d_min is not None:
            d_spacings = 1 / spots_mm['rlp'].norms()
            sel = d_spacings > dps_params.d_min
            spots_mm = spots_mm.select(sel)
        # derive a max_cell from mm spots
        if params.max_cell is None:
            from dials.algorithms.indexing.indexer import find_max_cell
            max_cell = find_max_cell(spots_mm, max_cell_multiplier=1.3,
                                     step_size=45).max_cell
            max_cell_list.append(max_cell)
        if (params.max_reflections is not None
                and spots_mm.size() > params.max_reflections):
            logger.info('Selecting subset of %i reflections for analysis'
                        % params.max_reflections)
            perm = flex.random_permutation(spots_mm.size())
            sel = perm[:params.max_reflections]
            spots_mm = spots_mm.select(sel)
        spot_lists_mm.append(spots_mm)
    if params.max_cell is None:
        max_cell = flex.median(flex.double(max_cell_list))
    else:
        max_cell = params.max_cell
    args = [(imageset, spots, max_cell, dps_params)
            for imageset, spots in zip(imagesets, spot_lists_mm)]
    from libtbx import easy_mp
    results = easy_mp.parallel_map(
        func=run_dps, iterable=args, processes=nproc, method="multiprocessing",
        preserve_order=True, asynchronous=True, preserve_exception_message=True)
    solution_lists = [r["solutions"] for r in results]
    amax_list = [r["amax"] for r in results]
    assert len(solution_lists) > 0
    # perform calculation
    if dps_params.indexing.improve_local_scope == "origin_offset":
        discoverer = better_experimental_model_discovery(
            imagesets, spot_lists_mm, solution_lists, amax_list, dps_params,
            wide_search_binning=wide_search_binning)
        new_detector = discoverer.optimize_origin_offset_local_scope()
        old_panel, old_beam_centre = detector.get_ray_intersection(beam.get_s0())
        new_panel, new_beam_centre = new_detector.get_ray_intersection(beam.get_s0())
        old_beam_centre_px = detector[old_panel].millimeter_to_pixel(old_beam_centre)
        new_beam_centre_px = new_detector[new_panel].millimeter_to_pixel(new_beam_centre)
        logger.info("Old beam centre: %.2f, %.2f mm" % old_beam_centre
                    + " (%.1f, %.1f px)" % old_beam_centre_px)
        logger.info("New beam centre: %.2f, %.2f mm" % new_beam_centre
                    + " (%.1f, %.1f px)" % new_beam_centre_px)
        logger.info("Shift: %.2f, %.2f mm"
                    % (matrix.col(old_beam_centre) - matrix.col(new_beam_centre)).elems
                    + " (%.1f, %.1f px)"
                    % (matrix.col(old_beam_centre_px) - matrix.col(new_beam_centre_px)).elems)
        return new_detector, beam
    elif dps_params.indexing.improve_local_scope == "S0_vector":
        raise NotImplementedError()
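
# --- Example (not part of the original code): how the per-imageset max_cell
# estimates are combined above; the values are made up.
def _demo_consensus_max_cell():
    from cctbx.array_family import flex
    max_cell_list = [112.0, 108.5, 140.2, 110.7]  # hypothetical estimates (Angstrom)
    return flex.median(flex.double(max_cell_list))  # one outlier-tolerant consensus value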
def scale_frame_by_mean_I(self, frame_no, pickle_filename, iparams,
                          mean_of_mean_I, avg_mode):
    observations_pickle = read_frame(pickle_filename)
    pickle_filepaths = pickle_filename.split('/')
    img_filename_only = pickle_filepaths[len(pickle_filepaths) - 1]
    txt_exception = ' {0:40} ==> '.format(img_filename_only)
    if observations_pickle is None:
        txt_exception += 'empty or bad input file\n'
        return None, txt_exception
    inputs, txt_organize_input = self.organize_input(
        observations_pickle, iparams, avg_mode, pickle_filename=pickle_filename)
    if inputs is not None:
        observations_original, alpha_angle, spot_pred_x_mm, spot_pred_y_mm, \
            detector_distance_mm, wavelength, crystal_init_orientation = inputs
    else:
        txt_exception += txt_organize_input + '\n'
        return None, txt_exception
    # select only reflections matched with scale input params.
    # filter by resolution
    i_sel_res = observations_original.resolution_filter_selection(
        d_min=iparams.scale.d_min, d_max=iparams.scale.d_max)
    observations_original_sel = observations_original.select(i_sel_res)
    alpha_angle_sel = alpha_angle.select(i_sel_res)
    spot_pred_x_mm_sel = spot_pred_x_mm.select(i_sel_res)
    spot_pred_y_mm_sel = spot_pred_y_mm.select(i_sel_res)
    # filter by sigma
    i_sel_sigmas = (observations_original_sel.data()
                    / observations_original_sel.sigmas()) > iparams.scale.sigma_min
    observations_original_sel = observations_original_sel.select(i_sel_sigmas)
    alpha_angle_sel = alpha_angle_sel.select(i_sel_sigmas)
    spot_pred_x_mm_sel = spot_pred_x_mm_sel.select(i_sel_sigmas)
    spot_pred_y_mm_sel = spot_pred_y_mm_sel.select(i_sel_sigmas)
    observations_non_polar_sel, index_basis_name = self.get_observations_non_polar(
        observations_original_sel, pickle_filename, iparams)
    observations_non_polar, index_basis_name = self.get_observations_non_polar(
        observations_original, pickle_filename, iparams)
    uc_params = observations_original.unit_cell().parameters()
    ph = partiality_handler()
    r0 = ph.calc_spot_radius(
        sqr(crystal_init_orientation.reciprocal_matrix()),
        observations_original_sel.indices(), wavelength)
    # calculate first G
    (G, B) = (1, 0)
    stats = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
    if mean_of_mean_I > 0:
        G = flex.median(observations_original_sel.data()) / mean_of_mean_I
    if iparams.flag_apply_b_by_frame:
        try:
            mxh = mx_handler()
            asu_contents = mxh.get_asu_contents(iparams.n_residues)
            observations_as_f = observations_non_polar_sel.as_amplitude_array()
            binner_template_asu = observations_as_f.setup_binner(auto_binning=True)
            wp = statistics.wilson_plot(observations_as_f, asu_contents,
                                        e_statistics=True)
            G = wp.wilson_intensity_scale_factor * 1e2
            B = wp.wilson_b
        except Exception:
            txt_exception += 'warning B-factor calculation failed.\n'
            return None, txt_exception
    two_theta = observations_original.two_theta(wavelength=wavelength).data()
    sin_theta_over_lambda_sq = observations_original.two_theta(
        wavelength=wavelength).sin_theta_over_lambda_sq().data()
    ry, rz, re, voigt_nu, rotx, roty = (0, 0, iparams.gamma_e, iparams.voigt_nu, 0, 0)
    partiality_init, delta_xy_init, rs_init, rh_init = ph.calc_partiality_anisotropy_set(
        crystal_init_orientation.unit_cell(), rotx, roty,
        observations_original.indices(), ry, rz, r0, re, voigt_nu,
        two_theta, alpha_angle, wavelength, crystal_init_orientation,
        spot_pred_x_mm, spot_pred_y_mm, detector_distance_mm,
        iparams.partiality_model, iparams.flag_beam_divergence)
    if iparams.flag_plot_expert:
        n_bins = 20
        binner = observations_original.setup_binner(n_bins=n_bins)
        binner_indices = binner.bin_indices()
        avg_partiality_init = flex.double()
        avg_rs_init = flex.double()
        avg_rh_init = flex.double()
        one_dsqr_bin = flex.double()
        for i in range(1, n_bins + 1):
            i_binner = (binner_indices == i)
            if len(observations_original.data().select(i_binner)) > 0:
                print(binner.bin_d_range(i)[1],
                      flex.mean(partiality_init.select(i_binner)),
                      flex.mean(rs_init.select(i_binner)),
                      flex.mean(rh_init.select(i_binner)),
                      len(partiality_init.select(i_binner)))
    # monte-carlo merge
    if iparams.flag_monte_carlo:
        G = 1
        B = 0
        partiality_init = flex.double([1] * len(partiality_init))
    # save results
    refined_params = flex.double([
        G, B, rotx, roty, ry, rz, r0, re, voigt_nu,
        uc_params[0], uc_params[1], uc_params[2],
        uc_params[3], uc_params[4], uc_params[5]])
    pres = postref_results()
    pres.set_params(observations=observations_non_polar,
                    observations_original=observations_original,
                    refined_params=refined_params,
                    stats=stats,
                    partiality=partiality_init,
                    rs_set=rs_init,
                    rh_set=rh_init,
                    frame_no=frame_no,
                    pickle_filename=pickle_filename,
                    wavelength=wavelength,
                    crystal_orientation=crystal_init_orientation,
                    detector_distance_mm=detector_distance_mm)
    txt_scale_frame_by_mean_I = ' {0:40} ==> RES:{1:5.2f} NREFL:{2:5d} G:{3:6.4f} B:{4:6.1f} CELL:{5:6.2f} {6:6.2f} {7:6.2f} {8:6.2f} {9:6.2f} {10:6.2f}'.format(
        img_filename_only + ' (' + index_basis_name + ')',
        observations_original.d_min(), len(observations_original_sel.data()),
        G, B, uc_params[0], uc_params[1], uc_params[2],
        uc_params[3], uc_params[4], uc_params[5])
    print(txt_scale_frame_by_mean_I)
    txt_scale_frame_by_mean_I += '\n'
    return pres, txt_scale_frame_by_mean_I
def optimize_scalefactors(self, I_r_flex, observations_original, wavelength,
                          crystal_init_orientation, alpha_angle,
                          spot_pred_x_mm, spot_pred_y_mm, iparams, pres_in,
                          observations_non_polar, detector_distance_mm,
                          const_params):
    ph = partiality_handler()
    pr_d_min = iparams.postref.scale.d_min
    pr_d_max = iparams.postref.scale.d_max
    pr_sigma_min = iparams.postref.scale.sigma_min
    pr_partiality_min = iparams.postref.scale.partiality_min
    # filter by resolution
    observations_original_sel, alpha_angle_sel, spot_pred_x_mm_sel, \
        spot_pred_y_mm_sel, I_ref_sel = self.get_filtered_data(
            'resolution', [pr_d_min, pr_d_max], observations_original,
            alpha_angle, spot_pred_x_mm, spot_pred_y_mm, I_r_flex)
    # filter by sigma
    observations_original_sel, alpha_angle_sel, spot_pred_x_mm_sel, \
        spot_pred_y_mm_sel, I_ref_sel = self.get_filtered_data(
            'sigma', [pr_sigma_min], observations_original_sel, alpha_angle_sel,
            spot_pred_x_mm_sel, spot_pred_y_mm_sel, I_ref_sel)
    I_r_true = I_ref_sel[:]
    I_o_true = observations_original_sel.data()[:]
    if pres_in is not None:
        G, B, b0 = pres_in.G, pres_in.B, pres_in.B
    else:
        G = flex.median(I_o_true) / flex.median(I_r_true)
        B, b0 = (0, 0)
    if iparams.flag_apply_b_by_frame:
        try:
            from mod_util import mx_handler
            mxh = mx_handler()
            asu_contents = mxh.get_asu_contents(iparams.n_residues)
            observations_as_f = observations_original_sel.as_amplitude_array()
            binner_template_asu = observations_as_f.setup_binner(auto_binning=True)
            wp = statistics.wilson_plot(observations_as_f, asu_contents,
                                        e_statistics=True)
            G = wp.wilson_intensity_scale_factor * 1e3
            B = wp.wilson_b
        except Exception:
            pass
    refine_mode = 'scale_factor'
    xinp = flex.double([G, B])
    args = (I_r_true, observations_original_sel, wavelength, alpha_angle_sel,
            crystal_init_orientation, spot_pred_x_mm_sel, spot_pred_y_mm_sel,
            detector_distance_mm, refine_mode, const_params, b0, None, iparams)
    lh = lbfgs_handler(current_x=xinp, args=args)
    G_fin, B_fin = (lh.x[0], lh.x[1])
    rotx, roty, ry, rz, r0, re, voigt_nu, a, b, c, alpha, beta, gamma = const_params
    two_theta = observations_original.two_theta(wavelength=wavelength)
    sin_theta_over_lambda_sq = two_theta.sin_theta_over_lambda_sq().data()
    uc = unit_cell((a, b, c, alpha, beta, gamma))
    partiality_init, delta_xy_init, rs_init, dummy = ph.calc_partiality_anisotropy_set(
        uc, rotx, roty, observations_original.indices(), ry, rz, r0, re, voigt_nu,
        two_theta.data(), alpha_angle, wavelength, crystal_init_orientation,
        spot_pred_x_mm, spot_pred_y_mm, detector_distance_mm,
        iparams.partiality_model, iparams.flag_beam_divergence)
    I_o_init = ph.calc_full_refl(observations_original.data(),
                                 sin_theta_over_lambda_sq, G, B,
                                 partiality_init, rs_init)
    I_o_fin = ph.calc_full_refl(observations_original.data(),
                                sin_theta_over_lambda_sq, G_fin, B_fin,
                                partiality_init, rs_init)
    SE_of_the_estimate = standard_error_of_the_estimate(I_r_flex, I_o_fin, 2)
    R_sq = coefficient_of_determination(I_r_flex, I_o_fin) * 100
    CC_init = flex.linear_correlation(I_r_flex, I_o_init).coefficient()
    CC_final = flex.linear_correlation(I_r_flex, I_o_fin).coefficient()
    err_init = (I_r_flex - I_o_init) / observations_original.sigmas()
    R_init = math.sqrt(flex.sum(err_init**2))
    err_final = (I_r_flex - I_o_fin) / observations_original.sigmas()
    R_final = math.sqrt(flex.sum(err_final**2))
    R_xy_init = 0
    R_xy_final = 0
    CC_iso_init = 0
    CC_iso_final = 0
    return flex.double(list(lh.x)), (SE_of_the_estimate, R_sq, CC_init, CC_final,
                                     R_init, R_final, R_xy_init, R_xy_final,
                                     CC_iso_init, CC_iso_final)
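
# --- Example (not part of the original code): the initial scale-factor guess
# used in optimize_scalefactors when no prior result (pres_in) exists; the
# intensities are made up.
def _demo_initial_scale():
    from cctbx.array_family import flex
    I_obs = flex.double([50.0, 80.0, 65.0, 900.0])    # hypothetical observed I
    I_ref = flex.double([100.0, 160.0, 130.0, 120.0])  # hypothetical reference I
    G = flex.median(I_obs) / flex.median(I_ref)  # ratio of medians resists outliers
    B = 0.0  # B-factor starts at zero unless the Wilson-plot branch overrides it
    return G, B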
def scale_frame_by_mean_I(self, frame_no, pickle_filename, iparams,
                          mean_of_mean_I, avg_mode):
    observations_pickle = pickle.load(open(pickle_filename, "rb"))
    pickle_filepaths = pickle_filename.split("/")
    img_filename_only = pickle_filepaths[len(pickle_filepaths) - 1]
    inputs, txt_organize_input = self.organize_input(
        observations_pickle, iparams, avg_mode, pickle_filename=pickle_filename)
    txt_exception = " {0:40} ==> ".format(img_filename_only)
    if inputs is not None:
        observations_original, alpha_angle, spot_pred_x_mm, spot_pred_y_mm, \
            detector_distance_mm = inputs
    else:
        txt_exception += txt_organize_input + "\n"
        return None, txt_exception
    wavelength = observations_pickle["wavelength"]
    crystal_init_orientation = observations_pickle["current_orientation"][0]
    # select only reflections matched with scale input params.
    # filter by resolution
    i_sel_res = observations_original.resolution_filter_selection(
        d_min=iparams.scale.d_min, d_max=iparams.scale.d_max)
    observations_original_sel = observations_original.select(i_sel_res)
    alpha_angle_sel = alpha_angle.select(i_sel_res)
    spot_pred_x_mm_sel = spot_pred_x_mm.select(i_sel_res)
    spot_pred_y_mm_sel = spot_pred_y_mm.select(i_sel_res)
    # filter by sigma
    i_sel_sigmas = (observations_original_sel.data()
                    / observations_original_sel.sigmas()) > iparams.scale.sigma_min
    observations_original_sel = observations_original_sel.select(i_sel_sigmas)
    alpha_angle_sel = alpha_angle_sel.select(i_sel_sigmas)
    spot_pred_x_mm_sel = spot_pred_x_mm_sel.select(i_sel_sigmas)
    spot_pred_y_mm_sel = spot_pred_y_mm_sel.select(i_sel_sigmas)
    polar_hkl, cc_iso_raw_asu, cc_iso_raw_rev = self.determine_polar(
        observations_original, iparams, pickle_filename)
    observations_non_polar_sel = self.get_observations_non_polar(
        observations_original_sel, polar_hkl)
    observations_non_polar = self.get_observations_non_polar(
        observations_original, polar_hkl)
    uc_params = observations_original.unit_cell().parameters()
    from mod_leastsqr import calc_spot_radius
    r0 = calc_spot_radius(sqr(crystal_init_orientation.reciprocal_matrix()),
                          observations_original_sel.indices(), wavelength)
    # calculate first G
    (G, B) = (1, 0)
    stats = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
    if mean_of_mean_I > 0:
        G = flex.median(observations_original_sel.data()) / mean_of_mean_I
    if iparams.flag_apply_b_by_frame:
        try:
            from mod_util import mx_handler
            mxh = mx_handler()
            asu_contents = mxh.get_asu_contents(iparams.n_residues)
            observations_as_f = observations_non_polar.as_amplitude_array()
            binner_template_asu = observations_as_f.setup_binner(auto_binning=True)
            wp = statistics.wilson_plot(observations_as_f, asu_contents,
                                        e_statistics=True)
            G = wp.wilson_intensity_scale_factor * 1e3
            B = wp.wilson_b
        except Exception:
            txt_exception += "warning B-factor calculation failed.\n"
            return None, txt_exception
    from mod_leastsqr import calc_partiality_anisotropy_set
    two_theta = observations_original.two_theta(wavelength=wavelength).data()
    sin_theta_over_lambda_sq = observations_original.two_theta(
        wavelength=wavelength).sin_theta_over_lambda_sq().data()
    ry, rz, re, rotx, roty = (0, 0, iparams.gamma_e, 0, 0)
    partiality_init, delta_xy_init, rs_init, rh_init = calc_partiality_anisotropy_set(
        crystal_init_orientation.unit_cell(), rotx, roty,
        observations_original.indices(), ry, rz, r0, re,
        two_theta, alpha_angle, wavelength, crystal_init_orientation,
        spot_pred_x_mm, spot_pred_y_mm, detector_distance_mm,
        iparams.partiality_model, iparams.flag_beam_divergence)
    if iparams.flag_plot_expert:
        n_bins = 20
        binner = observations_original.setup_binner(n_bins=n_bins)
        binner_indices = binner.bin_indices()
        avg_partiality_init = flex.double()
        avg_rs_init = flex.double()
        avg_rh_init = flex.double()
        one_dsqr_bin = flex.double()
        for i in range(1, n_bins + 1):
            i_binner = binner_indices == i
            if len(observations_original.data().select(i_binner)) > 0:
                print(binner.bin_d_range(i)[1],
                      flex.mean(partiality_init.select(i_binner)),
                      flex.mean(rs_init.select(i_binner)),
                      flex.mean(rh_init.select(i_binner)),
                      len(partiality_init.select(i_binner)))
    # save results
    refined_params = flex.double([
        G, B, rotx, roty, ry, rz, r0, re,
        uc_params[0], uc_params[1], uc_params[2],
        uc_params[3], uc_params[4], uc_params[5]])
    pres = postref_results()
    pres.set_params(observations=observations_non_polar,
                    observations_original=observations_original,
                    refined_params=refined_params,
                    stats=stats,
                    partiality=partiality_init,
                    rs_set=rs_init,
                    rh_set=rh_init,
                    frame_no=frame_no,
                    pickle_filename=pickle_filename,
                    wavelength=wavelength,
                    crystal_orientation=crystal_init_orientation,
                    detector_distance_mm=detector_distance_mm)
    txt_scale_frame_by_mean_I = " {0:40} ==> RES:{1:5.2f} NREFL:{2:5d} G:{3:10.3e} B:{4:7.1f} CELL:{5:6.2f} {6:6.2f} {7:6.2f} {8:6.2f} {9:6.2f} {10:6.2f}".format(
        img_filename_only + " (" + polar_hkl + ")",
        observations_original.d_min(), len(observations_original_sel.data()),
        G, B, uc_params[0], uc_params[1], uc_params[2],
        uc_params[3], uc_params[4], uc_params[5])
    print(txt_scale_frame_by_mean_I)
    txt_scale_frame_by_mean_I += "\n"
    return pres, txt_scale_frame_by_mean_I
def discover_better_experimental_model(imagesets, spot_lists, params, dps_params,
                                       nproc=1, wide_search_binning=1):
    assert len(imagesets) == len(spot_lists)
    assert len(imagesets) > 0
    # XXX should check that all the detector and beam objects are the same
    from dials.algorithms.indexing.indexer import indexer_base
    spot_lists_mm = []
    max_cell_list = []
    detector = imagesets[0].get_detector()
    beam = imagesets[0].get_beam()
    beam_panel = detector.get_panel_intersection(beam.get_s0())
    if beam_panel == -1:
        from libtbx.utils import Sorry
        raise Sorry('input beam does not intersect detector')
    for imageset, spots in zip(imagesets, spot_lists):
        if 'imageset_id' not in spots:
            spots['imageset_id'] = spots['id']
        spots_mm = indexer_base.map_spots_pixel_to_mm_rad(
            spots=spots, detector=imageset.get_detector(), scan=imageset.get_scan())
        indexer_base.map_centroids_to_reciprocal_space(
            spots_mm, detector=imageset.get_detector(), beam=imageset.get_beam(),
            goniometer=imageset.get_goniometer())
        if dps_params.d_min is not None:
            d_spacings = 1 / spots_mm['rlp'].norms()
            sel = d_spacings > dps_params.d_min
            spots_mm = spots_mm.select(sel)
        # derive a max_cell from mm spots
        if params.max_cell is None:
            from dials.algorithms.indexing.indexer import find_max_cell
            max_cell = find_max_cell(spots_mm, max_cell_multiplier=1.3,
                                     step_size=45,
                                     nearest_neighbor_percentile=0.05).max_cell
            max_cell_list.append(max_cell)
        if (params.max_reflections is not None
                and spots_mm.size() > params.max_reflections):
            logger.info('Selecting subset of %i reflections for analysis'
                        % params.max_reflections)
            perm = flex.random_permutation(spots_mm.size())
            sel = perm[:params.max_reflections]
            spots_mm = spots_mm.select(sel)
        spot_lists_mm.append(spots_mm)
    if params.max_cell is None:
        max_cell = flex.median(flex.double(max_cell_list))
    else:
        max_cell = params.max_cell
    args = [(imageset, spots, max_cell, dps_params)
            for imageset, spots in zip(imagesets, spot_lists_mm)]
    from libtbx import easy_mp
    results = easy_mp.parallel_map(
        func=run_dps, iterable=args, processes=nproc, method="multiprocessing",
        preserve_order=True, asynchronous=True, preserve_exception_message=True)
    solution_lists = [r["solutions"] for r in results]
    amax_list = [r["amax"] for r in results]
    assert len(solution_lists) > 0
    # perform calculation
    if dps_params.indexing.improve_local_scope == "origin_offset":
        discoverer = better_experimental_model_discovery(
            imagesets, spot_lists_mm, solution_lists, amax_list, dps_params,
            wide_search_binning=wide_search_binning)
        new_detector = discoverer.optimize_origin_offset_local_scope()
        old_beam_centre = detector.get_ray_intersection(beam.get_s0())[1]
        new_beam_centre = new_detector.get_ray_intersection(beam.get_s0())[1]
        logger.info("Old beam centre: %.2f mm, %.2f mm" % old_beam_centre)
        logger.info("New beam centre: %.2f mm, %.2f mm" % new_beam_centre)
        logger.info("Shift: %.2f mm, %.2f mm" % (
            matrix.col(old_beam_centre) - matrix.col(new_beam_centre)).elems)
        return new_detector, beam
    elif dps_params.indexing.improve_local_scope == "S0_vector":
        raise NotImplementedError()