def do_gaussian_fit(scale, mu, sigma):
    """Sample a noise-free gaussian over +/-6 sigma and check that
    single_gaussian_fit recovers scale, mu and sigma to 1e-4."""
    # Evaluate the gaussian on a 1000-step grid centred on mu.
    lo, hi = mu - 6 * sigma, mu + 6 * sigma
    grid = flex.double(frange(lo, hi, (hi - lo) / 1000))
    values = scale * flex.exp(-flex.pow2(grid - mu) / (2 * sigma**2))
    fit = curve_fitting.single_gaussian_fit(grid, values)
    # fit.a/b/c correspond to scale/mu/sigma respectively.
    for got, want in ((fit.a, scale), (fit.b, mu), (fit.c, sigma)):
        assert approx_equal(got, want, eps=1e-4)
def do_gaussian_fit(scale, mu, sigma):
    """Fit a synthetic, noise-free gaussian and assert parameter recovery."""
    half_width = 6 * sigma
    start, stop = mu - half_width, mu + half_width
    step = (stop - start) / 1000
    x = flex.double(frange(start, stop, step))
    # y = scale * exp(-(x - mu)^2 / (2 sigma^2))
    exponent = -flex.pow2(x - mu) / (2 * sigma**2)
    y = scale * flex.exp(exponent)
    fit = curve_fitting.single_gaussian_fit(x, y)
    assert approx_equal(fit.a, scale, 1e-4)
    assert approx_equal(fit.b, mu, eps=1e-4)
    assert approx_equal(fit.c, sigma, eps=1e-4)
def exercise_savitzky_golay_smoothing():
    """Check that Savitzky-Golay filtering extracts gaussian noise of known sigma.

    For several relative noise levels a noisy gaussian curve is filtered;
    the RMS of the extracted noise must match the injected sigma, and the
    residual against the clean curve must stay well below the noise level.
    """

    plot = False  # flip to True for interactive matplotlib inspection

    def rms(flex_double):
        # Root-mean-square of a flex.double array.
        return math.sqrt(flex.mean(flex.pow2(flex_double)))

    for sigma_frac in (0.005, 0.01, 0.05, 0.1):
        # Random gaussian test curve on a fixed x grid.
        mean = random.randint(-5, 5)
        scale = flex.random_double() * 10
        sigma = flex.random_double() * 5 + 1
        gaussian = scitbx.math.curve_fitting.gaussian(scale, mean, sigma)

        x = flex.double(frange(-20, 20, 0.1))
        y = gaussian(x)
        # Additive gaussian noise, sigma scaled relative to the curve maximum.
        rand_norm = scitbx.random.normal_distribution(
            mean=0, sigma=sigma_frac * flex.max_absolute(y))
        g = scitbx.random.variate(rand_norm)
        noise = g(y.size())
        y_noisy = y + noise
        # according to numerical recipes the best results are obtained where the
        # full window width is between 1 and 2 times the number of points at fwhm
        # for polynomials of degree 4
        half_window = int(round(0.5 * 2.355 * sigma * 10))
        y_filtered = savitzky_golay_filter(
            x, y_noisy, half_window=half_window, degree=4)[1]
        extracted_noise = y_noisy - y_filtered
        rms_noise = rms(noise)
        rms_extracted_noise = rms(extracted_noise)
        # Injected and extracted noise levels should each be within 15% of sigma.
        assert is_below_limit(
            value=abs(rand_norm.sigma - rms_noise) / rand_norm.sigma,
            limit=0.15)
        assert is_below_limit(
            value=abs(rand_norm.sigma - rms_extracted_noise) / rand_norm.sigma,
            limit=0.15)
        diff = y_filtered - y
        # The filtered curve should track the clean curve to well under one sigma.
        assert is_below_limit(
            value=(rms(diff) / rand_norm.sigma),
            limit=0.4)

        if plot:
            from matplotlib import pyplot
            pyplot.plot(x, y)
            pyplot.plot(x, noise)
            pyplot.scatter(x, y_noisy, marker="x")
            pyplot.plot(x, y_filtered)
            pyplot.show()
            pyplot.plot(x, extracted_noise)
            pyplot.plot(x, noise)
            pyplot.show()

    return
def exercise_savitzky_golay_smoothing():
    """Verify Savitzky-Golay smoothing recovers the injected noise level.

    A noisy gaussian is filtered at several relative noise fractions; both
    the injected and the extracted noise RMS must agree with the generator
    sigma to within 15%, and the smoothed-vs-clean residual stays below
    0.4 sigma.
    """

    plot = False  # set True to visualise intermediate curves

    def rms(flex_double):
        # Root-mean-square helper.
        return math.sqrt(flex.mean(flex.pow2(flex_double)))

    for sigma_frac in (0.005, 0.01, 0.05, 0.1):
        # Random test gaussian.
        mean = random.randint(-5,5)
        scale = flex.random_double() * 10
        sigma = flex.random_double() * 5 + 1
        gaussian = curve_fitting.gaussian(scale, mean, sigma)

        x = flex.double(frange(-20,20,0.1))
        y = gaussian(x)
        # Noise sigma is a fraction of the curve's maximum absolute value.
        rand_norm = scitbx.random.normal_distribution(
            mean=0, sigma=sigma_frac*flex.max_absolute(y))
        g = scitbx.random.variate(rand_norm)
        noise = g(y.size())
        y_noisy = y + noise
        # according to numerical recipes the best results are obtained where the
        # full window width is between 1 and 2 times the number of points at fwhm
        # for polynomials of degree 4
        half_window = int(round(0.5 * 2.355 * sigma * 10))
        y_filtered = savitzky_golay_filter(
            x, y_noisy, half_window=half_window, degree=4)[1]
        extracted_noise = y_noisy - y_filtered
        rms_noise = rms(noise)
        rms_extracted_noise = rms(extracted_noise)
        # Both noise estimates must be within 15% of the true sigma.
        assert is_below_limit(
            value=abs(rand_norm.sigma - rms_noise)/rand_norm.sigma,
            limit=0.15)
        assert is_below_limit(
            value=abs(rand_norm.sigma - rms_extracted_noise)/rand_norm.sigma,
            limit=0.15)
        diff = y_filtered - y
        # Residual against the clean curve must be well below one sigma.
        assert is_below_limit(
            value=(rms(diff)/ rand_norm.sigma),
            limit=0.4)

        if plot:
            from matplotlib import pyplot
            pyplot.plot(x, y)
            pyplot.plot(x, noise)
            pyplot.scatter(x, y_noisy, marker="x")
            pyplot.plot(x, y_filtered)
            pyplot.show()
            pyplot.plot(x, extracted_noise)
            pyplot.plot(x, noise)
            pyplot.show()

    return
def plot_histogram(self, filename):
    """Save a histogram of the normalised delta-CC scores to *filename*."""
    from matplotlib import pyplot as plt

    scores = self._normalised_delta_cc_i()
    # 0.1-wide bins spanning the rounded-out range of the scores.
    lower = math.floor(flex.min(scores))
    upper = math.ceil(flex.max(scores)) + 1
    plt.figure()
    plt.hist(
        scores.as_numpy_array(),
        bins=frange(lower, upper, step=0.1),
        fill=False,
    )
    plt.xlabel(r"$\sigma$")
    plt.ylabel("Frequency")
    plt.savefig(filename)
def plot_histogram(self, filename):
    """Plot a histogram of the normalised delta-CC scores and save to *filename*.

    :param filename: output path handed straight to pyplot.savefig
    """
    # Imports kept local, matching the original (matplotlib is a heavy,
    # optional dependency).
    import math
    from matplotlib import pyplot
    from libtbx.utils import frange

    normalised_score = self._normalised_delta_cc_i()
    pyplot.figure()
    # 0.1-wide bins spanning the (rounded-out) range of the scores.
    bins = frange(
        math.floor(flex.min(normalised_score)),
        math.ceil(flex.max(normalised_score)) + 1,
        step=0.1)
    pyplot.hist(normalised_score.as_numpy_array(), bins=bins, fill=False)
    pyplot.xlabel(r'$\sigma$')
    pyplot.ylabel('Frequency')
    pyplot.savefig(filename)
def exercise_polynomial_fit():
    """Exercise univariate polynomial fitting and its analytical gradients."""

    def check_fit(x_values, coefficients):
        # Build y = sum_i c_i * x**i, then confirm the fit recovers each c_i.
        y_values = flex.double(x_values.size())
        for power, coeff in enumerate(coefficients):
            y_values += coeff * flex.pow(x_values, power)
        fit = curve_fitting.univariate_polynomial_fit(
            x_values, y_values, degree=len(coefficients) - 1)
        assert approx_equal(coefficients, fit.params, eps=1e-4)

    x = flex.double(range(-50, 50))
    check_fit(x, (2, 3, 5))  # y = 2 + 3x + 5x^2
    check_fit(x, (-0.0002, -1000))  # y = -0.0002 -1000x

    # Random polynomials of degree 0..4 over random ranges; analytical
    # partial derivatives must agree with finite differences.
    for n_terms in range(1, 6):
        params = [100 * random.random() for _ in range(n_terms)]
        x = flex.double(
            frange(-random.randint(1, 10), random.randint(1, 10), 0.1))
        functor = curve_fitting.univariate_polynomial(*params)
        fd_grads = finite_differences(functor, x)
        assert approx_equal(functor.partial_derivatives(x), fd_grads, 1e-4)
        check_fit(x, params)
def exercise_skew_normal_fit():
    """Check skew_normal gradients and parameter recovery via LBFGS fitting."""
    target_shape, target_location, target_scale = 8.0, 4.0, 2.0
    x_obs = flex.double(frange(0, 10, 0.1))
    f = curve_fitting.skew_normal(target_shape, target_location, target_scale)
    y_obs = f(x_obs)

    if 0:  # debugging: visualise the target curve
        from matplotlib import pyplot
        pyplot.plot(x_obs, y_obs)
        pyplot.show()

    # Analytical partial derivatives must agree with finite differences.
    fd_grads = finite_differences(f, x_obs)
    assert approx_equal(f.partial_derivatives(x_obs), fd_grads, 1e-4)

    # Minimise from a deliberately poor starting point (shape=1, loc=0, scale=8)
    # and check the target parameters are recovered.
    starting_f = curve_fitting.skew_normal(1, 0, 8)
    termination_params = scitbx.lbfgs.termination_parameters(
        min_iterations=100)
    fit = curve_fitting.generic_minimiser(
        [starting_f], x_obs, y_obs, termination_params=termination_params)
    assert approx_equal(fit.functions[0].params, f.params)
def exercise_polynomial_fit():
    """Fit polynomials with known coefficients; verify fit and gradients."""

    def do_polynomial_fit(x, params):
        # Synthesise y from the coefficients, fit a polynomial of matching
        # degree and require the recovered params to match the inputs.
        n_terms = len(params)
        y = flex.double(x.size())
        for i, coefficient in enumerate(params):
            y += coefficient * flex.pow(x, i)
        fit = curve_fitting.univariate_polynomial_fit(x, y, degree=n_terms - 1)
        assert approx_equal(params, fit.params, eps=1e-4)

    base_x = flex.double(range(-50, 50))
    do_polynomial_fit(base_x, (2, 3, 5))  # y = 2 + 3x + 5x^2
    do_polynomial_fit(base_x, (-0.0002, -1000))  # y = -0.0002 -1000x

    for n_terms in range(1, 6):
        params = [100 * random.random() for i in range(n_terms)]
        x = flex.double(
            frange(-random.randint(1, 10), random.randint(1, 10), 0.1))
        functor = curve_fitting.univariate_polynomial(*params)
        # Finite-difference gradients must match the analytical ones.
        fd_grads = finite_differences(functor, x)
        assert approx_equal(functor.partial_derivatives(x), fd_grads, 1e-4)
        do_polynomial_fit(x, params)
def _index_prepare(self):
    """Prepare to do autoindexing - in XDS terms this will mean calling
    xycorr, init and colspot on the input images."""

    # decide on images to work with
    logger.debug("XDS INDEX PREPARE:")
    logger.debug("Wavelength: %.6f", self.get_wavelength())
    logger.debug("Distance: %.2f", self.get_distance())

    # If no image wedges were chosen yet, select them now via the configured
    # strategy method (resolved dynamically by name).
    if self._indxr_images == []:
        _select_images_function = getattr(
            self, "_index_select_images_%s" % self._index_select_images
        )
        wedges = _select_images_function()
        for wedge in wedges:
            self.add_indexer_image_wedge(wedge)
        self.set_indexer_prepare_done(True)

    all_images = self.get_matching_images()
    first = min(all_images)
    last = max(all_images)

    # next start to process these - first xycorr
    xycorr = self.Xycorr()
    xycorr.set_data_range(first, last)
    # Background range defaults to the first image wedge.
    xycorr.set_background_range(self._indxr_images[0][0], self._indxr_images[0][1])
    converter = to_xds(self.get_imageset())
    xds_beam_centre = converter.detector_origin
    xycorr.set_beam_centre(xds_beam_centre[0], xds_beam_centre[1])
    for block in self._indxr_images:
        xycorr.add_spot_range(block[0], block[1])

    # FIXME need to set the origin here
    xycorr.run()

    # Keep the correction tables for the downstream XDS steps.
    for file in ["X-CORRECTIONS.cbf", "Y-CORRECTIONS.cbf"]:
        self._indxr_payload[file] = xycorr.get_output_data_file(file)

    # next start to process these - then init
    # Dynamic shadowing is only usable if the format class provides a masker;
    # otherwise switch the option off in the global parameters.
    if PhilIndex.params.xia2.settings.input.format.dynamic_shadowing:
        imageset = self._indxr_imagesets[0]
        masker = (
            imageset.get_format_class()
            .get_instance(imageset.paths()[0])
            .get_masker()
        )
        if masker is None:
            # disable dynamic_shadowing
            PhilIndex.params.xia2.settings.input.format.dynamic_shadowing = False
    if PhilIndex.params.xia2.settings.input.format.dynamic_shadowing:
        # find the region of the scan with the least predicted shadow
        # to use for background determination in XDS INIT step
        from dxtbx.model.experiment_list import ExperimentListFactory

        imageset = self._indxr_imagesets[0]
        xsweep = self._indxr_sweeps[0]
        sweep_filename = os.path.join(
            self.get_working_directory(), "%s_indexed.expt" % xsweep.get_name()
        )
        ExperimentListFactory.from_imageset_and_crystal(imageset, None).as_file(
            sweep_filename
        )

        # Run dials.shadow_plot to get the predicted shadow fraction per
        # scan point.
        from xia2.Wrappers.Dials.ShadowPlot import ShadowPlot

        shadow_plot = ShadowPlot()
        shadow_plot.set_working_directory(self.get_working_directory())
        auto_logfiler(shadow_plot)
        shadow_plot.set_sweep_filename(sweep_filename)
        shadow_plot.set_json_filename(
            os.path.join(
                self.get_working_directory(),
                "%s_shadow_plot.json" % shadow_plot.get_xpid(),
            )
        )
        shadow_plot.run()
        results = shadow_plot.get_results()
        fraction_shadowed = flex.double(results["fraction_shadowed"])
        if flex.max(fraction_shadowed) == 0:
            # No shadowing predicted anywhere - disable the feature.
            PhilIndex.params.xia2.settings.input.format.dynamic_shadowing = False
        else:
            scan_points = flex.double(results["scan_points"])
            scan = imageset.get_scan()
            oscillation = scan.get_oscillation()
            if self._background_images is not None:
                # Slide a window of the current background width across the
                # scan and pick the angular range with the least mean shadow.
                bg_images = self._background_images
                bg_range_deg = (
                    scan.get_angle_from_image_index(bg_images[0]),
                    scan.get_angle_from_image_index(bg_images[1]),
                )
                bg_range_width = bg_range_deg[1] - bg_range_deg[0]

                min_shadow = 100  # sentinel: fraction is always <= 1
                best_bg_range = bg_range_deg
                from libtbx.utils import frange

                for bg_range_start in frange(
                    flex.min(scan_points),
                    flex.max(scan_points) - bg_range_width,
                    step=oscillation[1],
                ):
                    bg_range_deg = (bg_range_start, bg_range_start + bg_range_width)
                    sel = (scan_points >= bg_range_deg[0]) & (
                        scan_points <= bg_range_deg[1]
                    )
                    mean_shadow = flex.mean(fraction_shadowed.select(sel))
                    if mean_shadow < min_shadow:
                        min_shadow = mean_shadow
                        best_bg_range = bg_range_deg

                self._background_images = (
                    scan.get_image_index_from_angle(best_bg_range[0]),
                    scan.get_image_index_from_angle(best_bg_range[1]),
                )
                logger.debug(
                    "Setting background images: %s -> %s" % self._background_images
                )

    # XDS INIT: estimate background/gain using the chosen background range.
    init = self.Init()

    for file in ["X-CORRECTIONS.cbf", "Y-CORRECTIONS.cbf"]:
        init.set_input_data_file(file, self._indxr_payload[file])

    init.set_data_range(first, last)

    if self._background_images:
        init.set_background_range(
            self._background_images[0], self._background_images[1]
        )
    else:
        init.set_background_range(
            self._indxr_images[0][0], self._indxr_images[0][1]
        )

    for block in self._indxr_images:
        init.add_spot_range(block[0], block[1])

    init.run()

    # at this stage, need to (perhaps) modify the BKGINIT.cbf image
    # to mark out the back stop
    if PhilIndex.params.xds.backstop_mask:
        logger.debug("Applying mask to BKGINIT.pck")

        # copy the original file
        cbf_old = os.path.join(init.get_working_directory(), "BKGINIT.cbf")
        cbf_save = os.path.join(init.get_working_directory(), "BKGINIT.sav")
        shutil.copyfile(cbf_old, cbf_save)

        # modify the file to give the new mask
        from xia2.Toolkit.BackstopMask import BackstopMask

        mask = BackstopMask(PhilIndex.params.xds.backstop_mask)
        mask.apply_mask_xds(self.get_header(), cbf_save, cbf_old)

        init.reload()

    for file in ["BLANK.cbf", "BKGINIT.cbf", "GAIN.cbf"]:
        self._indxr_payload[file] = init.get_output_data_file(file)

    # Spot finding: either the developmental DIALS spot finder (exported to
    # XDS SPOT.XDS format) or the native XDS COLSPOT.
    if PhilIndex.params.xia2.settings.developmental.use_dials_spotfinder:
        spotfinder = self.DialsSpotfinder()

        for block in self._indxr_images:
            spotfinder.add_spot_range(block[0], block[1])

        spotfinder.run()
        export = self.DialsExportSpotXDS()
        export.set_input_data_file(
            "observations.refl",
            spotfinder.get_output_data_file("observations.refl"),
        )
        export.run()

        for file in ["SPOT.XDS"]:
            self._indxr_payload[file] = export.get_output_data_file(file)
    else:
        # next start to process these - then colspot
        colspot = self.Colspot()

        for file in (
            "X-CORRECTIONS.cbf",
            "Y-CORRECTIONS.cbf",
            "BLANK.cbf",
            "BKGINIT.cbf",
            "GAIN.cbf",
        ):
            colspot.set_input_data_file(file, self._indxr_payload[file])

        colspot.set_data_range(first, last)
        colspot.set_background_range(
            self._indxr_images[0][0], self._indxr_images[0][1]
        )
        for block in self._indxr_images:
            colspot.add_spot_range(block[0], block[1])

        colspot.run()

        for file in ["SPOT.XDS"]:
            self._indxr_payload[file] = colspot.get_output_data_file(file)
def render(self, canvas):
    """Draw a vertical colour-bar legend with integer tick labels.

    One horizontal coloured line is drawn per pixel row between the top and
    bottom borders; colours run from the data maximum (top) to the minimum
    (bottom) using the configured colour scheme.
    """
    from scitbx.array_family import flex
    from libtbx.utils import frange
    import math

    size = self.GetSize()  # (width, height) in pixels
    border = 10
    # One entry per drawable pixel row inside the vertical borders.
    i_rows = flex.double_range(border, size[1] - border)

    scene = self.scene  # NOTE(review): unused local - kept for fidelity
    if self.scene.settings.scale_colors_multiplicity:
        data = self.scene.multiplicities.data()
    else:
        data = self.scene.data
    if self.settings.sqrt_scale_colors:
        data = flex.sqrt(data)
    min_data = flex.min(data)
    max_data = flex.max(data)
    # Data value represented by each row, descending from max to min.
    data_for_colors = flex.double(
        frange(max_data, min_data, -(max_data - min_data) / len(i_rows)))

    # Aim for roughly 10 integer-valued tick marks.
    tick_step = int(math.ceil((max_data - min_data) / 10))
    i_row_ticks = []
    tick_text = []
    start_tick = math.floor(max_data)
    i_tick = 0
    # Place each tick on the row whose value is closest to the tick value.
    for i in range(len(data_for_colors) - 1):
        tick_d = start_tick - tick_step * i_tick
        if abs(data_for_colors[i] - tick_d) < abs(data_for_colors[i + 1] - tick_d):
            i_row_ticks.append(i_rows[i])
            tick_text.append(str(int(tick_d)))
            i_tick += 1
    # Possibly one final tick exactly at the minimum value.
    tick_d = start_tick - tick_step * i_tick
    if tick_d == min_data:
        i_row_ticks.append(i_rows[-1])
        tick_text.append(str(int(tick_d)))

    from scitbx import graphics_utils
    # Map each row's data value to a colour according to the chosen scheme.
    if (self.settings.color_scheme in ["rainbow", "heatmap", "redblue"]):
        colors = graphics_utils.color_by_property(
            properties=data_for_colors,
            selection=flex.bool(data_for_colors.size(), True),
            color_all=False,
            gradient_type=self.settings.color_scheme)
    elif (self.settings.color_scheme == "grayscale"):
        colors = graphics_utils.grayscale_by_property(
            properties=data_for_colors,
            selection=flex.bool(data_for_colors.size(), True),
            shade_all=False,
            invert=self.settings.black_background)
    else:
        # Flat single colour, contrasting with the background.
        if (self.settings.black_background):
            base_color = (1.0, 1.0, 1.0)
        else:
            base_color = (0.0, 0.0, 0.0)
        colors = flex.vec3_double(data_for_colors.size(), base_color)

    l_padding = border
    r_padding = 4 * border  # extra right margin leaves room for tick labels

    # The colour bar: one coloured horizontal line per pixel row.
    for i_row, color in zip(i_rows, colors):
        self.draw_line(canvas, l_padding, i_row, size[0] - r_padding, i_row,
                       color=color)

    # Tick labels and tick marks in the right margin.
    for i_row, text in zip(i_row_ticks, tick_text):
        self.draw_text(canvas, text, size[0] - 0.8 * r_padding, i_row - 5)
        self.draw_line(canvas, size[0] - r_padding - 10, i_row,
                       size[0] - r_padding, i_row)
def exercise_gaussian_fit():
    """Exercise single-, log-parabola- and multi-gaussian fitting."""

    # test fitting of a gaussian
    def do_gaussian_fit(scale, mu, sigma):
        # Sample the gaussian over +/-6 sigma and check parameter recovery.
        start = mu - 6 * sigma
        stop = mu + 6 * sigma
        step = (stop - start) / 1000
        x = flex.double(frange(start, stop, step))
        y = scale * flex.exp(-flex.pow2(x - mu) / (2 * sigma**2))
        fit = curve_fitting.single_gaussian_fit(x, y)
        assert approx_equal(fit.a, scale, 1e-4)
        assert approx_equal(fit.b, mu, eps=1e-4)
        assert approx_equal(fit.c, sigma, eps=1e-4)

    # Ten random gaussians: check analytical gradients against finite
    # differences, then the single-gaussian fit itself.
    for i in range(10):
        scale = random.random() * 1000
        sigma = (random.random() + 0.0001) * 10
        mu = (-1)**random.randint(0, 1) * random.random() * 1000
        functor = curve_fitting.gaussian(scale, mu, sigma)
        start = mu - 6 * sigma
        stop = mu + 6 * sigma
        step = (stop - start) / 1000
        x = flex.double(frange(start, stop, step))
        fd_grads = finite_differences(functor, x)
        assert approx_equal(functor.partial_derivatives(x), fd_grads, 1e-4)
        do_gaussian_fit(scale, mu, sigma)

    # if we take the log of a gaussian we can fit a parabola
    scale = 123
    mu = 3.2
    sigma = 0.1
    x = flex.double(frange(2, 4, 0.01))
    y = scale * flex.exp(-flex.pow2(x - mu) / (2 * sigma**2))
    # need to be careful to only use values of y > 0
    eps = 1e-15
    x = flex.double([x[i] for i in range(x.size()) if y[i] > eps])
    y = flex.double([y[i] for i in range(y.size()) if y[i] > eps])
    fit = curve_fitting.univariate_polynomial_fit(x, flex.log(y), degree=2)
    c, b, a = fit.params
    # For log(gaussian) = a x^2 + b x + c: mu = -b/2a, sigma^2 = -1/2a.
    assert approx_equal(mu, -b / (2 * a))
    assert approx_equal(sigma * sigma, -1 / (2 * a))

    # test multiple gaussian fits
    gaussians = [
        curve_fitting.gaussian(0.3989538, 3.7499764, 0.7500268),
        curve_fitting.gaussian(0.7978957, 6.0000004, 0.5000078)
    ]
    x = flex.double(frange(0, 10, 0.1))
    y = flex.double(x.size())
    for i in range(len(gaussians)):
        g = gaussians[i]
        scale, mu, sigma = g.a, g.b, g.c  # NOTE(review): unused unpacking
        y += g(x)
    starting_gaussians = [
        curve_fitting.gaussian(1, 4, 1),
        curve_fitting.gaussian(1, 5, 1)
    ]
    fit = curve_fitting.gaussian_fit(x, y, starting_gaussians)
    for g1, g2 in zip(gaussians, fit.gaussians):
        assert approx_equal(g1.a, g2.a, eps=1e-4)
        assert approx_equal(g1.b, g2.b, eps=1e-4)
        assert approx_equal(g1.c, g2.c, eps=1e-4)

    # use example of 5-gaussian fit from here:
    # http://research.stowers-institute.org/efg/R/Statistics/MixturesOfDistributions/index.htm
    gaussians = [
        curve_fitting.gaussian(0.10516252, 23.32727, 2.436638),
        curve_fitting.gaussian(0.46462715, 33.09053, 2.997594),
        curve_fitting.gaussian(0.29827916, 41.27244, 4.274585),
        curve_fitting.gaussian(0.08986616, 51.24468, 5.077521),
        curve_fitting.gaussian(0.04206501, 61.31818, 7.070303)
    ]
    x = flex.double(frange(0, 80, 0.1))
    y = flex.double(x.size())
    for i in range(len(gaussians)):
        g = gaussians[i]
        scale, mu, sigma = g.a, g.b, g.c  # NOTE(review): unused unpacking
        y += g(x)
    termination_params = scitbx.lbfgs.termination_parameters(
        min_iterations=500)
    starting_gaussians = [
        curve_fitting.gaussian(1, 21, 2.1),
        curve_fitting.gaussian(1, 30, 2.8),
        curve_fitting.gaussian(1, 40, 2.2),
        curve_fitting.gaussian(1, 51, 1.2),
        curve_fitting.gaussian(1, 60, 2.3)
    ]
    fit = curve_fitting.gaussian_fit(x, y, starting_gaussians,
                                     termination_params=termination_params)
    # For the 5-gaussian mixture only the reconstructed curve is checked,
    # not the individual parameters.
    y_calc = fit.compute_y_calc()
    assert approx_equal(y, y_calc, eps=1e-2)

    # Optionally cross-check with the CMA-ES minimiser when available.
    have_cma_es = libtbx.env.has_module("cma_es")
    if have_cma_es:
        fit = curve_fitting.cma_es_minimiser(starting_gaussians, x, y)
        y_calc = fit.compute_y_calc()
        assert approx_equal(y, y_calc, eps=5e-2)
def exercise_gaussian_fit():
    """Test single-gaussian, log-parabola and multi-gaussian fitting routines."""

    # test fitting of a gaussian
    def do_gaussian_fit(scale, mu, sigma):
        # Sample over +/-6 sigma; the fit must recover all three parameters.
        start = mu - 6 * sigma
        stop = mu + 6 * sigma
        step = (stop - start)/1000
        x = flex.double(frange(start, stop, step))
        y = scale * flex.exp(-flex.pow2(x - mu) / (2 * sigma**2))
        fit = curve_fitting.single_gaussian_fit(x, y)
        assert approx_equal(fit.a, scale, 1e-4)
        assert approx_equal(fit.b, mu, eps=1e-4)
        assert approx_equal(fit.c, sigma, eps=1e-4)

    # Random gaussians: gradients vs finite differences, then the fit.
    for i in range(10):
        scale = random.random() * 1000
        sigma = (random.random() + 0.0001) * 10
        mu = (-1)**random.randint(0,1) * random.random() * 1000
        functor = curve_fitting.gaussian(scale, mu, sigma)
        start = mu - 6 * sigma
        stop = mu + 6 * sigma
        step = (stop - start)/1000
        x = flex.double(frange(start, stop, step))
        fd_grads = finite_differences(functor, x)
        assert approx_equal(functor.partial_derivatives(x), fd_grads, 1e-4)
        do_gaussian_fit(scale, mu, sigma)

    # if we take the log of a gaussian we can fit a parabola
    scale = 123
    mu = 3.2
    sigma = 0.1
    x = flex.double(frange(2, 4, 0.01))
    y = scale * flex.exp(-flex.pow2(x - mu) / (2 * sigma**2))
    # need to be careful to only use values of y > 0
    eps = 1e-15
    x = flex.double([x[i] for i in range(x.size()) if y[i] > eps])
    y = flex.double([y[i] for i in range(y.size()) if y[i] > eps])
    fit = curve_fitting.univariate_polynomial_fit(x, flex.log(y), degree=2)
    c, b, a = fit.params
    # log(gaussian) = a x^2 + b x + c  =>  mu = -b/2a, sigma^2 = -1/2a
    assert approx_equal(mu, -b/(2*a))
    assert approx_equal(sigma*sigma, -1/(2*a))

    # test multiple gaussian fits
    gaussians = [curve_fitting.gaussian(0.3989538, 3.7499764, 0.7500268),
                 curve_fitting.gaussian(0.7978957, 6.0000004, 0.5000078)]
    x = flex.double(frange(0, 10, 0.1))
    y = flex.double(x.size())
    for i in range(len(gaussians)):
        g = gaussians[i]
        scale, mu, sigma = g.a, g.b, g.c  # NOTE(review): unused unpacking
        y += g(x)
    starting_gaussians = [
        curve_fitting.gaussian(1, 4, 1),
        curve_fitting.gaussian(1, 5, 1)]
    fit = curve_fitting.gaussian_fit(x, y, starting_gaussians)
    for g1, g2 in zip(gaussians, fit.gaussians):
        assert approx_equal(g1.a, g2.a, eps=1e-4)
        assert approx_equal(g1.b, g2.b, eps=1e-4)
        assert approx_equal(g1.c, g2.c, eps=1e-4)

    # use example of 5-gaussian fit from here:
    # http://research.stowers-institute.org/efg/R/Statistics/MixturesOfDistributions/index.htm
    gaussians = [curve_fitting.gaussian(0.10516252, 23.32727, 2.436638),
                 curve_fitting.gaussian(0.46462715, 33.09053, 2.997594),
                 curve_fitting.gaussian(0.29827916, 41.27244, 4.274585),
                 curve_fitting.gaussian(0.08986616, 51.24468, 5.077521),
                 curve_fitting.gaussian(0.04206501, 61.31818, 7.070303)]
    x = flex.double(frange(0, 80, 0.1))
    y = flex.double(x.size())
    for i in range(len(gaussians)):
        g = gaussians[i]
        scale, mu, sigma = g.a, g.b, g.c  # NOTE(review): unused unpacking
        y += g(x)
    termination_params = scitbx.lbfgs.termination_parameters(
        min_iterations=500)
    starting_gaussians = [curve_fitting.gaussian(1, 21, 2.1),
                          curve_fitting.gaussian(1, 30, 2.8),
                          curve_fitting.gaussian(1, 40, 2.2),
                          curve_fitting.gaussian(1, 51, 1.2),
                          curve_fitting.gaussian(1, 60, 2.3)]
    fit = curve_fitting.gaussian_fit(
        x, y, starting_gaussians, termination_params=termination_params)
    # Only the reconstructed curve is checked for the 5-gaussian mixture.
    y_calc = fit.compute_y_calc()
    assert approx_equal(y, y_calc, eps=1e-2)

    # Optional cross-check using the CMA-ES minimiser when installed.
    have_cma_es = libtbx.env.has_module("cma_es")
    if have_cma_es:
        fit = curve_fitting.cma_es_minimiser(starting_gaussians, x, y)
        y_calc = fit.compute_y_calc()
        assert approx_equal(y, y_calc, eps=5e-2)
def _index_prepare(self):
    '''Prepare to do autoindexing - in XDS terms this will mean calling
    xycorr, init and colspot on the input images.'''

    # decide on images to work with
    Debug.write('XDS INDEX PREPARE:')
    Debug.write('Wavelength: %.6f' % self.get_wavelength())
    Debug.write('Distance: %.2f' % self.get_distance())

    # Select image wedges if none chosen yet, using the configured
    # selection strategy (resolved dynamically by name).
    if self._indxr_images == []:
        _select_images_function = getattr(
            self, '_index_select_images_%s' % (self._index_select_images))
        wedges = _select_images_function()
        for wedge in wedges:
            self.add_indexer_image_wedge(wedge)
        self.set_indexer_prepare_done(True)

    all_images = self.get_matching_images()
    first = min(all_images)
    last = max(all_images)

    # next start to process these - first xycorr
    xycorr = self.Xycorr()
    xycorr.set_data_range(first, last)
    # Background range defaults to the first image wedge.
    xycorr.set_background_range(self._indxr_images[0][0],
                                self._indxr_images[0][1])
    from dxtbx.serialize.xds import to_xds
    converter = to_xds(self.get_imageset())
    xds_beam_centre = converter.detector_origin
    xycorr.set_beam_centre(xds_beam_centre[0], xds_beam_centre[1])
    for block in self._indxr_images:
        xycorr.add_spot_range(block[0], block[1])

    # FIXME need to set the origin here
    xycorr.run()

    # Keep the geometric correction tables for later XDS steps.
    for file in ['X-CORRECTIONS.cbf', 'Y-CORRECTIONS.cbf']:
        self._indxr_payload[file] = xycorr.get_output_data_file(file)

    # next start to process these - then init
    if PhilIndex.params.xia2.settings.input.format.dynamic_shadowing:
        # find the region of the scan with the least predicted shadow
        # to use for background determination in XDS INIT step
        from dxtbx.serialize import dump
        from dxtbx.datablock import DataBlock
        imageset = self._indxr_imagesets[0]
        xsweep = self._indxr_sweeps[0]
        sweep_filename = os.path.join(
            self.get_working_directory(),
            '%s_datablock.json' % xsweep.get_name())
        dump.datablock(DataBlock([imageset]), sweep_filename)

        # Run dials.shadow_plot to get predicted shadow fraction per
        # scan point.
        from xia2.Wrappers.Dials.ShadowPlot import ShadowPlot
        shadow_plot = ShadowPlot()
        shadow_plot.set_working_directory(self.get_working_directory())
        auto_logfiler(shadow_plot)
        shadow_plot.set_sweep_filename(sweep_filename)
        shadow_plot.set_json_filename(
            os.path.join(
                self.get_working_directory(),
                '%s_shadow_plot.json' % shadow_plot.get_xpid()))
        shadow_plot.run()
        results = shadow_plot.get_results()
        from scitbx.array_family import flex
        fraction_shadowed = flex.double(results['fraction_shadowed'])
        scan_points = flex.double(results['scan_points'])

        phi_width = self.get_phi_width()  # NOTE(review): unused local
        scan = imageset.get_scan()
        oscillation_range = scan.get_oscillation_range()  # NOTE(review): unused
        oscillation = scan.get_oscillation()
        # NOTE(review): self._background_images is used here without a None
        # guard (later revisions add `if self._background_images is not None`)
        # - confirm callers always set it before this path is taken.
        bg_images = self._background_images
        bg_range_deg = (scan.get_angle_from_image_index(bg_images[0]),
                        scan.get_angle_from_image_index(bg_images[1]))
        bg_range_width = bg_range_deg[1] - bg_range_deg[0]

        # Slide a window of the current background width across the scan
        # and pick the angular range with the least mean predicted shadow.
        min_shadow = 100  # sentinel: fraction is always <= 1
        best_bg_range = bg_range_deg
        from libtbx.utils import frange
        for bg_range_start in frange(flex.min(scan_points),
                                     flex.max(scan_points) - bg_range_width,
                                     step=oscillation[1]):
            bg_range_deg = (bg_range_start, bg_range_start + bg_range_width)
            sel = (scan_points >= bg_range_deg[0]) & (scan_points <= bg_range_deg[1])
            mean_shadow = flex.mean(fraction_shadowed.select(sel))
            if mean_shadow < min_shadow:
                min_shadow = mean_shadow
                best_bg_range = bg_range_deg

        self._background_images = (
            scan.get_image_index_from_angle(best_bg_range[0]),
            scan.get_image_index_from_angle(best_bg_range[1]))
        Debug.write('Setting background images: %s -> %s' % self._background_images)

    # XDS INIT: estimate background/gain from the chosen background range.
    init = self.Init()

    for file in ['X-CORRECTIONS.cbf', 'Y-CORRECTIONS.cbf']:
        init.set_input_data_file(file, self._indxr_payload[file])

    init.set_data_range(first, last)

    if self._background_images:
        init.set_background_range(self._background_images[0],
                                  self._background_images[1])
    else:
        init.set_background_range(self._indxr_images[0][0],
                                  self._indxr_images[0][1])

    for block in self._indxr_images:
        init.add_spot_range(block[0], block[1])

    init.run()

    # at this stage, need to (perhaps) modify the BKGINIT.cbf image
    # to mark out the back stop
    if PhilIndex.params.general.backstop_mask:
        Debug.write('Applying mask to BKGINIT.pck')

        # copy the original file
        cbf_old = os.path.join(init.get_working_directory(), 'BKGINIT.cbf')
        cbf_save = os.path.join(init.get_working_directory(), 'BKGINIT.sav')
        shutil.copyfile(cbf_old, cbf_save)

        # modify the file to give the new mask
        from xia2.Toolkit.BackstopMask import BackstopMask
        mask = BackstopMask(PhilIndex.params.general.backstop_mask)
        mask.apply_mask_xds(self.get_header(), cbf_save, cbf_old)

        init.reload()

    for file in ['BLANK.cbf', 'BKGINIT.cbf', 'GAIN.cbf']:
        self._indxr_payload[file] = init.get_output_data_file(file)

    # Spot finding: developmental DIALS spot finder (exported to SPOT.XDS)
    # or the native XDS COLSPOT.
    if PhilIndex.params.xia2.settings.developmental.use_dials_spotfinder:
        spotfinder = self.DialsSpotfinder()

        for block in self._indxr_images:
            spotfinder.add_spot_range(block[0], block[1])

        spotfinder.run()
        export = self.DialsExportSpotXDS()
        export.set_input_data_file(
            'reflections.pickle',
            spotfinder.get_output_data_file('reflections.pickle'))
        export.run()

        for file in ['SPOT.XDS']:
            self._indxr_payload[file] = export.get_output_data_file(file)
    else:
        # next start to process these - then colspot
        colspot = self.Colspot()

        for file in ['X-CORRECTIONS.cbf', 'Y-CORRECTIONS.cbf', 'BLANK.cbf',
                     'BKGINIT.cbf', 'GAIN.cbf']:
            colspot.set_input_data_file(file, self._indxr_payload[file])

        colspot.set_data_range(first, last)
        colspot.set_background_range(self._indxr_images[0][0],
                                     self._indxr_images[0][1])
        for block in self._indxr_images:
            colspot.add_spot_range(block[0], block[1])

        colspot.run()

        for file in ['SPOT.XDS']:
            self._indxr_payload[file] = colspot.get_output_data_file(file)

    # that should be everything prepared... all of the important
    # files should be loaded into memory to be able to cope with
    # integration happening somewhere else

    return
def _index_prepare(self):
    '''Prepare to do autoindexing - in XDS terms this will mean calling
    xycorr, init and colspot on the input images.'''

    # decide on images to work with
    Debug.write('XDS INDEX PREPARE:')
    Debug.write('Wavelength: %.6f' % self.get_wavelength())
    Debug.write('Distance: %.2f' % self.get_distance())

    # Select image wedges if none chosen yet, via the configured strategy
    # method (resolved dynamically by name).
    if self._indxr_images == []:
        _select_images_function = getattr(
            self, '_index_select_images_%s' % (self._index_select_images))
        wedges = _select_images_function()
        for wedge in wedges:
            self.add_indexer_image_wedge(wedge)
        self.set_indexer_prepare_done(True)

    all_images = self.get_matching_images()
    first = min(all_images)
    last = max(all_images)

    # next start to process these - first xycorr
    xycorr = self.Xycorr()
    xycorr.set_data_range(first, last)
    # Background range defaults to the first image wedge.
    xycorr.set_background_range(self._indxr_images[0][0],
                                self._indxr_images[0][1])
    from dxtbx.serialize.xds import to_xds
    converter = to_xds(self.get_imageset())
    xds_beam_centre = converter.detector_origin
    xycorr.set_beam_centre(xds_beam_centre[0], xds_beam_centre[1])
    for block in self._indxr_images:
        xycorr.add_spot_range(block[0], block[1])

    # FIXME need to set the origin here
    xycorr.run()

    # Keep the geometric correction tables for later XDS steps.
    for file in ['X-CORRECTIONS.cbf', 'Y-CORRECTIONS.cbf']:
        self._indxr_payload[file] = xycorr.get_output_data_file(file)

    # next start to process these - then init
    # Dynamic shadowing is only usable if the format class provides a
    # goniometer shadow masker; otherwise disable it globally.
    if PhilIndex.params.xia2.settings.input.format.dynamic_shadowing:
        imageset = self._indxr_imagesets[0]
        masker = imageset.masker().format_class(
            imageset.paths()[0]).get_goniometer_shadow_masker()
        if masker is None:
            # disable dynamic_shadowing
            PhilIndex.params.xia2.settings.input.format.dynamic_shadowing = False

    if PhilIndex.params.xia2.settings.input.format.dynamic_shadowing:
        # find the region of the scan with the least predicted shadow
        # to use for background determination in XDS INIT step
        from dxtbx.serialize import dump
        from dxtbx.datablock import DataBlock
        imageset = self._indxr_imagesets[0]
        xsweep = self._indxr_sweeps[0]
        sweep_filename = os.path.join(
            self.get_working_directory(),
            '%s_datablock.json' % xsweep.get_name())
        dump.datablock(DataBlock([imageset]), sweep_filename)

        # Run dials.shadow_plot to obtain the predicted shadow fraction
        # at each scan point.
        from xia2.Wrappers.Dials.ShadowPlot import ShadowPlot
        shadow_plot = ShadowPlot()
        shadow_plot.set_working_directory(self.get_working_directory())
        auto_logfiler(shadow_plot)
        shadow_plot.set_sweep_filename(sweep_filename)
        shadow_plot.set_json_filename(
            os.path.join(self.get_working_directory(),
                         '%s_shadow_plot.json' % shadow_plot.get_xpid()))
        shadow_plot.run()
        results = shadow_plot.get_results()
        from scitbx.array_family import flex
        fraction_shadowed = flex.double(results['fraction_shadowed'])
        if flex.max(fraction_shadowed) == 0:
            # No shadowing predicted anywhere - switch the feature off.
            PhilIndex.params.xia2.settings.input.format.dynamic_shadowing = False
        else:
            scan_points = flex.double(results['scan_points'])

            phi_width = self.get_phi_width()  # NOTE(review): unused local
            scan = imageset.get_scan()
            oscillation_range = scan.get_oscillation_range()  # NOTE(review): unused
            oscillation = scan.get_oscillation()

            if self._background_images is not None:
                # Slide a window of the current background width across the
                # scan; choose the angular range with least mean shadow.
                bg_images = self._background_images
                bg_range_deg = (scan.get_angle_from_image_index(
                    bg_images[0]), scan.get_angle_from_image_index(
                    bg_images[1]))
                bg_range_width = bg_range_deg[1] - bg_range_deg[0]

                min_shadow = 100  # sentinel: fraction is always <= 1
                best_bg_range = bg_range_deg
                from libtbx.utils import frange
                for bg_range_start in frange(
                        flex.min(scan_points),
                        flex.max(scan_points) - bg_range_width,
                        step=oscillation[1]):
                    bg_range_deg = (bg_range_start,
                                    bg_range_start + bg_range_width)
                    sel = (scan_points >= bg_range_deg[0]) & (
                        scan_points <= bg_range_deg[1])
                    mean_shadow = flex.mean(fraction_shadowed.select(sel))
                    if mean_shadow < min_shadow:
                        min_shadow = mean_shadow
                        best_bg_range = bg_range_deg

                self._background_images = (scan.get_image_index_from_angle(
                    best_bg_range[0]), scan.get_image_index_from_angle(
                    best_bg_range[1]))
                Debug.write('Setting background images: %s -> %s' %
                            self._background_images)

    # XDS INIT: estimate background/gain using the chosen background range.
    init = self.Init()

    for file in ['X-CORRECTIONS.cbf', 'Y-CORRECTIONS.cbf']:
        init.set_input_data_file(file, self._indxr_payload[file])

    init.set_data_range(first, last)

    if self._background_images:
        init.set_background_range(self._background_images[0],
                                  self._background_images[1])
    else:
        init.set_background_range(self._indxr_images[0][0],
                                  self._indxr_images[0][1])

    for block in self._indxr_images:
        init.add_spot_range(block[0], block[1])

    init.run()

    # at this stage, need to (perhaps) modify the BKGINIT.cbf image
    # to mark out the back stop
    if PhilIndex.params.general.backstop_mask:
        Debug.write('Applying mask to BKGINIT.pck')

        # copy the original file
        cbf_old = os.path.join(init.get_working_directory(), 'BKGINIT.cbf')
        cbf_save = os.path.join(init.get_working_directory(), 'BKGINIT.sav')
        shutil.copyfile(cbf_old, cbf_save)

        # modify the file to give the new mask
        from xia2.Toolkit.BackstopMask import BackstopMask
        mask = BackstopMask(PhilIndex.params.general.backstop_mask)
        mask.apply_mask_xds(self.get_header(), cbf_save, cbf_old)

        init.reload()

    for file in ['BLANK.cbf', 'BKGINIT.cbf', 'GAIN.cbf']:
        self._indxr_payload[file] = init.get_output_data_file(file)

    # Spot finding: developmental DIALS spot finder (exported to SPOT.XDS)
    # or the native XDS COLSPOT.
    if PhilIndex.params.xia2.settings.developmental.use_dials_spotfinder:
        spotfinder = self.DialsSpotfinder()

        for block in self._indxr_images:
            spotfinder.add_spot_range(block[0], block[1])

        spotfinder.run()
        export = self.DialsExportSpotXDS()
        export.set_input_data_file(
            'reflections.pickle',
            spotfinder.get_output_data_file('reflections.pickle'))
        export.run()

        for file in ['SPOT.XDS']:
            self._indxr_payload[file] = export.get_output_data_file(file)
    else:
        # next start to process these - then colspot
        colspot = self.Colspot()

        for file in [
            'X-CORRECTIONS.cbf', 'Y-CORRECTIONS.cbf', 'BLANK.cbf',
            'BKGINIT.cbf', 'GAIN.cbf'
        ]:
            colspot.set_input_data_file(file, self._indxr_payload[file])

        colspot.set_data_range(first, last)
        colspot.set_background_range(self._indxr_images[0][0],
                                     self._indxr_images[0][1])
        for block in self._indxr_images:
            colspot.add_spot_range(block[0], block[1])

        colspot.run()

        for file in ['SPOT.XDS']:
            self._indxr_payload[file] = colspot.get_output_data_file(file)

    # that should be everything prepared... all of the important
    # files should be loaded into memory to be able to cope with
    # integration happening somewhere else

    return