        vmin=colorbar_range[0],
        vmax=colorbar_range[1],
        extent=[q_range[4], q_range[5], q_range[3], q_range[2]],
    )
else:
    plt0 = ax0.imshow(
        np.log10(data[padding_shape[0] // 2, :, :]),
        cmap=my_cmap,
        vmin=colorbar_range[0],
        vmax=colorbar_range[1],
        extent=[q_range[4], q_range[5], q_range[3], q_range[2]],
    )
ax0.invert_yaxis()  # qz is pointing up
ax0.xaxis.set_major_locator(ticker.MultipleLocator(tick_spacing[2]))
ax0.yaxis.set_major_locator(ticker.MultipleLocator(tick_spacing[1]))
gu.colorbar(plt0, numticks=numticks_colorbar, pad=cbar_pad)
gu.savefig(
    savedir=savedir,
    figure=fig,
    axes=ax0,
    tick_width=tick_width,
    tick_length=tick_length,
    tick_direction=tick_direction,
    label_size=16,
    xlabels=labels[2],
    ylabels=labels[1],
    filename=sample_name + str(scan) + comment + "_fromrec_qyqz",
    labelbottom=draw_ticks,
    labelleft=draw_ticks,
    labelright=False,
    labeltop=False,
def main(calc_self, user_comment):
    """
    Protection for multiprocessing.

    :param calc_self: if True, the cross-correlation will be calculated
     between same q-values
    :param user_comment: comment to include in the filename when saving results
    """
    ##########################
    # check input parameters #
    ##########################
    global corr_count, current_point
    assert len(origin_qspace) == 3, \
        "origin_qspace should be a tuple of 3 integer pixel values"
    assert type(calc_self) is bool, "unexpected type for calc_self"
    assert len(q_range) > 1, "at least 2 values are needed for q_range"
    print('the CCF map will be calculated for {:d} q values: '.format(len(q_range)))
    for idx in range(len(q_range)):
        if calc_self:
            print('q1 = {:.3f} q2 = {:.3f}'.format(q_range[idx], q_range[idx]))
        else:
            print('q1 = {:.3f} q2 = {:.3f}'.format(q_range[0], q_range[idx]))
    warnings.filterwarnings("ignore")

    ###################
    # define colormap #
    ###################
    bad_color = '1.0'  # white background
    colormap = gu.Colormap(bad_color=bad_color)
    my_cmap = colormap.cmap
    plt.ion()

    ###################################
    # load experimental data and mask #
    ###################################
    root = tk.Tk()
    root.withdraw()
    file_path = filedialog.askopenfilename(
        initialdir=datadir, title="Select the 3D reciprocal space map",
        filetypes=[("NPZ", "*.npz")])
    data = np.load(file_path)['data']

    file_path = filedialog.askopenfilename(
        initialdir=datadir, title="Select the 3D mask",
        filetypes=[("NPZ", "*.npz")])
    mask = np.load(file_path)['mask']

    print((data > hotpix_threshold).sum(), ' hotpixels masked')
    mask[data > hotpix_threshold] = 1
    data[np.nonzero(mask)] = np.nan
    del mask
    gc.collect()

    file_path = filedialog.askopenfilename(
        initialdir=datadir, title="Select q values",
        filetypes=[("NPZ", "*.npz")])
    qvalues = np.load(file_path)
    qx = qvalues['qx']
    qz = qvalues['qz']
    qy = qvalues['qy']
    del qvalues
    gc.collect()

    ##############################################################
    # calculate the angular average using mean and median values #
    ##############################################################
    if plot_meandata:
        q_axis, y_mean_masked, y_median_masked = xcca.angular_avg(
            data=data, q_values=(qx, qz, qy), origin=origin_qspace,
            nb_bins=250, debugging=debug)
        fig, ax = plt.subplots(1, 1)
        ax.plot(q_axis, np.log10(y_mean_masked), 'r', label='mean')
        ax.plot(q_axis, np.log10(y_median_masked), 'b', label='median')
        ax.axvline(x=q_range[0], ymin=0, ymax=1, color='g', linestyle='--',
                   label='q_start')
        ax.axvline(x=q_range[-1], ymin=0, ymax=1, color='r', linestyle=':',
                   label='q_stop')
        ax.set_xlabel('q (1/nm)')
        ax.set_ylabel('Angular average (A.U.)')
        ax.legend()
        plt.pause(0.1)
        fig.savefig(savedir + '1D_average.png')
        del q_axis, y_median_masked, y_mean_masked

    ##############################################################
    # interpolate the data onto spheres at user-defined q values #
    ##############################################################
    # calculate the matrix of distances from the origin of reciprocal space
    distances = np.sqrt(
        (qx[:, np.newaxis, np.newaxis] - qx[origin_qspace[0]]) ** 2 +
        (qz[np.newaxis, :, np.newaxis] - qz[origin_qspace[1]]) ** 2 +
        (qy[np.newaxis, np.newaxis, :] - qy[origin_qspace[2]]) ** 2)
    dq = min(qx[1] - qx[0], qz[1] - qz[0], qy[1] - qy[0])

    q_int = dict()  # create dictionary
    dict_fields = ['q' + str(idx + 1) for idx in range(len(q_range))]
    # ['q1', 'q2', 'q3', ...]
    nb_points = []

    for counter, q_value in enumerate(q_range):
        # select the voxels lying within +/- dq of the sphere of radius q_value
        indices = np.nonzero(np.logical_and((distances < q_value + dq),
                                            (distances > q_value - dq)))
        nb_voxels = indices[0].shape
        print('\nNumber of voxels for the sphere of radius q ={:.3f} 1/nm:'
              .format(q_value), nb_voxels)

        qx_voxels = qx[indices[0]]  # qx downstream, axis 0
        qz_voxels = qz[indices[1]]  # qz vertical up, axis 1
        qy_voxels = qy[indices[2]]  # qy outboard, axis 2
        int_voxels = data[indices]

        if debug:
            # calculate the stereographic projection
            stereo_proj, uv_labels = fu.calc_stereoproj_facet(
                projection_axis=1, radius_mean=q_value, stereo_center=0,
                vectors=np.concatenate((qx_voxels[:, np.newaxis],
                                        qz_voxels[:, np.newaxis],
                                        qy_voxels[:, np.newaxis]), axis=1))
            # plot the projection from the South pole
            fig, _ = gu.scatter_stereographic(
                euclidian_u=stereo_proj[:, 0], euclidian_v=stereo_proj[:, 1],
                color=int_voxels,
                title='Projection from the South pole'
                      ' at q={:.3f} (1/nm)'.format(q_value),
                uv_labels=uv_labels, cmap=my_cmap)
            fig.savefig(savedir + 'South pole_q={:.3f}.png'.format(q_value))
            plt.close(fig)
            # plot the projection from the North pole
            fig, _ = gu.scatter_stereographic(
                euclidian_u=stereo_proj[:, 2], euclidian_v=stereo_proj[:, 3],
                color=int_voxels,
                title='Projection from the North pole'
                      ' at q={:.3f} (1/nm)'.format(q_value),
                uv_labels=uv_labels, cmap=my_cmap)
            fig.savefig(savedir + 'North pole_q={:.3f}.png'.format(q_value))
            plt.close(fig)

        # look for nan values
        nan_indices = np.argwhere(np.isnan(int_voxels))

        # remove nan values before calculating the cross-correlation function
        qx_voxels = np.delete(qx_voxels, nan_indices)
        qz_voxels = np.delete(qz_voxels, nan_indices)
        qy_voxels = np.delete(qy_voxels, nan_indices)
        int_voxels = np.delete(int_voxels, nan_indices)

        # normalize the intensity by the median value
        # (remove the influence of the form factor)
        print('q={:.3f}:'.format(q_value), ' normalizing by the median value',
              np.median(int_voxels))
        int_voxels = int_voxels / np.median(int_voxels)

        q_int[dict_fields[counter]] = np.concatenate(
            (qx_voxels[:, np.newaxis], qz_voxels[:, np.newaxis],
             qy_voxels[:, np.newaxis], int_voxels[:, np.newaxis]), axis=1)

        # update the number of points without nan
        nb_points.append(len(qx_voxels))
        print('q={:.3f}:'.format(q_value), ' removing', nan_indices.size,
              'nan values,', nb_points[counter], 'remain')

        del qx_voxels, qz_voxels, qy_voxels, int_voxels, indices, nan_indices
        gc.collect()

    del qx, qy, qz, distances, data
    gc.collect()

    ############################################
    # calculate the cross-correlation function #
    ############################################
    cross_corr = np.empty((len(q_range), int(180 / angular_resolution), 2))
    angular_bins = np.linspace(start=0, stop=np.pi, num=corr_count.shape[0],
                               endpoint=False)

    start = time.time()
    print("\nNumber of processors: ", mp.cpu_count())
    mp.freeze_support()

    for ind_q in range(len(q_range)):
        pool = mp.Pool(mp.cpu_count())  # use this number of processes
        if calc_self:
            key_q1 = 'q' + str(ind_q + 1)
            key_q2 = key_q1
            print('\n' + key_q2 + ': the CCF will be calculated over {:d} * {:d}'
                  ' points and {:d} angular bins'.format(nb_points[ind_q],
                                                         nb_points[ind_q],
                                                         corr_count.shape[0]))
            for ind_point in range(nb_points[ind_q]):
                pool.apply_async(xcca.calc_ccf_rect,
                                 args=(ind_point, key_q1, key_q2, angular_bins,
                                       q_int),
                                 callback=collect_result,
                                 error_callback=util.catch_error)
        else:
            key_q1 = 'q1'
            key_q2 = 'q' + str(ind_q + 1)
            print('\n' + key_q2 + ': the CCF will be calculated over {:d} * {:d}'
                  ' points and {:d} angular bins'.format(nb_points[0],
                                                         nb_points[ind_q],
                                                         corr_count.shape[0]))
            for ind_point in range(nb_points[0]):
                pool.apply_async(xcca.calc_ccf_rect,
                                 args=(ind_point, key_q1, key_q2, angular_bins,
                                       q_int),
                                 callback=collect_result,
                                 error_callback=util.catch_error)

        # close the pool and let all the processes complete
        pool.close()
        pool.join()  # postpones the execution of the next line of code
        # until all processes in the queue are done

        # normalize the cross-correlation by the counter
        indices = np.nonzero(corr_count[:, 1])
        corr_count[indices, 0] = corr_count[indices, 0] / corr_count[indices, 1]
        cross_corr[ind_q, :, :] = corr_count

        # initialize the globals for the next q value
        corr_count = np.zeros((int(180 / angular_resolution), 2))
        # corr_count is declared as a global, this should work
        current_point = 0

    end = time.time()
    print('\nTime elapsed for the calculation of the CCF map:',
          str(datetime.timedelta(seconds=int(end - start))))

    #######################################
    # save the cross-correlation function #
    #######################################
    if calc_self:
        user_comment = user_comment + '_self'
    else:
        user_comment = user_comment + '_cross'
    filename = ('CCFmap_qstart={:.3f}_qstop={:.3f}'.format(q_range[0], q_range[-1])
                + '_res{:.3f}'.format(angular_resolution) + user_comment)
    np.savez_compressed(savedir + filename + '.npz',
                        angles=180 * angular_bins / np.pi, q_range=q_range,
                        ccf=cross_corr[:, :, 0], points=cross_corr[:, :, 1])

    #######################################
    # plot the cross-correlation function #
    #######################################
    # find the y limit excluding the peaks at 0 and 180 degrees
    indices = np.argwhere(np.logical_and((angular_bins >= 20 * np.pi / 180),
                                         (angular_bins <= 160 * np.pi / 180)))
    vmax = 1.2 * cross_corr[:, indices, 0].max()
    print('Discarding CCF values with a zero counter:',
          (cross_corr[:, :, 1] == 0).sum(), 'points masked')
    cross_corr[(cross_corr[:, :, 1] == 0), 0] = np.nan
    # discard these values of the CCF

    dq = q_range[1] - q_range[0]
    fig, ax = plt.subplots()
    plt0 = ax.imshow(cross_corr[:, :, 0], cmap=my_cmap, vmin=0, vmax=vmax,
                     extent=[0, 180, q_range[-1] + dq / 2, q_range[0] - dq / 2])
    # extent (left, right, bottom, top)
    ax.set_xlabel('Angle (deg)')
    ax.set_ylabel('q (nm$^{-1}$)')
    ax.set_xticks(np.arange(0, 181, 30))
    ax.set_yticks(q_range)
    ax.set_aspect('auto')
    if calc_self:
        ax.set_title('self CCF from q={:.3f} 1/nm to q={:.3f} 1/nm'.format(
            q_range[0], q_range[-1]))
    else:
        ax.set_title('cross CCF from q={:.3f} 1/nm to q={:.3f} 1/nm'.format(
            q_range[0], q_range[-1]))
    gu.colorbar(plt0, scale='linear', numticks=5)
    fig.savefig(savedir + filename + '.png')
    plt.ioff()
    plt.show()
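# Note: the multiprocessing loop above relies on module-level globals (corr_count,
# current_point) and a collect_result() callback defined elsewhere in this script,
# which are not shown in this excerpt. The sketch below only illustrates the
# accumulation pattern, assuming each worker returns, per angular bin, a partial CCF
# sum and the number of contributing pairs; the real callback and the actual return
# format of xcca.calc_ccf_rect may differ.
def _collect_result_sketch(result):
    """Hypothetical callback accumulating one worker's partial CCF (illustrative only)."""
    global corr_count, current_point
    partial_ccf, partial_counter = result  # assumed worker output, per angular bin
    corr_count[:, 0] += partial_ccf        # running sum of correlation values
    corr_count[:, 1] += partial_counter    # number of contributions per bin
    current_point += 1
    if current_point % 100 == 0:
        print(f"{current_point} points processed", end="\r")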
    tuple_scale="linear",
    cmap=my_cmap,
    ylabel=("Counts (a.u.)", ""),
)

max_y, max_x = np.unravel_index(abs(data).argmax(), data.shape)
print(
    f"Max of the concatenated data along axis 0 at (y, x): ({max_y}, {max_x}) "
    f"Max = {int(data[max_y, max_x])}")

# plot the region of interest centered on the peak
# extent (left, right, bottom, top); the half-pixel offsets keep pixel centers
# aligned with the original detector pixel indices
fig, ax = plt.subplots(nrows=1, ncols=1)
plot = ax.imshow(
    np.log10(data[y0 - width[0]:y0 + width[1], x0 - width[2]:x0 + width[3]]),
    vmin=vmin,
    vmax=vmax,
    cmap=my_cmap,
    extent=[
        x0 - width[2] - 0.5,
        x0 + width[3] - 0.5,
        y0 + width[1] - 0.5,
        y0 - width[0] - 0.5,
    ],
)
ax.set_title(
    f"{title} Peak at (y, x): ({y0},{x0}) Bragg peak value = {peak_int}")
gu.colorbar(plot)
fig.savefig(setup.detector.savedir + f"sum_S{scan}.png")
plt.show()
ax3.scatter(x_axis, xcom, s=24, c=scans, cmap=my_cmap)
ax3.set_xlabel(x_label)
if peak_method in ["com", "max_com"]:
    ax3.set_ylabel("xcom (pixels)")
else:  # 'max'
    ax3.set_ylabel("xmax (pixels)")
ax3.set_facecolor(bckg_color)

ax4.scatter(x_axis, ycom, s=24, c=scans, cmap=my_cmap)
ax4.set_xlabel(x_label)
if peak_method in ["com", "max_com"]:
    ax4.set_ylabel("ycom (pixels)")
else:  # 'max'
    ax4.set_ylabel("ymax (pixels)")
ax4.set_facecolor(bckg_color)

plt5 = ax5.scatter(x_axis, zcom, s=24, c=scans, cmap=my_cmap)
gu.colorbar(plt5, scale="linear", numticks=min(len(scans), 20), label="scan #")
ax5.set_xlabel(x_label)
if peak_method in ["com", "max_com"]:
    ax5.set_ylabel("zcom (pixels)")
else:  # 'max'
    ax5.set_ylabel("zmax (pixels)")
ax5.set_facecolor(bckg_color)

plt.tight_layout()
plt.pause(0.1)
fig.savefig(setup.detector.savedir + "summary" + comment + ".png")

############################################
# plot the evolution of the incident angle #
############################################
tilt_com = np.asarray(tilt_com)
x_axis = np.asarray(x_axis)
idx = 0
original_data = np.copy(data)
if scale == "linear":
    plot = axis.imshow(data[idx, :, :], vmin=vmin, vmax=max_colorbar,
                       cmap=my_cmap)
else:  # 'log'
    plot = axis.imshow(np.log10(data[idx, :, :]), vmin=vmin, vmax=max_colorbar,
                       cmap=my_cmap)
axis.set_title("Frame " + str(idx + 1) + "/" + str(nz) + "\n"
               "q quit ; u next frame ; d previous frame ; p unzoom\n"
               "right darker ; left brighter ; r save 2D frame")
gu.colorbar(plot, numticks=5)
plt.connect("key_press_event", press_key)
fig_loop.set_facecolor(background_plot)
plt.show()

# in XZ
dim = 1
fig_loop = plt.figure(figsize=(12, 9))
fig_loop.canvas.mpl_disconnect(fig_loop.canvas.manager.key_press_handler_id)
axis = fig_loop.add_subplot(111)
idx = 0
if scale == "linear":
    plot = axis.imshow(data[:, idx, :], vmin=vmin, vmax=max_colorbar,
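# Note: press_key above refers to a key-press handler defined elsewhere in this
# script and not shown in this excerpt. The sketch below only illustrates the
# frame-browsing part of such a handler ('u', 'd' and 'q' from the help text in the
# title); the real handler also manages contrast, zoom and frame saving, and its
# implementation may differ.
def _press_key_sketch(event):
    """Hypothetical key-press handler cycling through frames (illustrative only)."""
    global idx, plot
    if event.key == "u":  # next frame
        idx = (idx + 1) % nz
    elif event.key == "d":  # previous frame
        idx = (idx - 1) % nz
    elif event.key == "q":  # quit the interactive browser
        plt.close(event.canvas.figure)
        return
    plot.set_data(data[idx, :, :] if scale == "linear"
                  else np.log10(data[idx, :, :]))
    event.canvas.draw()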
def main(calc_self, user_comment):
    """
    Protection for multiprocessing.

    :param calc_self: if True, the cross-correlation will be calculated
     between same q-values
    :param user_comment: comment to include in the filename when saving results
    """
    ##########################
    # check input parameters #
    ##########################
    global corr_count, current_point
    if len(origin_qspace) != 3:
        raise ValueError(
            "origin_qspace should be a tuple of 3 integer pixel values")
    if type(calc_self) is not bool:
        raise TypeError(f"got unexpected type {type(calc_self)} for calc_self")
    if len(q_range) <= 1:
        raise ValueError("at least 2 values are needed for q_range")
    print("the CCF map will be calculated for {:d} q values: ".format(
        len(q_range)))
    for item in q_range:
        if calc_self:
            print(f"q1 = {item:.3f} q2 = {item:.3f}")
        else:
            print("q1 = {:.3f} q2 = {:.3f}".format(q_range[0], item))
    warnings.filterwarnings("ignore")

    ###################
    # define colormap #
    ###################
    bad_color = "1.0"  # white background
    my_cmap = ColormapFactory(bad_color=bad_color).generate_cmap()
    plt.ion()

    ###################################
    # load experimental data and mask #
    ###################################
    root = tk.Tk()
    root.withdraw()
    file_path = filedialog.askopenfilename(
        initialdir=datadir,
        title="Select the 3D reciprocal space map",
        filetypes=[("NPZ", "*.npz")],
    )
    data = np.load(file_path)["data"]

    file_path = filedialog.askopenfilename(initialdir=datadir,
                                           title="Select the 3D mask",
                                           filetypes=[("NPZ", "*.npz")])
    mask = np.load(file_path)["mask"]

    print((data > hotpix_threshold).sum(), " hotpixels masked")
    mask[data > hotpix_threshold] = 1
    data[np.nonzero(mask)] = np.nan
    del mask
    gc.collect()

    file_path = filedialog.askopenfilename(initialdir=datadir,
                                           title="Select q values",
                                           filetypes=[("NPZ", "*.npz")])
    qvalues = np.load(file_path)
    qx = qvalues["qx"]
    qz = qvalues["qz"]
    qy = qvalues["qy"]
    del qvalues
    gc.collect()

    ##############################################################
    # calculate the angular average using mean and median values #
    ##############################################################
    if plot_meandata:
        q_axis, y_mean_masked, y_median_masked = xcca.angular_avg(
            data=data,
            q_values=(qx, qz, qy),
            origin=origin_qspace,
            nb_bins=250,
            debugging=debug,
        )
        fig, ax = plt.subplots(1, 1)
        ax.plot(q_axis, np.log10(y_mean_masked), "r", label="mean")
        ax.plot(q_axis, np.log10(y_median_masked), "b", label="median")
        ax.axvline(x=q_range[0], ymin=0, ymax=1, color="g", linestyle="--",
                   label="q_start")
        ax.axvline(x=q_range[-1], ymin=0, ymax=1, color="r", linestyle=":",
                   label="q_stop")
        ax.set_xlabel("q (1/nm)")
        ax.set_ylabel("Angular average (A.U.)")
        ax.legend()
        plt.pause(0.1)
        fig.savefig(savedir + "1D_average.png")
        del q_axis, y_median_masked, y_mean_masked

    ##############################################################
    # interpolate the data onto spheres at user-defined q values #
    ##############################################################
    # calculate the matrix of distances from the origin of reciprocal space
    distances = np.sqrt(
        (qx[:, np.newaxis, np.newaxis] - qx[origin_qspace[0]]) ** 2 +
        (qz[np.newaxis, :, np.newaxis] - qz[origin_qspace[1]]) ** 2 +
        (qy[np.newaxis, np.newaxis, :] - qy[origin_qspace[2]]) ** 2)
    dq = min(qx[1] - qx[0], qz[1] - qz[0], qy[1] - qy[0])

    theta_phi_int = {}  # create dictionary
    dict_fields = ["q" + str(idx + 1) for idx, _ in enumerate(q_range)]
    # ['q1', 'q2', 'q3', ...]
    nb_points = []

    for counter, q_value in enumerate(q_range):
        nb_pixels = (np.logical_and((distances < q_value + dq),
                                    (distances > q_value - dq))).sum()
        print(
            "\nNumber of voxels for the sphere of radius q ={:.3f} 1/nm:".format(
                q_value),
            nb_pixels,
        )

        nb_pixels = int(nb_pixels / interp_factor)
        print("Dividing the number of voxels by interp_factor: "
              f"{nb_pixels:d} remaining voxels")
        indices = np.arange(0, nb_pixels, dtype=float) + 0.5

        # angles for interpolation are chosen using the 'golden spiral method',
        # so that the corresponding points are evenly distributed on the sphere
        theta = np.arccos(1 - 2 * indices / nb_pixels)
        # theta is the polar angle of the spherical coordinates
        phi = np.pi * (1 + np.sqrt(5)) * indices
        # phi is the azimuthal angle of the spherical coordinates

        qx_sphere = q_value * np.cos(phi) * np.sin(theta)
        qz_sphere = q_value * np.cos(theta)
        qy_sphere = q_value * np.sin(phi) * np.sin(theta)

        # interpolate the data onto the new points
        rgi = RegularGridInterpolator((qx, qz, qy), data, method="linear",
                                      bounds_error=False, fill_value=np.nan)
        sphere_int = rgi(
            np.concatenate((
                qx_sphere.reshape((1, nb_pixels)),
                qz_sphere.reshape((1, nb_pixels)),
                qy_sphere.reshape((1, nb_pixels)),
            )).transpose())

        # look for nan values
        nan_indices = np.argwhere(np.isnan(sphere_int))
        if debug:
            sphere_debug = np.copy(sphere_int)
            # create a copy to see also nans in the debugging plot
        else:
            sphere_debug = None

        # remove nan values before calculating the cross-correlation function
        theta = np.delete(theta, nan_indices)
        phi = np.delete(phi, nan_indices)
        sphere_int = np.delete(sphere_int, nan_indices)

        # normalize the intensity by the median value (remove the influence of the
        # form factor)
        print(
            "q={:.3f}:".format(q_value),
            " normalizing by the median value",
            np.median(sphere_int),
        )
        sphere_int = sphere_int / np.median(sphere_int)

        theta_phi_int[dict_fields[counter]] = np.concatenate(
            (theta[:, np.newaxis], phi[:, np.newaxis], sphere_int[:, np.newaxis]),
            axis=1,
        )

        # update the number of points without nan
        nb_points.append(len(theta))
        print(
            "q={:.3f}:".format(q_value),
            " removing",
            nan_indices.size,
            "nan values,",
            nb_points[counter],
            "remain",
        )

        if debug:
            # calculate the stereographic projection
            stereo_proj, uv_labels = fu.calc_stereoproj_facet(
                projection_axis=1,
                radius_mean=q_value,
                stereo_center=0,
                vectors=np.concatenate(
                    (
                        qx_sphere[:, np.newaxis],
                        qz_sphere[:, np.newaxis],
                        qy_sphere[:, np.newaxis],
                    ),
                    axis=1,
                ),
            )
            # plot the projection from the South pole
            fig, _ = gu.scatter_stereographic(
                euclidian_u=stereo_proj[:, 0],
                euclidian_v=stereo_proj[:, 1],
                color=sphere_debug,
                title="Projection from the South pole"
                      " at q={:.3f} (1/nm)".format(q_value),
                uv_labels=uv_labels,
                cmap=my_cmap,
            )
            fig.savefig(savedir + "South pole_q={:.3f}.png".format(q_value))
            plt.close(fig)
            # plot the projection from the North pole
            fig, _ = gu.scatter_stereographic(
                euclidian_u=stereo_proj[:, 2],
                euclidian_v=stereo_proj[:, 3],
                color=sphere_debug,
                title="Projection from the North pole"
                      " at q={:.3f} (1/nm)".format(q_value),
                uv_labels=uv_labels,
                cmap=my_cmap,
            )
            fig.savefig(savedir + "North pole_q={:.3f}.png".format(q_value))
            plt.close(fig)
            del sphere_debug

        del (
            qx_sphere,
            qz_sphere,
            qy_sphere,
            theta,
            phi,
            sphere_int,
            indices,
            nan_indices,
        )
        gc.collect()

    del qx, qy, qz, distances, data
    gc.collect()

    ############################################
    # calculate the cross-correlation function #
    ############################################
    cross_corr = np.empty((len(q_range), int(180 / angular_resolution), 2))
    angular_bins = np.linspace(start=0, stop=np.pi, num=corr_count.shape[0],
                               endpoint=False)

    start = time.time()
    print("\nNumber of processors: ", mp.cpu_count())
    mp.freeze_support()

    for ind_q in range(len(q_range)):
        pool = mp.Pool(mp.cpu_count())  # use this number of processes
        if calc_self:
            key_q1 = "q" + str(ind_q + 1)
            key_q2 = key_q1
            print("\n" + key_q2 +
                  ": the CCF will be calculated over {:d} * {:d}"
                  " points and {:d} angular bins".format(nb_points[ind_q],
                                                         nb_points[ind_q],
                                                         corr_count.shape[0]))
            for ind_point in range(nb_points[ind_q]):
                pool.apply_async(
                    xcca.calc_ccf_polar,
                    args=(ind_point, key_q1, key_q2, angular_bins, theta_phi_int),
                    callback=collect_result,
                    error_callback=util.catch_error,
                )
        else:
            key_q1 = "q1"
            key_q2 = "q" + str(ind_q + 1)
            print("\n" + key_q2 +
                  ": the CCF will be calculated over {:d} * {:d}"
                  " points and {:d} angular bins".format(nb_points[0],
                                                         nb_points[ind_q],
                                                         corr_count.shape[0]))
            for ind_point in range(nb_points[0]):
                pool.apply_async(
                    xcca.calc_ccf_polar,
                    args=(ind_point, key_q1, key_q2, angular_bins, theta_phi_int),
                    callback=collect_result,
                    error_callback=util.catch_error,
                )

        # close the pool and let all the processes complete
        pool.close()
        pool.join()  # postpones the execution of the next line of code until all
        # processes in the queue are done

        # normalize the cross-correlation by the counter
        indices = np.nonzero(corr_count[:, 1])
        corr_count[indices, 0] = corr_count[indices, 0] / corr_count[indices, 1]
        cross_corr[ind_q, :, :] = corr_count

        # initialize the globals for the next q value
        corr_count = np.zeros((int(180 / angular_resolution), 2))
        # corr_count is declared as a global, this should work
        current_point = 0

    end = time.time()
    print(
        "\nTime elapsed for the calculation of the CCF map:",
        str(datetime.timedelta(seconds=int(end - start))),
    )

    #######################################
    # save the cross-correlation function #
    #######################################
    if calc_self:
        user_comment = user_comment + "_self"
    else:
        user_comment = user_comment + "_cross"
    filename = (
        "CCFmap_qstart={:.3f}_qstop={:.3f}".format(q_range[0], q_range[-1]) +
        "_interp{:d}_res{:.3f}".format(interp_factor, angular_resolution) +
        user_comment)
    np.savez_compressed(
        savedir + filename + ".npz",
        angles=180 * angular_bins / np.pi,
        q_range=q_range,
        ccf=cross_corr[:, :, 0],
        points=cross_corr[:, :, 1],
    )

    #######################################
    # plot the cross-correlation function #
    #######################################
    # find the y limit excluding the peaks at 0 and 180 degrees
    indices = np.argwhere(np.logical_and((angular_bins >= 20 * np.pi / 180),
                                         (angular_bins <= 160 * np.pi / 180)))
    vmax = 1.2 * cross_corr[:, indices, 0].max()
    print(
        "Discarding CCF values with a zero counter:",
        (cross_corr[:, :, 1] == 0).sum(),
        "points masked",
    )
    cross_corr[(cross_corr[:, :, 1] == 0), 0] = np.nan
    # discard these values of the CCF

    dq = q_range[1] - q_range[0]
    fig, ax = plt.subplots()
    plt0 = ax.imshow(
        cross_corr[:, :, 0],
        cmap=my_cmap,
        vmin=0,
        vmax=vmax,
        extent=[0, 180, q_range[-1] + dq / 2, q_range[0] - dq / 2],
    )  # extent (left, right, bottom, top)
    ax.set_xlabel("Angle (deg)")
    ax.set_ylabel("q (nm$^{-1}$)")
    ax.set_xticks(np.arange(0, 181, 30))
    ax.set_yticks(q_range)
    ax.set_aspect("auto")
    if calc_self:
        ax.set_title("self CCF from q={:.3f} 1/nm to q={:.3f} 1/nm".format(
            q_range[0], q_range[-1]))
    else:
        ax.set_title("cross CCF from q={:.3f} 1/nm to q={:.3f} 1/nm".format(
            q_range[0], q_range[-1]))
    gu.colorbar(plt0, scale="linear", numticks=5)
    fig.savefig(savedir + filename + ".png")
    plt.ioff()
    plt.show()
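# Illustration of the 'golden spiral' sampling used above (not part of the original
# script): for nb_pixels points, theta = arccos(1 - 2*(i + 0.5)/nb_pixels) and
# phi = pi*(1 + sqrt(5))*(i + 0.5) spread the points nearly uniformly over a sphere
# of radius q_value. The helper name and default values below are illustrative; it
# assumes numpy is imported as np, as in the rest of the file.
def _golden_spiral_sketch(nb_pixels=1000, q_value=1.0):
    """Return (qx, qz, qy) coordinates of points evenly spread on a sphere."""
    indices = np.arange(0, nb_pixels, dtype=float) + 0.5
    theta = np.arccos(1 - 2 * indices / nb_pixels)  # polar angle in [0, pi]
    phi = np.pi * (1 + np.sqrt(5)) * indices        # azimuth, golden-angle steps
    qx_sphere = q_value * np.cos(phi) * np.sin(theta)  # downstream component
    qz_sphere = q_value * np.cos(theta)                # vertical up component
    qy_sphere = q_value * np.sin(phi) * np.sin(theta)  # outboard component
    return qx_sphere, qz_sphere, qy_sphere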
ax3.scatter(x_axis, xcom, s=24, c=scans, cmap=my_cmap)
ax3.set_xlabel(x_label)
if peak_method in ['com', 'max_com']:
    ax3.set_ylabel('xcom (pixels)')
else:  # 'max'
    ax3.set_ylabel('xmax (pixels)')
ax3.set_facecolor(bckg_color)

ax4.scatter(x_axis, ycom, s=24, c=scans, cmap=my_cmap)
ax4.set_xlabel(x_label)
if peak_method in ['com', 'max_com']:
    ax4.set_ylabel('ycom (pixels)')
else:  # 'max'
    ax4.set_ylabel('ymax (pixels)')
ax4.set_facecolor(bckg_color)

plt5 = ax5.scatter(x_axis, zcom, s=24, c=scans, cmap=my_cmap)
gu.colorbar(plt5, scale='linear', numticks=min(len(scans), 20), label='scan #')
ax5.set_xlabel(x_label)
if peak_method in ['com', 'max_com']:
    ax5.set_ylabel('zcom (pixels)')
else:  # 'max'
    ax5.set_ylabel('zmax (pixels)')
ax5.set_facecolor(bckg_color)

plt.tight_layout()
plt.pause(0.1)
fig.savefig(detector.savedir + 'summary' + comment + '.png')

############################################
# plot the evolution of the incident angle #
############################################
tilt_com = np.asarray(tilt_com)
x_axis = np.asarray(x_axis)