Example #1
def main(calc_self, user_comment):
    """
    Protection for multiprocessing.

    :param calc_self: if True, the cross-correlation will be calculated between same q-values
    :param user_comment: comment to include in the filename when saving results
    """
    ##########################
    # check input parameters #
    ##########################
    global corr_count, current_point
    assert len(
        origin_qspace
    ) == 3, "origin_qspace should be a tuple of 3 integer pixel values"
    assert type(calc_self) is bool, "unexpected type for calc_self"
    assert len(q_range) > 1, "at least 2 values are needed for q_range"

    print('the CCF map will be calculated for {:d} q values: '.format(
        len(q_range)))
    for q_value in q_range:
        if calc_self:
            print('q1 = {:.3f}  q2 = {:.3f}'.format(q_value, q_value))
        else:
            print('q1 = {:.3f}  q2 = {:.3f}'.format(q_range[0], q_value))
    warnings.filterwarnings("ignore")

    ###################
    # define colormap #
    ###################
    bad_color = '1.0'  # white background
    colormap = gu.Colormap(bad_color=bad_color)
    my_cmap = colormap.cmap
    plt.ion()

    ###################################
    # load experimental data and mask #
    ###################################
    plt.ion()
    root = tk.Tk()
    root.withdraw()
    file_path = filedialog.askopenfilename(
        initialdir=datadir,
        title="Select the 3D reciprocal space map",
        filetypes=[("NPZ", "*.npz")])
    data = np.load(file_path)['data']

    file_path = filedialog.askopenfilename(initialdir=datadir,
                                           title="Select the 3D mask",
                                           filetypes=[("NPZ", "*.npz")])
    mask = np.load(file_path)['mask']

    print((data > hotpix_threshold).sum(), 'hot pixels masked')
    mask[data > hotpix_threshold] = 1
    data[np.nonzero(mask)] = np.nan
    del mask
    gc.collect()

    file_path = filedialog.askopenfilename(initialdir=datadir,
                                           title="Select q values",
                                           filetypes=[("NPZ", "*.npz")])
    qvalues = np.load(file_path)
    qx = qvalues['qx']
    qz = qvalues['qz']
    qy = qvalues['qy']

    del qvalues
    gc.collect()

    ##############################################################
    # calculate the angular average using mean and median values #
    ##############################################################
    if plot_meandata:
        q_axis, y_mean_masked, y_median_masked = xcca.angular_avg(
            data=data,
            q_values=(qx, qz, qy),
            origin=origin_qspace,
            nb_bins=250,
            debugging=debug)
        fig, ax = plt.subplots(1, 1)
        ax.plot(q_axis, np.log10(y_mean_masked), 'r', label='mean')
        ax.plot(q_axis, np.log10(y_median_masked), 'b', label='median')
        ax.axvline(x=q_range[0],
                   ymin=0,
                   ymax=1,
                   color='g',
                   linestyle='--',
                   label='q_start')
        ax.axvline(x=q_range[-1],
                   ymin=0,
                   ymax=1,
                   color='r',
                   linestyle=':',
                   label='q_stop')
        ax.set_xlabel('q (1/nm)')
        ax.set_ylabel('Angular average (A.U.)')
        ax.legend()
        plt.pause(0.1)
        fig.savefig(savedir + '1D_average.png')

        del q_axis, y_median_masked, y_mean_masked

    ##############################################################
    # interpolate the data onto spheres at user-defined q values #
    ##############################################################
    # calculate the matrix of distances from the origin of reciprocal space
    distances = np.sqrt(
        (qx[:, np.newaxis, np.newaxis] - qx[origin_qspace[0]])**2 +
        (qz[np.newaxis, :, np.newaxis] - qz[origin_qspace[1]])**2 +
        (qy[np.newaxis, np.newaxis, :] - qy[origin_qspace[2]])**2)
    dq = min(qx[1] - qx[0], qz[1] - qz[0], qy[1] - qy[0])

    q_int = dict()  # create dictionary
    dict_fields = ['q' + str(idx + 1)
                   for idx in range(len(q_range))]  # ['q1', 'q2', 'q3', ...]
    nb_points = []

    for counter, q_value in enumerate(q_range):
        indices = np.nonzero((np.logical_and((distances < q_value + dq),
                                             (distances > q_value - dq))))
        nb_voxels = indices[0].shape
        print(
            '\nNumber of voxels for the sphere of radius q ={:.3f} 1/nm:'.
            format(q_value), nb_voxels)

        qx_voxels = qx[indices[0]]  # qx downstream, axis 0
        qz_voxels = qz[indices[1]]  # qz vertical up, axis 1
        qy_voxels = qy[indices[2]]  # qy outboard, axis 2
        int_voxels = data[indices]

        if debug:
            # calculate the stereographic projection
            stereo_proj, uv_labels = fu.calc_stereoproj_facet(
                projection_axis=1,
                radius_mean=q_value,
                stereo_center=0,
                vectors=np.concatenate(
                    (qx_voxels[:, np.newaxis], qz_voxels[:, np.newaxis],
                     qy_voxels[:, np.newaxis]),
                    axis=1))
            # plot the projection from the South pole
            fig, _ = gu.scatter_stereographic(
                euclidian_u=stereo_proj[:, 0],
                euclidian_v=stereo_proj[:, 1],
                color=int_voxels,
                title='Projection from the South pole'
                ' at q={:.3f} (1/nm)'.format(q_value),
                uv_labels=uv_labels,
                cmap=my_cmap)
            fig.savefig(savedir + 'South pole_q={:.3f}.png'.format(q_value))
            plt.close(fig)

            # plot the projection from the North pole
            fig, _ = gu.scatter_stereographic(
                euclidian_u=stereo_proj[:, 2],
                euclidian_v=stereo_proj[:, 3],
                color=int_voxels,
                title='Projection from the North pole'
                ' at q={:.3f} (1/nm)'.format(q_value),
                uv_labels=uv_labels,
                cmap=my_cmap)
            fig.savefig(savedir + 'North pole_q={:.3f}.png'.format(q_value))
            plt.close(fig)

        # look for nan values
        nan_indices = np.argwhere(np.isnan(int_voxels))

        #  remove nan values before calculating the cross-correlation function
        qx_voxels = np.delete(qx_voxels, nan_indices)
        qz_voxels = np.delete(qz_voxels, nan_indices)
        qy_voxels = np.delete(qy_voxels, nan_indices)
        int_voxels = np.delete(int_voxels, nan_indices)

        # normalize the intensity by the median value (remove the influence of the form factor)
        print('q={:.3f}:'.format(q_value), ' normalizing by the median value',
              np.median(int_voxels))
        int_voxels = int_voxels / np.median(int_voxels)

        q_int[dict_fields[counter]] = np.concatenate(
            (qx_voxels[:, np.newaxis], qz_voxels[:, np.newaxis],
             qy_voxels[:, np.newaxis], int_voxels[:, np.newaxis]),
            axis=1)
        # update the number of points without nan
        nb_points.append(len(qx_voxels))
        print('q={:.3f}:'.format(q_value), ' removing', nan_indices.size,
              'nan values,', nb_points[counter], 'remain')

        del qx_voxels, qz_voxels, qy_voxels, int_voxels, indices, nan_indices
        gc.collect()
    del qx, qy, qz, distances, data
    gc.collect()

    ############################################
    # calculate the cross-correlation function #
    ############################################
    cross_corr = np.empty((len(q_range), int(180 / angular_resolution), 2))
    angular_bins = np.linspace(start=0,
                               stop=np.pi,
                               num=corr_count.shape[0],
                               endpoint=False)

    start = time.time()
    print("\nNumber of processors: ", mp.cpu_count())
    mp.freeze_support()

    for ind_q in range(len(q_range)):
        pool = mp.Pool(mp.cpu_count())  # use this number of processes
        if calc_self:
            key_q1 = 'q' + str(ind_q + 1)
            key_q2 = key_q1
            print('\n' + key_q2 +
                  ': the CCF will be calculated over {:d} * {:d}'
                  ' points and {:d} angular bins'.format(
                      nb_points[ind_q], nb_points[ind_q], corr_count.shape[0]))
            for ind_point in range(nb_points[ind_q]):
                pool.apply_async(xcca.calc_ccf_rect,
                                 args=(ind_point, key_q1, key_q2, angular_bins,
                                       q_int),
                                 callback=collect_result,
                                 error_callback=util.catch_error)
        else:
            key_q1 = 'q1'
            key_q2 = 'q' + str(ind_q + 1)
            print('\n' + key_q2 +
                  ': the CCF will be calculated over {:d} * {:d}'
                  ' points and {:d} angular bins'.format(
                      nb_points[0], nb_points[ind_q], corr_count.shape[0]))
            for ind_point in range(nb_points[0]):
                pool.apply_async(xcca.calc_ccf_rect,
                                 args=(ind_point, key_q1, key_q2, angular_bins,
                                       q_int),
                                 callback=collect_result,
                                 error_callback=util.catch_error)

        # close the pool and let all the processes complete
        pool.close()
        pool.join()  # wait until all processes in the queue are done

        # normalize the cross-correlation by the counter
        indices = np.nonzero(corr_count[:, 1])
        corr_count[indices,
                   0] = corr_count[indices, 0] / corr_count[indices, 1]
        cross_corr[ind_q, :, :] = corr_count

        # initialize the globals for the next q value
        corr_count = np.zeros(
            (int(180 / angular_resolution), 2))  # reset the global accumulator
        current_point = 0

    end = time.time()
    print('\nTime elapsed for the calculation of the CCF map:',
          str(datetime.timedelta(seconds=int(end - start))))

    #######################################
    # save the cross-correlation function #
    #######################################
    if calc_self:
        user_comment = user_comment + '_self'
    else:
        user_comment = user_comment + '_cross'
    filename = 'CCFmap_qstart={:.3f}_qstop={:.3f}'.format(q_range[0], q_range[-1]) +\
               '_res{:.3f}'.format(angular_resolution) + user_comment
    np.savez_compressed(savedir + filename + '.npz',
                        angles=180 * angular_bins / np.pi,
                        q_range=q_range,
                        ccf=cross_corr[:, :, 0],
                        points=cross_corr[:, :, 1])

    #######################################
    # plot the cross-correlation function #
    #######################################
    # find the y limit excluding the peaks at 0 and 180 degrees
    indices = np.argwhere(
        np.logical_and((angular_bins >= 20 * np.pi / 180),
                       (angular_bins <= 160 * np.pi / 180)))
    vmax = 1.2 * cross_corr[:, indices, 0].max()
    print('Discarding CCF values with a zero counter:',
          (cross_corr[:, :, 1] == 0).sum(), 'points masked')
    cross_corr[(cross_corr[:, :, 1] == 0),
               0] = np.nan  # discard these values of the CCF

    dq = q_range[1] - q_range[0]
    fig, ax = plt.subplots()
    plt0 = ax.imshow(
        cross_corr[:, :, 0],
        cmap=my_cmap,
        vmin=0,
        vmax=vmax,
        extent=[0, 180, q_range[-1] + dq / 2,
                q_range[0] - dq / 2])  # extent (left, right, bottom, top)
    ax.set_xlabel('Angle (deg)')
    ax.set_ylabel('q (nm$^{-1}$)')
    ax.set_xticks(np.arange(0, 181, 30))
    ax.set_yticks(q_range)
    ax.set_aspect('auto')
    if calc_self:
        ax.set_title('self CCF from q={:.3f} 1/nm  to q={:.3f} 1/nm'.format(
            q_range[0], q_range[-1]))
    else:
        ax.set_title('cross CCF from q={:.3f} 1/nm  to q={:.3f} 1/nm'.format(
            q_range[0], q_range[-1]))
    gu.colorbar(plt0, scale='linear', numticks=5)
    fig.savefig(savedir + filename + '.png')

    plt.ioff()
    plt.show()
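
The apply_async calls above rely on module-level state that is not shown in this excerpt: the global corr_count accumulator, the current_point counter, and the collect_result callback. The following is only a minimal sketch of how that state could be wired up, assuming that xcca.calc_ccf_rect returns a (ccf_values, counter_values, bin_indices) tuple for each point, as the single-process branch of Example #2 below suggests; the angular_resolution value is purely illustrative.

import numpy as np

angular_resolution = 0.5  # angular bin width in degrees (illustrative value)
# column 0 accumulates the CCF, column 1 counts the contributing pairs per bin
corr_count = np.zeros((int(180 / angular_resolution), 2))
current_point = 0


def collect_result(result):
    """Fold one worker's partial result into the global corr_count accumulator."""
    global corr_count, current_point
    ccf_values, counter_values, bin_indices = result
    corr_count[bin_indices, 0] += ccf_values
    corr_count[bin_indices, 1] += counter_values
    current_point += 1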
Example #2
def main(user_comment):
    """
    Protection for multiprocessing.

    :param user_comment: comment to include in the filename when saving results
    """
    ##########################
    # check input parameters #
    ##########################
    global corr_count

    if len(q_xcca) != 2:
        raise ValueError("Two q values should be provided (it can be the same value)")
    if len(origin_qspace) != 3:
        raise ValueError("origin_qspace should be a tuple of 3 integer pixel values")
    q_xcca.sort()
    same_q = q_xcca[0] == q_xcca[1]
    warnings.filterwarnings("ignore")

    ###################
    # define colormap #
    ###################
    bad_color = "1.0"  # white background
    colormap = gu.Colormap(bad_color=bad_color)
    my_cmap = colormap.cmap
    plt.ion()

    ###################################
    # load experimental data and mask #
    ###################################
    plt.ion()
    root = tk.Tk()
    root.withdraw()
    file_path = filedialog.askopenfilename(
        initialdir=datadir,
        title="Select the 3D reciprocal space map",
        filetypes=[("NPZ", "*.npz")],
    )
    data = np.load(file_path)["data"]

    file_path = filedialog.askopenfilename(
        initialdir=datadir, title="Select the 3D mask", filetypes=[("NPZ", "*.npz")]
    )
    mask = np.load(file_path)["mask"]

    print((data > hotpix_threshold).sum(), "hot pixels masked")
    mask[data > hotpix_threshold] = 1
    data[np.nonzero(mask)] = np.nan
    del mask
    gc.collect()

    file_path = filedialog.askopenfilename(
        initialdir=datadir, title="Select q values", filetypes=[("NPZ", "*.npz")]
    )
    qvalues = np.load(file_path)
    qx = qvalues["qx"]
    qz = qvalues["qz"]
    qy = qvalues["qy"]

    del qvalues
    gc.collect()

    ##############################################################
    # calculate the angular average using mean and median values #
    ##############################################################
    if plot_meandata:
        q_axis, y_mean_masked, y_median_masked = xcca.angular_avg(
            data=data,
            q_values=(qx, qz, qy),
            origin=origin_qspace,
            nb_bins=250,
            debugging=debug,
        )
        fig, ax = plt.subplots(1, 1)
        ax.plot(q_axis, np.log10(y_mean_masked), "r", label="mean")
        ax.plot(q_axis, np.log10(y_median_masked), "b", label="median")
        ax.axvline(x=q_xcca[0], ymin=0, ymax=1, color="g", linestyle="--", label="q1")
        ax.axvline(x=q_xcca[1], ymin=0, ymax=1, color="r", linestyle=":", label="q2")
        ax.set_xlabel("q (1/nm)")
        ax.set_ylabel("Angular average (A.U.)")
        ax.legend()
        plt.pause(0.1)
        fig.savefig(savedir + "1D_average.png")

        del q_axis, y_median_masked, y_mean_masked

    ##############################################################
    # interpolate the data onto spheres at user-defined q values #
    ##############################################################
    # calculate the matrix of distances from the origin of reciprocal space
    distances = np.sqrt(
        (qx[:, np.newaxis, np.newaxis] - qx[origin_qspace[0]]) ** 2
        + (qz[np.newaxis, :, np.newaxis] - qz[origin_qspace[1]]) ** 2
        + (qy[np.newaxis, np.newaxis, :] - qy[origin_qspace[2]]) ** 2
    )
    dq = min(qx[1] - qx[0], qz[1] - qz[0], qy[1] - qy[0])

    q_int = {}  # create dictionary
    dict_fields = ["q1", "q2"]
    nb_points = []

    for counter, q_value in enumerate(q_xcca):
        if (counter == 0) or ((counter == 1) and not same_q):
            indices = np.nonzero(
                (np.logical_and((distances < q_value + dq), (distances > q_value - dq)))
            )
            nb_voxels = indices[0].shape
            print(
                "\nNumber of voxels for the sphere of radius q ={:.3f} 1/nm:".format(
                    q_value
                ),
                nb_voxels,
            )

            qx_voxels = qx[indices[0]]  # qx downstream, axis 0
            qz_voxels = qz[indices[1]]  # qz vertical up, axis 1
            qy_voxels = qy[indices[2]]  # qy outboard, axis 2
            int_voxels = data[indices]

            if debug:
                # calculate the stereographic projection
                stereo_proj, uv_labels = fu.calc_stereoproj_facet(
                    projection_axis=1,
                    radius_mean=q_value,
                    stereo_center=0,
                    vectors=np.concatenate(
                        (
                            qx_voxels[:, np.newaxis],
                            qz_voxels[:, np.newaxis],
                            qy_voxels[:, np.newaxis],
                        ),
                        axis=1,
                    ),
                )
                # plot the projection from the South pole
                fig, _ = gu.scatter_stereographic(
                    euclidian_u=stereo_proj[:, 0],
                    euclidian_v=stereo_proj[:, 1],
                    color=int_voxels,
                    title="Projection from the South pole"
                    " at q={:.3f} (1/nm)".format(q_value),
                    uv_labels=uv_labels,
                    cmap=my_cmap,
                )
                fig.savefig(savedir + "South pole_q={:.3f}.png".format(q_value))
                plt.close(fig)

                # plot the projection from the North pole
                fig, _ = gu.scatter_stereographic(
                    euclidian_u=stereo_proj[:, 2],
                    euclidian_v=stereo_proj[:, 3],
                    color=int_voxels,
                    title="Projection from the North pole"
                    " at q={:.3f} (1/nm)".format(q_value),
                    uv_labels=uv_labels,
                    cmap=my_cmap,
                )
                fig.savefig(savedir + "North pole_q={:.3f}.png".format(q_value))
                plt.close(fig)

            # look for nan values
            nan_indices = np.argwhere(np.isnan(int_voxels))

            #  remove nan values before calculating the cross-correlation function
            qx_voxels = np.delete(qx_voxels, nan_indices)
            qz_voxels = np.delete(qz_voxels, nan_indices)
            qy_voxels = np.delete(qy_voxels, nan_indices)
            int_voxels = np.delete(int_voxels, nan_indices)

            # normalize the intensity by the median value (remove the influence of
            # the form factor)
            print(
                "q={:.3f}:".format(q_value),
                " normalizing by the median value",
                np.median(int_voxels),
            )
            int_voxels = int_voxels / np.median(int_voxels)

            q_int[dict_fields[counter]] = np.concatenate(
                (
                    qx_voxels[:, np.newaxis],
                    qz_voxels[:, np.newaxis],
                    qy_voxels[:, np.newaxis],
                    int_voxels[:, np.newaxis],
                ),
                axis=1,
            )
            # update the number of points without nan
            nb_points.append(len(qx_voxels))
            print(
                "q={:.3f}:".format(q_value),
                " removing",
                nan_indices.size,
                "nan values,",
                nb_points[counter],
                "remain",
            )

            del qx_voxels, qz_voxels, qy_voxels, int_voxels, indices, nan_indices
            gc.collect()
    del qx, qy, qz, distances, data
    gc.collect()

    ############################################
    # calculate the cross-correlation function #
    ############################################
    if same_q:
        key_q2 = "q1"
        print(
            "\nThe CCF will be calculated over {:d} * {:d}"
            " points and {:d} angular bins".format(
                nb_points[0], nb_points[0], corr_count.shape[0]
            )
        )
    else:
        key_q2 = "q2"
        print(
            "\nThe CCF will be calculated over {:d} * {:d}"
            " points and {:d} angular bins".format(
                nb_points[0], nb_points[1], corr_count.shape[0]
            )
        )

    angular_bins = np.linspace(
        start=0, stop=np.pi, num=corr_count.shape[0], endpoint=False
    )

    start = time.time()
    if single_proc:
        for idx in range(nb_points[0]):
            ccf_uniq_val, counter_val, counter_indices = xcca.calc_ccf_rect(
                point=idx,
                q1_name="q1",
                q2_name=key_q2,
                bin_values=angular_bins,
                q_int=q_int,
            )
            collect_result_debug(ccf_uniq_val, counter_val, counter_indices)
    else:
        print("\nNumber of processors: ", mp.cpu_count())
        mp.freeze_support()
        pool = mp.Pool(mp.cpu_count())  # use this number of processes
        for idx in range(nb_points[0]):
            pool.apply_async(
                xcca.calc_ccf_rect,
                args=(idx, "q1", key_q2, angular_bins, q_int),
                callback=collect_result,
                error_callback=util.catch_error,
            )
        # close the pool and let all the processes complete
        pool.close()
        pool.join()  # postpones the execution of next line of code until all
        # processes in the queue are done.
    end = time.time()
    print(
        "\nTime elapsed for the calculation of the CCF:",
        str(datetime.timedelta(seconds=int(end - start))),
    )

    # normalize the cross-correlation by the counter
    indices = np.nonzero(corr_count[:, 1])
    corr_count[indices, 0] = corr_count[indices, 0] / corr_count[indices, 1]

    #######################################
    # save the cross-correlation function #
    #######################################
    filename = (
        "CCF_q1={:.3f}_q2={:.3f}".format(q_xcca[0], q_xcca[1])
        + "_points{:d}_res{:.3f}".format(nb_points[0], angular_resolution)
        + user_comment
    )
    np.savez_compressed(
        savedir + filename + ".npz",
        angles=180 * angular_bins / np.pi,
        ccf=corr_count[:, 0],
        points=corr_count[:, 1],
    )

    #######################################
    # plot the cross-correlation function #
    #######################################
    # find the y limit excluding the peaks at 0 and 180 degrees
    indices = np.argwhere(
        np.logical_and(
            (angular_bins >= 5 * np.pi / 180), (angular_bins <= 175 * np.pi / 180)
        )
    )
    ymax = 1.2 * corr_count[indices, 0].max()
    print(
        "Discarding CCF values with a zero counter:",
        (corr_count[:, 1] == 0).sum(),
        "points masked",
    )
    corr_count[(corr_count[:, 1] == 0), 0] = np.nan  # discard these values of the CCF

    fig, ax = plt.subplots()
    ax.plot(
        180 * angular_bins / np.pi,
        corr_count[:, 0],
        color="red",
        linestyle="-",
        markerfacecolor="blue",
        marker=".",
    )
    ax.set_xlim(0, 180)
    ax.set_ylim(0, ymax)
    ax.set_xlabel("Angle (deg)")
    ax.set_ylabel("Cross-correlation")
    ax.set_xticks(np.arange(0, 181, 30))
    ax.set_title(
        "CCF at q1={:.3f} 1/nm  and q2={:.3f} 1/nm".format(q_xcca[0], q_xcca[1])
    )
    fig.savefig(savedir + filename + ".png")

    _, ax = plt.subplots()
    ax.plot(
        180 * angular_bins / np.pi,
        corr_count[:, 1],
        linestyle="None",
        markerfacecolor="blue",
        marker=".",
    )
    ax.set_xlim(0, 180)
    ax.set_xlabel("Angle (deg)")
    ax.set_ylabel("Number of points")
    ax.set_xticks(np.arange(0, 181, 30))
    ax.set_title("Points per angular bin")
    plt.ioff()
    plt.show()
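
The single_proc branch of Example #2 calls a collect_result_debug helper that is also outside this excerpt. A sketch of such a helper, under the same assumptions as the collect_result sketch after Example #1 (corr_count being the global accumulator of shape (number_of_bins, 2)):

def collect_result_debug(ccf_values, counter_values, bin_indices):
    """Accumulate the contribution of one point when running without multiprocessing."""
    global corr_count
    corr_count[bin_indices, 0] += ccf_values
    corr_count[bin_indices, 1] += counter_values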
Example #3
def main(calc_self, user_comment):
    """
    Protection for multiprocessing.

    :param calc_self: if True, the cross-correlation will be calculated between same
     q-values
    :param user_comment: comment to include in the filename when saving results
    """
    ##########################
    # check input parameters #
    ##########################
    global corr_count, current_point
    if len(origin_qspace) != 3:
        raise ValueError(
            "origin_qspace should be a tuple of 3 integer pixel values")
    if type(calc_self) is not bool:
        raise TypeError(f"got unexpected type {type(calc_self)} for calc_self")
    if len(q_range) <= 1:
        raise ValueError("at least 2 values are needed for q_range")

    print("the CCF map will be calculated for {:d} q values: ".format(
        len(q_range)))
    for item in q_range:
        if calc_self:
            print(f"q1 = {item:.3f}  q2 = {item:.3f}")
        else:
            print("q1 = {:.3f}  q2 = {:.3f}".format(q_range[0], item))
    warnings.filterwarnings("ignore")

    ###################
    # define colormap #
    ###################
    bad_color = "1.0"  # white background
    my_cmap = ColormapFactory(bad_color=bad_color).generate_cmap()
    plt.ion()

    ###################################
    # load experimental data and mask #
    ###################################
    plt.ion()
    root = tk.Tk()
    root.withdraw()
    file_path = filedialog.askopenfilename(
        initialdir=datadir,
        title="Select the 3D reciprocal space map",
        filetypes=[("NPZ", "*.npz")],
    )
    data = np.load(file_path)["data"]

    file_path = filedialog.askopenfilename(initialdir=datadir,
                                           title="Select the 3D mask",
                                           filetypes=[("NPZ", "*.npz")])
    mask = np.load(file_path)["mask"]

    print((data > hotpix_threshold).sum(), "hot pixels masked")
    mask[data > hotpix_threshold] = 1
    data[np.nonzero(mask)] = np.nan
    del mask
    gc.collect()

    file_path = filedialog.askopenfilename(initialdir=datadir,
                                           title="Select q values",
                                           filetypes=[("NPZ", "*.npz")])
    qvalues = np.load(file_path)
    qx = qvalues["qx"]
    qz = qvalues["qz"]
    qy = qvalues["qy"]

    del qvalues
    gc.collect()

    ##############################################################
    # calculate the angular average using mean and median values #
    ##############################################################
    if plot_meandata:
        q_axis, y_mean_masked, y_median_masked = xcca.angular_avg(
            data=data,
            q_values=(qx, qz, qy),
            origin=origin_qspace,
            nb_bins=250,
            debugging=debug,
        )
        fig, ax = plt.subplots(1, 1)
        ax.plot(q_axis, np.log10(y_mean_masked), "r", label="mean")
        ax.plot(q_axis, np.log10(y_median_masked), "b", label="median")
        ax.axvline(x=q_range[0],
                   ymin=0,
                   ymax=1,
                   color="g",
                   linestyle="--",
                   label="q_start")
        ax.axvline(x=q_range[-1],
                   ymin=0,
                   ymax=1,
                   color="r",
                   linestyle=":",
                   label="q_stop")
        ax.set_xlabel("q (1/nm)")
        ax.set_ylabel("Angular average (A.U.)")
        ax.legend()
        plt.pause(0.1)
        fig.savefig(savedir + "1D_average.png")

        del q_axis, y_median_masked, y_mean_masked

    ##############################################################
    # interpolate the data onto spheres at user-defined q values #
    ##############################################################
    # calculate the matrix of distances from the origin of reciprocal space
    distances = np.sqrt(
        (qx[:, np.newaxis, np.newaxis] - qx[origin_qspace[0]])**2 +
        (qz[np.newaxis, :, np.newaxis] - qz[origin_qspace[1]])**2 +
        (qy[np.newaxis, np.newaxis, :] - qy[origin_qspace[2]])**2)
    dq = min(qx[1] - qx[0], qz[1] - qz[0], qy[1] - qy[0])

    theta_phi_int = {}  # create dictionary
    dict_fields = ["q" + str(idx + 1) for idx, _ in enumerate(q_range)]
    # ['q1', 'q2', 'q3', ...]
    nb_points = []

    for counter, q_value in enumerate(q_range):
        nb_pixels = (np.logical_and((distances < q_value + dq),
                                    (distances > q_value - dq))).sum()

        print(
            "\nNumber of voxels for the sphere of radius q ={:.3f} 1/nm:".
            format(q_value),
            nb_pixels,
        )

        nb_pixels = int(nb_pixels / interp_factor)
        print("Dividing the number of voxels by interp_factor: "
              f"{nb_pixels:d} remaining voxels ")

        indices = np.arange(0, nb_pixels, dtype=float) + 0.5

        # angles for interpolation are chosen using the 'golden spiral method',
        # so that the corresponding points are evenly distributed on the sphere
        theta = np.arccos(
            1 - 2 * indices /
            nb_pixels)  # theta is the polar angle of the spherical coordinates
        phi = (np.pi * (1 + np.sqrt(5)) * indices
               )  # phi is the azimuthal angle of the spherical coordinates

        qx_sphere = q_value * np.cos(phi) * np.sin(theta)
        qz_sphere = q_value * np.cos(theta)
        qy_sphere = q_value * np.sin(phi) * np.sin(theta)

        # interpolate the data onto the new points
        rgi = RegularGridInterpolator((qx, qz, qy),
                                      data,
                                      method="linear",
                                      bounds_error=False,
                                      fill_value=np.nan)
        sphere_int = rgi(
            np.concatenate((
                qx_sphere.reshape((1, nb_pixels)),
                qz_sphere.reshape((1, nb_pixels)),
                qy_sphere.reshape((1, nb_pixels)),
            )).transpose())

        # look for nan values
        nan_indices = np.argwhere(np.isnan(sphere_int))
        if debug:
            sphere_debug = np.copy(
                sphere_int
            )  # create a copy to see also nans in the debugging plot
        else:
            sphere_debug = None

        #  remove nan values before calculating the cross-correlation function
        theta = np.delete(theta, nan_indices)
        phi = np.delete(phi, nan_indices)
        sphere_int = np.delete(sphere_int, nan_indices)

        # normalize the intensity by the median value (remove the influence of the
        # form factor)
        print(
            "q={:.3f}:".format(q_value),
            " normalizing by the median value",
            np.median(sphere_int),
        )
        sphere_int = sphere_int / np.median(sphere_int)

        theta_phi_int[dict_fields[counter]] = np.concatenate(
            (theta[:, np.newaxis], phi[:, np.newaxis], sphere_int[:,
                                                                  np.newaxis]),
            axis=1,
        )
        # update the number of points without nan
        nb_points.append(len(theta))
        print(
            "q={:.3f}:".format(q_value),
            " removing",
            nan_indices.size,
            "nan values,",
            nb_points[counter],
            "remain",
        )

        if debug:
            # calculate the stereographic projection
            stereo_proj, uv_labels = fu.calc_stereoproj_facet(
                projection_axis=1,
                radius_mean=q_value,
                stereo_center=0,
                vectors=np.concatenate(
                    (
                        qx_sphere[:, np.newaxis],
                        qz_sphere[:, np.newaxis],
                        qy_sphere[:, np.newaxis],
                    ),
                    axis=1,
                ),
            )
            # plot the projection from the South pole
            fig, _ = gu.scatter_stereographic(
                euclidian_u=stereo_proj[:, 0],
                euclidian_v=stereo_proj[:, 1],
                color=sphere_debug,
                title="Projection from the South pole"
                " at q={:.3f} (1/nm)".format(q_value),
                uv_labels=uv_labels,
                cmap=my_cmap,
            )
            fig.savefig(savedir + "South pole_q={:.3f}.png".format(q_value))
            plt.close(fig)

            # plot the projection from the North pole
            fig, _ = gu.scatter_stereographic(
                euclidian_u=stereo_proj[:, 2],
                euclidian_v=stereo_proj[:, 3],
                color=sphere_debug,
                title="Projection from the North pole"
                " at q={:.3f} (1/nm)".format(q_value),
                uv_labels=uv_labels,
                cmap=my_cmap,
            )
            fig.savefig(savedir + "North pole_q={:.3f}.png".format(q_value))
            plt.close(fig)
            del sphere_debug

        del (
            qx_sphere,
            qz_sphere,
            qy_sphere,
            theta,
            phi,
            sphere_int,
            indices,
            nan_indices,
        )
        gc.collect()
    del qx, qy, qz, distances, data
    gc.collect()

    ############################################
    # calculate the cross-correlation function #
    ############################################
    cross_corr = np.empty((len(q_range), int(180 / angular_resolution), 2))
    angular_bins = np.linspace(start=0,
                               stop=np.pi,
                               num=corr_count.shape[0],
                               endpoint=False)

    start = time.time()
    print("\nNumber of processors: ", mp.cpu_count())
    mp.freeze_support()

    for ind_q in range(len(q_range)):
        pool = mp.Pool(mp.cpu_count())  # use this number of processes
        if calc_self:
            key_q1 = "q" + str(ind_q + 1)
            key_q2 = key_q1
            print("\n" + key_q2 +
                  ": the CCF will be calculated over {:d} * {:d}"
                  " points and {:d} angular bins".format(
                      nb_points[ind_q], nb_points[ind_q], corr_count.shape[0]))
            for ind_point in range(nb_points[ind_q]):
                pool.apply_async(
                    xcca.calc_ccf_polar,
                    args=(ind_point, key_q1, key_q2, angular_bins,
                          theta_phi_int),
                    callback=collect_result,
                    error_callback=util.catch_error,
                )
        else:
            key_q1 = "q1"
            key_q2 = "q" + str(ind_q + 1)
            print("\n" + key_q2 +
                  ": the CCF will be calculated over {:d} * {:d}"
                  " points and {:d} angular bins".format(
                      nb_points[0], nb_points[ind_q], corr_count.shape[0]))
            for ind_point in range(nb_points[0]):
                pool.apply_async(
                    xcca.calc_ccf_polar,
                    args=(ind_point, key_q1, key_q2, angular_bins,
                          theta_phi_int),
                    callback=collect_result,
                    error_callback=util.catch_error,
                )

        # close the pool and let all the processes complete
        pool.close()
        pool.join()  # postpones the execution of next line of code until all
        # processes in the queue are done.

        # normalize the cross-correlation by the counter
        indices = np.nonzero(corr_count[:, 1])
        corr_count[indices,
                   0] = corr_count[indices, 0] / corr_count[indices, 1]
        cross_corr[ind_q, :, :] = corr_count

        # initialize the globals for the next q value
        corr_count = np.zeros(
            (int(180 / angular_resolution), 2))  # reset the global accumulator
        current_point = 0

    end = time.time()
    print(
        "\nTime elapsed for the calculation of the CCF map:",
        str(datetime.timedelta(seconds=int(end - start))),
    )

    #######################################
    # save the cross-correlation function #
    #######################################
    if calc_self:
        user_comment = user_comment + "_self"
    else:
        user_comment = user_comment + "_cross"
    filename = (
        "CCFmap_qstart={:.3f}_qstop={:.3f}".format(q_range[0], q_range[-1]) +
        "_interp{:d}_res{:.3f}".format(interp_factor, angular_resolution) +
        user_comment)
    np.savez_compressed(
        savedir + filename + ".npz",
        angles=180 * angular_bins / np.pi,
        q_range=q_range,
        ccf=cross_corr[:, :, 0],
        points=cross_corr[:, :, 1],
    )

    #######################################
    # plot the cross-correlation function #
    #######################################
    # find the y limit excluding the peaks at 0 and 180 degrees
    indices = np.argwhere(
        np.logical_and((angular_bins >= 20 * np.pi / 180),
                       (angular_bins <= 160 * np.pi / 180)))
    vmax = 1.2 * cross_corr[:, indices, 0].max()
    print(
        "Discarding CCF values with a zero counter:",
        (cross_corr[:, :, 1] == 0).sum(),
        "points masked",
    )
    cross_corr[(cross_corr[:, :, 1] == 0),
               0] = np.nan  # discard these values of the CCF

    dq = q_range[1] - q_range[0]
    fig, ax = plt.subplots()
    plt0 = ax.imshow(
        cross_corr[:, :, 0],
        cmap=my_cmap,
        vmin=0,
        vmax=vmax,
        extent=[0, 180, q_range[-1] + dq / 2, q_range[0] - dq / 2],
    )  # extent (left, right, bottom, top)
    ax.set_xlabel("Angle (deg)")
    ax.set_ylabel("q (nm$^{-1}$)")
    ax.set_xticks(np.arange(0, 181, 30))
    ax.set_yticks(q_range)
    ax.set_aspect("auto")
    if calc_self:
        ax.set_title("self CCF from q={:.3f} 1/nm  to q={:.3f} 1/nm".format(
            q_range[0], q_range[-1]))
    else:
        ax.set_title("cross CCF from q={:.3f} 1/nm  to q={:.3f} 1/nm".format(
            q_range[0], q_range[-1]))
    gu.colorbar(plt0, scale="linear", numticks=5)
    fig.savefig(savedir + filename + ".png")

    plt.ioff()
    plt.show()
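
Example #3 differs from Example #1 in that, instead of keeping the raw voxels found in a shell around each q value, it interpolates the intensity onto points spread evenly over the sphere using the 'golden spiral' construction (the theta and phi arrays above). A self-contained illustration of that sampling, with purely illustrative values:

import numpy as np


def golden_spiral_points(nb_pixels, q_value):
    """Return (qx, qz, qy) coordinates of nb_pixels evenly spread points on a sphere of radius q_value."""
    indices = np.arange(0, nb_pixels, dtype=float) + 0.5
    theta = np.arccos(1 - 2 * indices / nb_pixels)  # polar angle in [0, pi]
    phi = np.pi * (1 + np.sqrt(5)) * indices  # azimuth step set by the golden ratio
    qx = q_value * np.cos(phi) * np.sin(theta)
    qz = q_value * np.cos(theta)
    qy = q_value * np.sin(phi) * np.sin(theta)
    return qx, qz, qy


qx, qz, qy = golden_spiral_points(nb_pixels=1000, q_value=0.45)
assert np.allclose(np.sqrt(qx**2 + qz**2 + qy**2), 0.45)  # every point lies on the sphere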
Example #4
    qy = npzfile["qy"]  # outboard
    numz, numy, numx = len(qx), len(qz), len(qy)
    qx = qx[:numz - (numz % bin_factor):bin_factor]
    qz = qz[:numy - (numy % bin_factor):bin_factor]
    qy = qy[:numx - (numx % bin_factor):bin_factor]
    del numz, numy, numx

else:  # work with pixels, supposing that the data is in an orthonormal frame
    qx = np.arange(nz) - origin[0]
    qz = np.arange(ny) - origin[1]
    qy = np.arange(nx) - origin[2]

q_axis, y_mean_masked, y_median_masked = xcca.angular_avg(
    data=diff_pattern,
    q_values=(qx, qz, qy),
    origin=origin,
    mask=mask,
    nb_bins=nb_bins,
    debugging=debug,
)
#############
# save data #
#############
np.savez_compressed(savedir + "q+angular_avg.npz",
                    q=q_axis,
                    avg=y_mean_masked,
                    median=y_median_masked)
if save_txt:
    with open(savedir + "q+angular_avg.txt", "w") as file:
        file.write("{:8s}".format("q") + "\t" + "{:10s}".format("mean") +
                   "\t" + "{:10s}".format("median") + "\n")
        for idx, item in enumerate(q_axis):
            # one row per q bin: q value, mean intensity, median intensity
            file.write("{:8.3f}".format(item) + "\t" +
                       "{:10.1f}".format(y_mean_masked[idx]) + "\t" +
                       "{:10.1f}".format(y_median_masked[idx]) + "\n")
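
A hypothetical follow-up (not part of the original script) that reloads the angular average saved above and plots it on a logarithmic scale; the file name and array keys match the np.savez_compressed call, and savedir is assumed to be defined as in the script.

import matplotlib.pyplot as plt
import numpy as np

avg_file = np.load(savedir + "q+angular_avg.npz")
fig, ax = plt.subplots()
ax.plot(avg_file["q"], np.log10(avg_file["avg"]), "r", label="mean")
ax.plot(avg_file["q"], np.log10(avg_file["median"]), "b", label="median")
ax.set_xlabel("q (1/nm)")
ax.set_ylabel("log10(angular average) (A.U.)")
ax.legend()
plt.show()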
Example #5
def main(user_comment):
    """
    Protection for multiprocessing.

    :param user_comment: comment to include in the filename when saving results
    """
    ##########################
    # check input parameters #
    ##########################
    global corr_count

    assert len(
        q_xcca
    ) == 2, "Two q values should be provided (it can be the same value)"
    assert len(
        origin_qspace
    ) == 3, "origin_qspace should be a tuple of 3 integer pixel values"
    q_xcca.sort()
    same_q = q_xcca[0] == q_xcca[1]
    warnings.filterwarnings("ignore")

    ###################
    # define colormap #
    ###################
    bad_color = '1.0'  # white background
    colormap = gu.Colormap(bad_color=bad_color)
    my_cmap = colormap.cmap
    plt.ion()

    ###################################
    # load experimental data and mask #
    ###################################
    plt.ion()
    root = tk.Tk()
    root.withdraw()
    file_path = filedialog.askopenfilename(
        initialdir=datadir,
        title="Select the 3D reciprocal space map",
        filetypes=[("NPZ", "*.npz")])
    data = np.load(file_path)['data']

    file_path = filedialog.askopenfilename(initialdir=datadir,
                                           title="Select the 3D mask",
                                           filetypes=[("NPZ", "*.npz")])
    mask = np.load(file_path)['mask']

    print((data > hotpix_threshold).sum(), 'hot pixels masked')
    mask[data > hotpix_threshold] = 1
    data[np.nonzero(mask)] = np.nan
    del mask
    gc.collect()

    file_path = filedialog.askopenfilename(initialdir=datadir,
                                           title="Select q values",
                                           filetypes=[("NPZ", "*.npz")])
    qvalues = np.load(file_path)
    qx = qvalues['qx']
    qz = qvalues['qz']
    qy = qvalues['qy']

    del qvalues
    gc.collect()

    ##############################################################
    # calculate the angular average using mean and median values #
    ##############################################################
    if plot_meandata:
        q_axis, y_mean_masked, y_median_masked = xcca.angular_avg(
            data=data,
            q_values=(qx, qz, qy),
            origin=origin_qspace,
            nb_bins=250,
            debugging=debug)
        fig, ax = plt.subplots(1, 1)
        ax.plot(q_axis, np.log10(y_mean_masked), 'r', label='mean')
        ax.plot(q_axis, np.log10(y_median_masked), 'b', label='median')
        ax.axvline(x=q_xcca[0],
                   ymin=0,
                   ymax=1,
                   color='g',
                   linestyle='--',
                   label='q1')
        ax.axvline(x=q_xcca[1],
                   ymin=0,
                   ymax=1,
                   color='r',
                   linestyle=':',
                   label='q2')
        ax.set_xlabel('q (1/nm)')
        ax.set_ylabel('Angular average (A.U.)')
        ax.legend()
        plt.pause(0.1)
        fig.savefig(savedir + '1D_average.png')

        del q_axis, y_median_masked, y_mean_masked

    ##############################################################
    # interpolate the data onto spheres at user-defined q values #
    ##############################################################
    # calculate the matrix of distances from the origin of reciprocal space
    distances = np.sqrt(
        (qx[:, np.newaxis, np.newaxis] - qx[origin_qspace[0]])**2 +
        (qz[np.newaxis, :, np.newaxis] - qz[origin_qspace[1]])**2 +
        (qy[np.newaxis, np.newaxis, :] - qy[origin_qspace[2]])**2)
    dq = min(qx[1] - qx[0], qz[1] - qz[0], qy[1] - qy[0])

    theta_phi_int = dict()  # create dictionary
    dict_fields = ['q1', 'q2']
    nb_points = []

    for counter, q_value in enumerate(q_xcca):
        if (counter == 0) or ((counter == 1) and not same_q):
            nb_pixels = (np.logical_and((distances < q_value + dq),
                                        (distances > q_value - dq))).sum()

            print(
                '\nNumber of voxels for the sphere of radius q ={:.3f} 1/nm:'.
                format(q_value), nb_pixels)

            nb_pixels = int(nb_pixels / interp_factor)
            print(
                'Dividing the number of voxels by interp_factor: {:d} voxels remaining'
                .format(nb_pixels))

            indices = np.arange(0, nb_pixels, dtype=float) + 0.5

            # angles for interpolation are chosen using the 'golden spiral method', so that the corresponding points
            # are evenly distributed on the sphere
            theta = np.arccos(
                1 - 2 * indices / nb_pixels
            )  # theta is the polar angle of the spherical coordinates
            phi = np.pi * (
                1 + np.sqrt(5)
            ) * indices  # phi is the azimuthal angle of the spherical coordinates

            qx_sphere = q_value * np.cos(phi) * np.sin(theta)
            qz_sphere = q_value * np.cos(theta)
            qy_sphere = q_value * np.sin(phi) * np.sin(theta)

            # interpolate the data onto the new points
            rgi = RegularGridInterpolator((qx, qz, qy),
                                          data,
                                          method='linear',
                                          bounds_error=False,
                                          fill_value=np.nan)
            sphere_int = rgi(
                np.concatenate((qx_sphere.reshape(
                    (1, nb_pixels)), qz_sphere.reshape(
                        (1, nb_pixels)), qy_sphere.reshape(
                            (1, nb_pixels)))).transpose())

            # look for nan values
            nan_indices = np.argwhere(np.isnan(sphere_int))
            if debug:
                sphere_debug = np.copy(
                    sphere_int
                )  # create a copy to see also nans in the debugging plot

            #  remove nan values before calculating the cross-correlation function
            theta = np.delete(theta, nan_indices)
            phi = np.delete(phi, nan_indices)
            sphere_int = np.delete(sphere_int, nan_indices)

            # normalize the intensity by the median value (remove the influence of the form factor)
            print('q={:.3f}:'.format(q_value),
                  ' normalizing by the median value', np.median(sphere_int))
            sphere_int = sphere_int / np.median(sphere_int)

            theta_phi_int[dict_fields[counter]] = np.concatenate(
                (theta[:, np.newaxis], phi[:, np.newaxis],
                 sphere_int[:, np.newaxis]),
                axis=1)
            # update the number of points without nan
            nb_points.append(len(theta))
            print('q={:.3f}:'.format(q_value), ' removing', nan_indices.size,
                  'nan values,', nb_points[counter], 'remain')

            if debug:
                # calculate the stereographic projection
                stereo_proj, uv_labels = fu.calc_stereoproj_facet(
                    projection_axis=1,
                    radius_mean=q_value,
                    stereo_center=0,
                    vectors=np.concatenate(
                        (qx_sphere[:, np.newaxis], qz_sphere[:, np.newaxis],
                         qy_sphere[:, np.newaxis]),
                        axis=1))
                # plot the projection from the South pole
                fig, _ = gu.scatter_stereographic(
                    euclidian_u=stereo_proj[:, 0],
                    euclidian_v=stereo_proj[:, 1],
                    color=sphere_debug,
                    title='Projection from the South pole'
                    ' at q={:.3f} (1/nm)'.format(q_value),
                    uv_labels=uv_labels,
                    cmap=my_cmap)
                fig.savefig(savedir +
                            'South pole_q={:.3f}.png'.format(q_value))
                plt.close(fig)

                # plot the projection from the North pole
                fig, _ = gu.scatter_stereographic(
                    euclidian_u=stereo_proj[:, 2],
                    euclidian_v=stereo_proj[:, 3],
                    color=sphere_debug,
                    title='Projection from the North pole'
                    ' at q={:.3f} (1/nm)'.format(q_value),
                    uv_labels=uv_labels,
                    cmap=my_cmap)
                fig.savefig(savedir +
                            'North pole_q={:.3f}.png'.format(q_value))
                plt.close(fig)
                del sphere_debug

            del qx_sphere, qz_sphere, qy_sphere, theta, phi, sphere_int, indices, nan_indices
            gc.collect()
    del qx, qy, qz, distances, data
    gc.collect()

    ############################################
    # calculate the cross-correlation function #
    ############################################
    if same_q:
        key_q2 = 'q1'
        print('\nThe CCF will be calculated over {:d} * {:d}'
              ' points and {:d} angular bins'.format(nb_points[0],
                                                     nb_points[0],
                                                     corr_count.shape[0]))
    else:
        key_q2 = 'q2'
        print('\nThe CCF will be calculated over {:d} * {:d}'
              ' points and {:d} angular bins'.format(nb_points[0],
                                                     nb_points[1],
                                                     corr_count.shape[0]))

    angular_bins = np.linspace(start=0,
                               stop=np.pi,
                               num=corr_count.shape[0],
                               endpoint=False)

    start = time.time()
    if single_proc:
        for idx in range(nb_points[0]):
            ccf_uniq_val, counter_val, counter_indices = xcca.calc_ccf_polar(
                point=idx, q1_name='q1', q2_name=key_q2,
                bin_values=angular_bins, polar_azi_int=theta_phi_int)
            collect_result_debug(ccf_uniq_val, counter_val, counter_indices)
    else:
        print("\nNumber of processors: ", mp.cpu_count())
        mp.freeze_support()
        pool = mp.Pool(mp.cpu_count())  # use this number of processes
        for idx in range(nb_points[0]):
            pool.apply_async(xcca.calc_ccf_polar,
                             args=(idx, 'q1', key_q2, angular_bins,
                                   theta_phi_int),
                             callback=collect_result,
                             error_callback=util.catch_error)
        # close the pool and let all the processes complete
        pool.close()
        pool.join()  # wait until all processes in the queue are done
    end = time.time()
    print('\nTime elapsed for the calculation of the CCF:',
          str(datetime.timedelta(seconds=int(end - start))))

    # normalize the cross-correlation by the counter
    indices = np.nonzero(corr_count[:, 1])
    corr_count[indices, 0] = corr_count[indices, 0] / corr_count[indices, 1]

    #######################################
    # save the cross-correlation function #
    #######################################
    filename = ('CCF_q1={:.3f}_q2={:.3f}'.format(q_xcca[0], q_xcca[1]) +
                '_points{:d}_interp{:d}_res{:.3f}'.format(
                    nb_points[0], interp_factor, angular_resolution) +
                user_comment)
    np.savez_compressed(savedir + filename + '.npz',
                        angles=180 * angular_bins / np.pi,
                        ccf=corr_count[:, 0],
                        points=corr_count[:, 1])

    #######################################
    # plot the cross-correlation function #
    #######################################
    # find the y limit excluding the peaks at 0 and 180 degrees
    indices = np.argwhere(
        np.logical_and((angular_bins >= 5 * np.pi / 180),
                       (angular_bins <= 175 * np.pi / 180)))
    ymax = 1.2 * corr_count[indices, 0].max()
    print('Discarding CCF values with a zero counter:',
          (corr_count[:, 1] == 0).sum(), 'points masked')
    corr_count[(corr_count[:, 1] == 0),
               0] = np.nan  # discard these values of the CCF

    fig, ax = plt.subplots()
    ax.plot(180 * angular_bins / np.pi,
            corr_count[:, 0],
            color='red',
            linestyle="-",
            markerfacecolor='blue',
            marker='.')
    ax.set_xlim(0, 180)
    ax.set_ylim(0, ymax)
    ax.set_xlabel('Angle (deg)')
    ax.set_ylabel('Cross-correlation')
    ax.set_xticks(np.arange(0, 181, 30))
    ax.set_title('CCF at q1={:.3f} 1/nm  and q2={:.3f} 1/nm'.format(
        q_xcca[0], q_xcca[1]))
    fig.savefig(savedir + filename + '.png')

    _, ax = plt.subplots()
    ax.plot(180 * angular_bins / np.pi,
            corr_count[:, 1],
            linestyle="None",
            markerfacecolor='blue',
            marker='.')
    ax.set_xlim(0, 180)
    ax.set_xlabel('Angle (deg)')
    ax.set_ylabel('Number of points')
    ax.set_xticks(np.arange(0, 181, 30))
    ax.set_title('Points per angular bin')
    plt.ioff()
    plt.show()
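
All five examples document their main() with "Protection for multiprocessing": on platforms that spawn worker processes (such as Windows), the module is re-imported in every worker, so the actual computation has to sit behind an import guard. A minimal sketch of the assumed entry point for Example #5 (the string passed as user_comment is illustrative):

if __name__ == '__main__':
    main(user_comment='_test')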