# NOTE: this excerpt assumes the project's own loader helpers (load_data_boss,
# load_data_irsic, load_power_spectrum_table, load_tabulated_data_boera,
# load_tabulated_data_viel, Load_Pickle_Directory) and the colour palettes
# used below (pylab, blues, yellows) are provided by the surrounding module.
import os
import numpy as np
import matplotlib
import matplotlib.font_manager


def plot_power_spectrum_grid(ps_data_dir,
                             output_dir,
                             ps_data=None,
                             scales='large',
                             sim_data_sets=None,
                             black_background=False,
                             system=None,
                             high_z_only=False,
                             plot_ps_normalized=False):
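    """Plot a grid of flux power spectrum panels, one per redshift, comparing
    simulated P(k) against published Lyman-alpha forest measurements
    (eBOSS, Walther et al., Irsic et al., Boera et al., Viel et al.).

    ps_data_dir: directory containing the tabulated observational data sets.
    output_dir:  directory where the output image is written.
    ps_data / sim_data_sets: simulated power spectra to overplot (optional).
    scales: which k-range / redshift grid to plot ('large', 'small', 'middle',
            'small_walther', 'small_highz', 'all', 'large_middle',
            'large_reduced' or 'small_reduced').
    black_background: use white text on a black figure background.
    system: host name used to select the matplotlib backend and font paths.
    high_z_only: restrict the grid to the z = 5.0 and 5.4 panels.
    plot_ps_normalized: plot each data set's 'power_spectrum_normalized'
            entry instead of the raw power spectrum.
    """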

    if system == 'Lux' or system == 'Summit': matplotlib.use('Agg')
    import matplotlib.pyplot as plt

    fig_height = 5
    fig_width = 8
    fig_dpi = 300

    if high_z_only: fig_height = 8

    label_size = 18
    figure_text_size = 18
    legend_font_size = 16
    tick_label_size_major = 15
    tick_label_size_minor = 13
    tick_size_major = 5
    tick_size_minor = 3
    tick_width_major = 1.5
    tick_width_minor = 1
    border_width = 1

    # Use a custom Helvetica font when it is available on the host system;
    # otherwise fall back to matplotlib's default font (prop=None).
    prop = None
    if system == 'Lux':
        prop = matplotlib.font_manager.FontProperties(
            fname=os.path.join('/home/brvillas/fonts', 'Helvetica.ttf'), size=12)
    if system == 'Shamrock':
        prop = matplotlib.font_manager.FontProperties(
            fname=os.path.join('/home/bruno/fonts/Helvetica', 'Helvetica.ttf'), size=12)

    dir_boss = ps_data_dir + 'data_power_spectrum_boss/'
    data_filename = dir_boss + 'data_table.py'
    data_boss = load_data_boss(data_filename)
    data_z_boss = data_boss['z_vals']

    data_filename = ps_data_dir + 'data_power_spectrum_walther_2019/data_table.txt'
    data_walther = load_power_spectrum_table(data_filename)
    data_z_w = data_walther['z_vals']

    dir_data_boera = ps_data_dir + 'data_power_spectrum_boera_2019/'
    data_boera = load_tabulated_data_boera(dir_data_boera)
    data_z_b = data_boera['z_vals']

    data_dir_viel = ps_data_dir + 'data_power_spectrum_viel_2013/'
    data_viel = load_tabulated_data_viel(data_dir_viel)
    data_z_v = data_viel['z_vals']

    dir_irsic = ps_data_dir + 'data_power_spectrum_irsic_2017/'
    data_filename = dir_irsic + 'data_table.py'
    data_irsic = load_data_irsic(data_filename)
    data_z_irsic = data_irsic['z_vals']

    z_vals_small_scale = [
        2.0, 2.2, 2.4, 2.6, 2.8, 3.0, 3.2, 3.4, 4.2, 4.6, 5.0, 5.4
    ]
    z_vals_large_scale = [
        2.2, 2.4, 2.6, 2.8, 3.0, 3.2, 3.4, 3.6, 3.8, 4.0, 4.2, 4.4
    ]
    z_vals_middle_scale = [3.0, 3.2, 3.4, 3.6, 3.8, 4.0, 4.2, 4.4]
    z_vals_small_scale_walther = [2.0, 2.2, 2.4, 2.6, 2.8, 3.0, 3.2, 3.4]
    z_vals_all = [2.2, 2.4, 2.6, 2.8, 3.0, 3.2, 3.4, 3.6, 3.8, 4.0, 4.2, 4.4]
    z_vals_small_highz = [4.2, 4.6, 5.0]
    z_high = [5.0, 5.4]
    z_large_middle = [3.0, 3.2, 3.4, 3.6, 3.8, 4.0]
    z_vals_large_reduced = [2.6, 2.8, 3.0, 3.2, 3.4, 3.6, 3.8, 4.0]
    z_vals_small_reduced = [4.2, 4.6, 5.0]

    if scales == 'large': z_vals = z_vals_large_scale
    elif scales == 'small': z_vals = z_vals_small_scale
    elif scales == 'middle': z_vals = z_vals_middle_scale
    elif scales == 'small_walther': z_vals = z_vals_small_scale_walther
    elif scales == 'small_highz': z_vals = z_vals_small_highz
    elif scales == 'all': z_vals = z_vals_all
    elif scales == 'large_middle': z_vals = z_large_middle
    elif scales == 'large_reduced': z_vals = z_vals_large_reduced
    elif scales == 'small_reduced': z_vals = z_vals_small_reduced
    else:
        print("ERROR: scales must be one of: large, small, middle, "
              "small_walther, small_highz, all, large_middle, "
              "large_reduced, small_reduced")
        return

    if high_z_only: z_vals = z_high

    nrows = 3
    ncols = 4

    if scales == 'small_walther': nrows = 2
    if high_z_only: nrows, ncols = 1, 2
    if scales == 'large_middle': ncols, nrows = 3, 2

    if scales == 'middle':
        nrows = 2
        flags = np.zeros((nrows, ncols))

    if scales == 'small_highz': nrows, ncols = 1, 3

    if scales == 'large_reduced': nrows, ncols = 2, 4
    if scales == 'small_reduced': nrows, ncols = 1, 3

    plot_boss, plot_walther, plot_boera, plot_viel, plot_irsic = False, False, False, False, False

    if scales == 'large': plot_boss = True
    if scales == 'all': plot_boss, plot_boera, plot_irsic = True, True, True
    if scales == 'middle': plot_boss, plot_irsic = True, True
    if scales == 'small_highz': plot_boss, plot_boera = True, True
    if scales == 'large_middle': plot_boss, plot_irsic = True, True
    if scales == 'large_reduced': plot_boss = True
    if scales == 'small_reduced':
        plot_boera = True
        fig_height *= 1.4

    fig, ax_l = plt.subplots(nrows=nrows,
                             ncols=ncols,
                             figsize=(2 * fig_width, fig_height * nrows))
    plt.subplots_adjust(hspace=0.02, wspace=0.02)

    c_pchw18 = pylab.cm.viridis(.7)
    c_hm12 = pylab.cm.cool(.3)

    c_boss = pylab.cm.viridis(.3)
    c_walther = pylab.cm.viridis(.3)
    c_viel = 'C1'
    c_boera = pylab.cm.Purples(.7)
    c_irsic = pylab.cm.Purples(.7)

    text_color = 'black'
    color_line = c_pchw18

    if scales == 'middle':
        c_walther = 'C3'

    if black_background:
        text_color = 'white'
        c_boss = 'C1'
        c_irsic = 'C9'
        c_boera = yellows[0]
        blue = blues[4]
        color_line = blue

    for index, current_z in enumerate(z_vals):

        indx_j = index % ncols
        indx_i = index // ncols

        if nrows > 1: ax = ax_l[indx_i][indx_j]
        else: ax = ax_l[indx_j]

        if scales == 'middle': flags[indx_i, indx_j] = 1

        if ps_data:
            for sim_id in ps_data:
                data_sim = ps_data[sim_id]
                label = data_sim['label']
                sim_z_vals = data_sim['z_vals']
                diff = np.abs(sim_z_vals - current_z)
                diff_min = diff.min()
                z_index = np.where(diff == diff_min)[0][0]
                data = data_sim[z_index]
                k = data['k_vals']
                ps = data['ps_mean']
                # Dimensionless flux power spectrum: Delta_F^2(k) = k P(k) / pi
                delta = ps * k / np.pi
                if current_z == 4.6: delta *= 1.1
                if current_z == 5.0: delta *= 1.1
                ax.plot(k,
                        delta,
                        linewidth=3,
                        label=label,
                        zorder=1,
                        c=color_line)

        if sim_data_sets:
            for sim_data in sim_data_sets:
                # Use a local name so the ps_data argument of this function is
                # not overwritten.
                if plot_ps_normalized:
                    sim_ps_data = sim_data['power_spectrum_normalized']
                    name = sim_ps_data['normalization_key']
                else:
                    sim_ps_data = sim_data['power_spectrum']
                sim_z_vals = sim_ps_data['z']
                diff = np.abs(sim_z_vals - current_z)
                diff_min = diff.min()
                z_index = np.where(diff == diff_min)[0][0]
                if diff_min < 0.08:
                    k = sim_ps_data['k_vals'][z_index]
                    ps = sim_ps_data['ps_mean'][z_index]
                    # Dimensionless flux power spectrum: Delta_F^2(k) = k P(k) / pi
                    delta = ps * k / np.pi
                    ax.plot(k,
                            delta,
                            linewidth=3,
                            label=sim_data['plot_label'],
                            zorder=1)

        text_pos_x = 0.85
        if scales == 'large_reduced': text_pos_x = 0.15
        ax.text(text_pos_x,
                0.95,
                r'$z={0:.1f}$'.format(current_z),
                horizontalalignment='center',
                verticalalignment='center',
                transform=ax.transAxes,
                fontsize=figure_text_size,
                color=text_color)

        if plot_boss:
            # Add Boss data
            z_diff = np.abs(data_z_boss - current_z)
            diff_min = z_diff.min()
            if diff_min < 1e-1:
                data_index = np.where(z_diff == diff_min)[0][0]
                data_z_local = data_z_boss[data_index]
                data_k = data_boss[data_index]['k_vals']
                data_delta_power = data_boss[data_index]['delta_power']
                data_delta_power_error = data_boss[data_index][
                    'delta_power_error']
                label_boss = 'eBOSS (2019)'
                d_boss = ax.errorbar(data_k,
                                     data_delta_power,
                                     yerr=data_delta_power_error,
                                     fmt='o',
                                     c=c_boss,
                                     label=label_boss,
                                     zorder=2)

        if plot_walther:
            # Add Walther data
            z_diff = np.abs(data_z_w - current_z)
            diff_min = z_diff.min()
            if diff_min < 1e-1:
                data_index = np.where(z_diff == diff_min)[0][0]
                data_z_local = data_z_w[data_index]
                data_k = data_walther[data_index]['k_vals']
                data_delta_power = data_walther[data_index]['delta_power']
                data_delta_power_error = data_walther[data_index][
                    'delta_power_error']
                label_walther = 'Walther et al. (2018)'
                d_walther = ax.errorbar(data_k,
                                        data_delta_power,
                                        yerr=data_delta_power_error,
                                        fmt='o',
                                        c=c_walther,
                                        label=label_walther,
                                        zorder=2)

        # Add Irsic data
        if plot_irsic:
            z_diff = np.abs(data_z_irsic - current_z)
            diff_min = z_diff.min()
            if diff_min < 1e-1:
                data_index = np.where(z_diff == diff_min)[0][0]
                data_z_local = data_z_irsic[data_index]

                data_k = data_irsic[data_index]['k_vals']
                data_delta_power = data_irsic[data_index]['delta_power']
                data_delta_power_error = data_irsic[data_index][
                    'delta_power_error']
                label_irsic = 'Irsic et al. (2017)'
                d_irsic = ax.errorbar(data_k,
                                      data_delta_power,
                                      yerr=data_delta_power_error,
                                      fmt='o',
                                      c=c_irsic,
                                      label=label_irsic,
                                      zorder=2)

        if plot_boera:
            # Add Boera data
            z_diff = np.abs(data_z_b - current_z)
            diff_min = z_diff.min()
            if diff_min < 1e-1:
                data_index = np.where(z_diff == diff_min)[0][0]
                data_z_local = data_z_b[data_index]
                data_k = data_boera[data_index]['k_vals']
                data_delta_power = data_boera[data_index]['delta_power']
                data_delta_power_error = data_boera[data_index][
                    'delta_power_error']
                label_boera = 'Boera et al. (2019)'
                d_boera = ax.errorbar(data_k,
                                      data_delta_power,
                                      yerr=data_delta_power_error,
                                      fmt='o',
                                      c=c_boera,
                                      label=label_boera,
                                      zorder=2)

        if plot_viel:
            # Add Viel data
            z_diff = np.abs(data_z_v - current_z)
            diff_min = z_diff.min()
            if diff_min < 1e-1:
                data_index = np.where(z_diff == diff_min)[0][0]
                data_z_local = data_z_v[data_index]
                data_k = data_viel[data_index]['k_vals']
                data_delta_power = data_viel[data_index]['delta_power']
                data_delta_power_error = data_viel[data_index][
                    'delta_power_error']
                label_viel = 'Viel et al. (2013)'
                d_viel = ax.errorbar(data_k,
                                     data_delta_power,
                                     yerr=data_delta_power_error,
                                     fmt='o',
                                     c=c_viel,
                                     label=label_viel,
                                     zorder=2)

        legend_loc = 3
        if indx_i == nrows - 1 and nrows != 2: legend_loc = 2

        if scales == 'large': legend_loc = 2
        if scales == 'middle': legend_loc = 2
        if scales == 'small_highz': legend_loc = 3
        if scales == 'large_middle': legend_loc = 2
        if scales == 'large_reduced': legend_loc = 4

        label_bars = r'1$\sigma$ skewers $P\,(\Delta_F^2)$'

        add_legend = False
        if indx_j == 0: add_legend = True

        # if scales == 'middle' and indx_i == nrows-1 and indx_j == ncols-1: add_legend = True

        if add_legend:
            # leg = ax.legend( loc=legend_loc, frameon=False, fontsize=12)
            leg = ax.legend(loc=legend_loc, frameon=False, prop=prop)

            for text in leg.get_texts():
                plt.setp(text, color=text_color)

        x_min, x_max = 4e-3, 2.5e-1
        if indx_i == 0: y_min, y_max = 1e-3, 9e-2
        if indx_i == 1: y_min, y_max = 5e-3, 2e-1
        if indx_i == 2: y_min, y_max = 5e-2, 3

        if scales == 'large':
            x_min, x_max = 2e-3, 2.3e-2
            if indx_i == 0: y_min, y_max = 1e-2, 1.2e-1
            if indx_i == 1: y_min, y_max = 2e-2, 2.5e-1
            if indx_i == 2: y_min, y_max = 5e-2, 7e-1

        if scales == 'middle':
            x_min, x_max = 2e-3, 7e-2
            if indx_i == 0: y_min, y_max = 1.8e-2, 2.5e-1
            if indx_i == 1: y_min, y_max = 5e-2, 7e-1

        if scales == 'all':
            x_min, x_max = 4e-3, 2.5e-1
            if indx_i == 0: y_min, y_max = 1e-3, 9e-2
            if indx_i == 1: y_min, y_max = 5e-3, 2e-1
            if indx_i == 2: y_min, y_max = 5e-2, 3

        if scales == 'small_highz':
            x_min, x_max = 2e-3, 3e-1
            if indx_i == 0: y_min, y_max = 5e-3, 1e0

        if scales == 'large_middle':
            x_min, x_max = 2e-3, 7e-2
            if indx_i == 0: y_min, y_max = 2e-2, 1.5e-1
            if indx_i == 1: y_min, y_max = 4e-2, 3.5e-1

        if scales == 'large_reduced':
            x_min, x_max = 2e-3, 2.5e-2
            if indx_i == 0: y_min, y_max = 1.2e-2, 1.3e-1
            if indx_i == 1: y_min, y_max = 2.5e-2, 4e-1

        if scales == 'small_reduced':
            x_min, x_max = 4e-3, 2e-1
            if indx_i == 0: y_min, y_max = 4e-2, 7e-1

        if high_z_only: y_min, y_max = 5e-2, 3

        ax.set_xlim(x_min, x_max)
        ax.set_ylim(y_min, y_max)
        ax.set_xscale('log')
        ax.set_yscale('log')

        for sp in ax.spines.values():
            sp.set_linewidth(border_width)

        ax.tick_params(axis='both',
                       which='major',
                       color=text_color,
                       labelcolor=text_color,
                       labelsize=tick_label_size_major,
                       size=tick_size_major,
                       width=tick_width_major,
                       direction='in')
        ax.tick_params(axis='both',
                       which='minor',
                       color=text_color,
                       labelcolor=text_color,
                       labelsize=tick_label_size_minor,
                       size=tick_size_minor,
                       width=tick_width_minor,
                       direction='in')

        if indx_i != nrows - 1: ax.set_xticklabels([])
        if indx_j > 0:
            ax.set_yticklabels([])
            ax.tick_params(axis='y', which='minor', labelsize=0)

        if indx_j == 0:
            ax.set_ylabel(r' $\Delta_F^2(k)$',
                          fontsize=label_size,
                          color=text_color)
        if indx_i == nrows - 1:
            ax.set_xlabel(r'$ k   \,\,\,  [\mathrm{s}\,\mathrm{km}^{-1}] $',
                          fontsize=label_size,
                          color=text_color)

        if black_background:
            fig.patch.set_facecolor('black')
            ax.set_facecolor('k')
            for spine in ax.spines.values():
                spine.set_edgecolor(text_color)

    if scales == 'middle':
        # Hide any grid panels that were not populated with a redshift.
        for i in range(nrows):
            for j in range(ncols):
                if not flags[i, j]:
                    ax_l[i][j].axis('off')

    fileName = output_dir + f'flux_ps_grid_{scales}'
    if plot_ps_normalized: fileName += f'_{name}'
    if high_z_only: fileName += '_highZ'
    fileName += '.png'
    # fileName += '.pdf'
    fig.savefig(fileName,
                pad_inches=0.1,
                bbox_inches='tight',
                dpi=fig_dpi,
                facecolor=fig.get_facecolor())
    print('Saved Image: ', fileName)
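

# Illustrative call (a sketch only; 'data/' matches the script below, while
# 'figures/' and the simulation inputs are assumed placeholders):
#
#   plot_power_spectrum_grid('data/', 'figures/',
#                            sim_data_sets=my_sim_data_sets,
#                            scales='large', system='Shamrock')
#
# The figure is saved to output_dir as flux_ps_grid_<scales>.png.

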
def Get_Comparable_Power_Spectrum(ps_data_dir,
                                  z_min,
                                  z_max,
                                  data_sets,
                                  ps_range,
                                  log_ps=False,
                                  rescaled_walther=False,
                                  rescale_walter_file=None):
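    """Assemble the observed flux power spectrum points that overlap the
    simulated (z, k) coverage, for comparison against the simulation.

    For every requested data set ('Boss', 'Walther', 'Boera', 'Viel', 'Irsic')
    and every redshift bin with z_min <= z <= z_max, the k values are trimmed
    to the [k_min, k_max] range of the closest simulated redshift in ps_range.
    If log_ps is True the returned mean and sigma are ln(Delta^2) and its
    propagated error; if rescaled_walther is True the Walther et al. points
    are multiplied by the alpha factors stored in rescale_walter_file.

    Returns a dictionary with the concatenated arrays under 'P(k)' and the
    per-measurement entries under 'separate'.
    """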
    print(f'Loading P(k) Data:')
    dir_boss = ps_data_dir + 'data_power_spectrum_boss/'
    data_filename = dir_boss + 'data_table.py'
    data_boss = load_data_boss(data_filename)

    dir_irsic = ps_data_dir + 'data_power_spectrum_irsic_2017/'
    data_filename = dir_irsic + 'data_table.py'
    data_irsic = load_data_irsic(data_filename)

    data_filename = ps_data_dir + 'data_power_spectrum_walther_2019/data_table.txt'
    data_walther = load_power_spectrum_table(data_filename)

    dir_data_boera = ps_data_dir + 'data_power_spectrum_boera_2019/'
    data_boera = load_tabulated_data_boera(dir_data_boera)

    data_dir_viel = ps_data_dir + 'data_power_spectrum_viel_2013/'
    data_viel = load_tabulated_data_viel(data_dir_viel)

    data_dir = {
        'Boss': data_boss,
        'Walther': data_walther,
        'Boera': data_boera,
        'Viel': data_viel,
        'Irsic': data_irsic
    }

    data_kvals, data_ps, data_ps_sigma, data_indices, data_z = [], [], [], [], []
    log_data_ps, log_data_ps_sigma = [], []
    sim_z = ps_range['z']
    sim_kmin, sim_kmax = ps_range['k_min'], ps_range['k_max']

    ps_data = {}
    data_id = 0
    for data_index, data_name in enumerate(data_sets):
        print(f' Loading P(k) Data: {data_name}')
        data_set = data_dir[data_name]
        keys = data_set.keys()
        n_indices = len(keys) - 1
        if data_name == 'Walther':
            if rescaled_walther:
                print(
                    f" Loading Walther rescale values: {rescale_walter_file}")
                rescale_walter_alphas = Load_Pickle_Directory(
                    rescale_walter_file)
        for index in range(n_indices):
            data = data_set[index]
            z = data['z']
            if z_min <= z <= z_max:
                # Keep only the k range covered by the closest simulated redshift.
                diff = np.abs(sim_z - z)
                id_min = np.where(diff == diff.min())[0][0]
                z_sim = sim_z[id_min]
                kmin = sim_kmin[id_min]
                kmax = sim_kmax[id_min]
                k_vals = data['k_vals']
                k_indices = np.where((k_vals >= kmin) & (k_vals <= kmax))
                k_vals = k_vals[k_indices]
                delta_ps = data['delta_power'][k_indices]
                delta_ps_sigma = data['delta_power_error'][k_indices]
                # Error propagation to log-space: sigma_ln(Delta^2) = sigma / Delta^2
                log_delta_ps = np.log(delta_ps)
                log_delta_ps_sigma = delta_ps_sigma / delta_ps
                if data_name == 'Walther' and rescaled_walther:
                    rescale_z = rescale_walter_alphas[index]['z']
                    rescale_alpha = rescale_walter_alphas[index]['alpha']
                    print(
                        f'  Rescaling z={rescale_z:.1f}    alpha={rescale_alpha:.3f}'
                    )
                    delta_ps *= rescale_alpha
                ps_data[data_id] = {
                    'z': z,
                    'k_vals': k_vals,
                    'delta_ps': delta_ps,
                    'delta_ps_sigma': delta_ps_sigma
                }
                data_z.append(z)
                data_kvals.append(k_vals)
                data_ps.append(delta_ps)
                data_ps_sigma.append(delta_ps_sigma)
                log_data_ps.append(log_delta_ps)
                log_data_ps_sigma.append(log_delta_ps_sigma)
                data_id += 1
    k_vals_all = np.concatenate(data_kvals)
    delta_ps_all = np.concatenate(data_ps)
    delta_ps_sigma_all = np.concatenate(data_ps_sigma)
    log_delta_ps_all = np.concatenate(log_data_ps)
    log_delta_ps_sigma_all = np.concatenate(log_data_ps_sigma)
    ps_data_out = {'P(k)': {}, 'separate': ps_data}
    ps_data_out['P(k)']['k_vals'] = k_vals_all
    if log_ps:
        ps_data_out['P(k)']['mean'] = log_delta_ps_all
        ps_data_out['P(k)']['sigma'] = log_delta_ps_sigma_all
    else:
        ps_data_out['P(k)']['mean'] = delta_ps_all
        ps_data_out['P(k)']['sigma'] = delta_ps_sigma_all

    n_data_points = len(k_vals_all)
    print(f' N data points: {n_data_points}')
    return ps_data_out
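

# Illustrative usage of Get_Comparable_Power_Spectrum (a sketch only: the z
# limits, data-set list and ps_range arrays below are assumed placeholders):
#
#   ps_range = {'z': sim_z, 'k_min': sim_k_min, 'k_max': sim_k_max}
#   comparable = Get_Comparable_Power_Spectrum('data/', 2.2, 5.0,
#                                              ['Boss', 'Walther', 'Boera'],
#                                              ps_range)
#   k_vals = comparable['P(k)']['k_vals']
#   ps_mean = comparable['P(k)']['mean']
#   ps_sigma = comparable['P(k)']['sigma']

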
# Box parameters
Lbox = 50.0  # Mpc/h
nPoints = 2048
nx = nPoints
ny = nPoints
nz = nPoints
ncells = nx * ny * nz

dir_boss = 'data/data_power_spectrum_boss/'
data_filename = dir_boss + 'data_table.py'
data_boss = load_data_boss(data_filename)
data_z_boss = data_boss['z_vals']

data_filename = 'data/data_power_spectrum_walther_2019/data_table.txt'
data_walther = load_power_spectrum_table(data_filename)
data_z_w = data_walther['z_vals']

dir_data_boera = 'data/data_power_spectrum_boera_2019/'
data_boera = load_tabulated_data_boera(dir_data_boera)
data_z_b = data_boera['z_vals']

data_dir_viel = 'data/data_power_spectrum_viel_2013/'
data_viel = load_tabulated_data_viel(data_dir_viel)
data_z_v = data_viel['z_vals']

snapshots_indices = [
    83,
    90,
    96,
    102,
]
def Get_Comparable_Power_Spectrum(ps_data_dir, z_min, z_max, data_sets,
                                  ps_range):
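    """Alternate, simpler variant of Get_Comparable_Power_Spectrum: builds the
    same comparable P(k) structure from the Boss, Walther, Boera and Viel
    tables, without the Irsic data set or the log / Walther-rescaling options.
    """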
    print(f'Loading P(k) Data:')
    dir_boss = ps_data_dir + 'data_power_spectrum_boss/'
    data_filename = dir_boss + 'data_table.py'
    data_boss = load_data_boss(data_filename)

    data_filename = ps_data_dir + 'data_power_spectrum_walther_2019/data_table.txt'
    data_walther = load_power_spectrum_table(data_filename)

    dir_data_boera = ps_data_dir + 'data_power_spectrum_boera_2019/'
    data_boera = load_tabulated_data_boera(dir_data_boera)

    data_dir_viel = ps_data_dir + 'data_power_spectrum_viel_2013/'
    data_viel = load_tabulated_data_viel(data_dir_viel)

    data_dir = {
        'Boss': data_boss,
        'Walther': data_walther,
        'Boera': data_boera,
        'Viel': data_viel
    }

    data_kvals, data_ps, data_ps_sigma, data_indices, data_z = [], [], [], [], []

    sim_z = ps_range['z']
    sim_kmin, sim_kmax = ps_range['k_min'], ps_range['k_max']

    ps_data = {}
    data_id = 0
    for data_index, data_name in enumerate(data_sets):
        print(f' Loading P(k) Data: {data_name}')
        data_set = data_dir[data_name]
        keys = data_set.keys()
        n_indices = len(keys) - 1
        for index in range(n_indices):
            data = data_set[index]
            z = data['z']
            if z_min <= z <= z_max:
                diff = np.abs(sim_z - z)
                id_min = np.where(diff == diff.min())[0][0]
                z_sim = sim_z[id_min]
                # print( f'data_z: {z:.1f} sim_z: {z_sim:.1f}')
                kmin = sim_kmin[id_min]
                kmax = sim_kmax[id_min]
                k_vals = data['k_vals']
                k_indices = np.where((k_vals >= kmin) & (k_vals <= kmax))
                k_vals = k_vals[k_indices]
                delta_ps = data['delta_power'][k_indices]
                delta_ps_sigma = data['delta_power_error'][k_indices]
                ps_data[data_id] = {
                    'z': z,
                    'k_vals': k_vals,
                    'delta_ps': delta_ps,
                    'delta_ps_sigma': delta_ps_sigma
                }
                data_z.append(z)
                data_kvals.append(k_vals)
                data_ps.append(delta_ps)
                data_ps_sigma.append(delta_ps_sigma)
                data_id += 1
    k_vals_all = np.concatenate(data_kvals)
    delta_ps_all = np.concatenate(data_ps)
    delta_ps_sigma_all = np.concatenate(data_ps_sigma)
    ps_data_out = {'P(k)': {}, 'separate': ps_data}
    ps_data_out['P(k)']['k_vals'] = k_vals_all
    ps_data_out['P(k)']['mean'] = delta_ps_all
    ps_data_out['P(k)']['sigma'] = delta_ps_sigma_all

    n_data_points = len(k_vals_all)
    print(f' N data points: {n_data_points}')
    return ps_data_out