示例#1
0
def pixel_tuning_bias(pixel,
                      tuning_filename,
                      z_values,
                      d_delta=10**-9,
                      d_eta=10**-2,
                      z_width=0.2,
                      a_v=1.0):
    """Measure the delta and eta biases for one HEALPix pixel.

    Loads the pixel's gaussian-colore skewers and runs them through
    bias_tuning at each requested z value.

    NOTE(review): relies on module-level base_dir, N_side, input_format,
    measured_SIGMA_G and IVAR_cutoff. The a_v parameter is accepted but
    not used here.
    """
    skewers_path = utils.get_file_name(utils.get_dir_name(base_dir, pixel),
                                       'gaussian-colore',
                                       N_side,
                                       pixel,
                                       compressed=True)
    # No file-number suffix is needed; the file is identified by pixel alone.
    pixel_object = simulation_data.SimulationData.get_skewers_object(
        skewers_path,
        None,
        input_format,
        SIGMA_G=measured_SIGMA_G,
        IVAR_cutoff=IVAR_cutoff)

    b, b_eta = bias_tuning(pixel_object,
                           tuning_filename,
                           z_values,
                           d_delta=d_delta,
                           d_eta=d_eta,
                           z_width=z_width)

    return (b, b_eta)
示例#2
0
 def get_pixel_P1D(pixel, file_type='image'):
     """Compute the 1D power spectrum for one pixel at each requested z.

     With file_type 'image', reads a picca-format file (deltas, ivar and
     a log-lambda map). Returns (pixel, {z_value: {'k','Pk','var'}}).

     NOTE(review): relies on module-level args, lya, dr_hMpc and gaussian.
     """
     if file_type == 'image':
         dirname = utils.get_dir_name(args.base_dir, pixel)
         filename = utils.get_file_name(dirname,
                                        'picca-' + args.quantity,
                                        args.nside,
                                        pixel,
                                        compressed=args.compressed_input)
         h = fits.open(filename)
         # picca layout: HDU0 deltas, HDU1 ivar (transposed to
         # one row per skewer), HDU2 log10(lambda).
         delta_rows = h[0].data.T
         ivar_rows = h[1].data.T
         # Convert log10(lambda) to redshift via the Lya rest wavelength.
         z = 10**(h[2].data) / lya - 1
         h.close()
     elif file_type == 'delta':
         # NOTE(review): unfinished stub — delta_rows is overwritten with
         # None per HDU, ivar_rows/z are never set (so the Pk1D call below
         # would fail), and the file is never closed. Confirm intent.
         filename = args.base_dir + '/delta-{}.fits.gz'.format(pixel)
         h = fits.open(filename)
         for hdu in h[1:]:
             delta_rows = None
     Pk1D_results = {}
     for z_value in args.z_values:
         k, Pk, var = Pk1D.get_Pk1D(delta_rows,
                                    ivar_rows,
                                    dr_hMpc,
                                    z,
                                    z_value=z_value,
                                    z_width=args.z_width,
                                    units=args.units,
                                    R1=args.smoothing_radius,
                                    gaussian=gaussian)
         Pk1D_results[z_value] = {'k': k, 'Pk': Pk, 'var': var}
     return (pixel, Pk1D_results)
示例#3
0
 def get_DLA_data(pixel):
     """Extract the DLA data from this pixel's transmission file.

     NOTE(review): relies on module-level base_dir, N_side and
     compressed_input.
     """
     transmission_path = utils.get_file_name(
         utils.get_dir_name(base_dir, pixel),
         'transmission',
         N_side,
         pixel,
         compressed=compressed_input)
     return DLA.get_DLA_data_from_transmission(pixel, transmission_path)
示例#4
0
 def modify_header(pixel):
     """Stamp SIGMA_G_global into every extension header of the pixel's
     gaussian-colore file, rewriting the file in place.

     NOTE(review): relies on module-level args and SIGMA_G_global.
     """
     filename = utils.get_file_name(utils.get_dir_name(args.out_dir, pixel),
                                    'gaussian-colore',
                                    args.nside,
                                    pixel,
                                    compressed=args.compress)
     hdulist = fits.open(filename)
     # Skip the primary HDU; only the extensions carry SIGMA_G.
     for ext in hdulist[1:]:
         ext.header['SIGMA_G'] = SIGMA_G_global
     hdulist.writeto(filename, overwrite=True)
     hdulist.close()
     return
示例#5
0
def renormalise_pixel(pixel):
    """Renormalise a pixel's rebinned flux skewers by the global mean flux.

    Converts F to delta_F = F/<F> - 1 on the selected cells and writes a
    new picca file alongside the input.

    NOTE(review): relies on module-level basedir, N_merge, cells, mean_F
    and overwrite.
    """
    dirname = utils.get_dir_name(basedir, pixel)
    in_path = utils.get_file_name(
        dirname, 'picca-flux-notnorm-rebin-{}-new'.format(N_merge), 16, pixel)
    h = fits.open(in_path)

    # F -> delta_F on the selected cells only.
    delta_rows = h[0].data.T[:, cells] / mean_F[cells] - 1

    # Deltas are replaced; the ivar, log-lambda and catalog HDUs carry over.
    new_hdus = fits.HDUList([
        fits.PrimaryHDU(data=delta_rows.T, header=h[0].header),
        h[1],
        h[2],
        h[3],
    ])
    out_path = utils.get_file_name(
        dirname, 'picca-flux-rebin-{}-new'.format(N_merge), 16, pixel)
    new_hdus.writeto(out_path, overwrite=overwrite)
    new_hdus.close()
    h.close()

    return
示例#6
0
    def get_statistics(pixel):
        """Load the without- and with-RSD statistics tables for a pixel.

        Returns (statistics_noRSD, statistics): the HDU-1 data arrays of
        the two statistics files.

        NOTE(review): relies on enclosing-scope base_dir, N_side and
        compressed_input.
        """
        dirname = utils.get_dir_name(base_dir, pixel)

        def _read_table(quantity):
            # Open the statistics file, grab HDU 1's table, and close.
            path = utils.get_file_name(dirname,
                                       quantity,
                                       N_side,
                                       pixel,
                                       compressed=compressed_input)
            hdulist = fits.open(path)
            table = hdulist[1].data
            hdulist.close()
            return table

        # noRSD file is read first, matching the original ordering.
        return _read_table('statistics-noRSD'), _read_table('statistics')
示例#7
0
def pixelise_colore_output(pixel, colore_base_filename, z_min, out_dir,
                           N_side):
    """Convert raw CoLoRe output for one HEALPix pixel into a pixelised
    colore file, optionally measuring SIGMA_G for gaussian skewers.

    Returns (N, SIGMA_G) when the skewer type is gaussian, else None.

    NOTE(review): relies on module-level args and shared_MOCKID_lookup;
    z_min is accepted but not used here.
    """
    # Output directory for this pixel, per the new file structure.
    location = utils.get_dir_name(out_dir, pixel)

    # Build the in-memory pixel object from the CoLoRe output.
    pixel_object = simulation_data.make_pixel_object(
        pixel,
        colore_base_filename,
        args.file_format,
        args.skewer_type,
        shared_MOCKID_lookup,
        IVAR_cutoff=args.rest_frame_weights_cut)

    # TODO: These could be made beforehand and passed to the function? Or is there already enough being passed?
    # Assemble the standard header cards.
    header = fits.Header()
    for key, value in (('HPXNSIDE', N_side), ('HPXPIXEL', pixel),
                       ('HPXNEST', True), ('LYA', utils.lya_rest)):
        header[key] = value

    # Write the pixelised colore file.
    filename = utils.get_file_name(location,
                                   '{}-colore'.format(args.skewer_type),
                                   N_side, pixel)
    pixel_object.save_as_colore(args.skewer_type,
                                filename,
                                header,
                                overwrite=args.overwrite,
                                compress=args.compress)

    if args.skewer_type != 'gaussian':
        return

    # For gaussian skewers, measure SIGMA_G and count contributing cells.
    pixel_object.compute_SIGMA_G(type='single_value',
                                 lr_max=args.rest_frame_weights_cut)
    header['SIGMA_G'] = pixel_object.SIGMA_G
    N = np.sum(pixel_object.IVAR_rows.astype('int'))

    return (N, pixel_object.SIGMA_G)
示例#8
0
def pixel_tuning_bias(pixel, tuning_filename, z_values, d=0.001, z_width=0.2):
    """Build PDF-tuning histograms for one pixel's gaussian skewers.

    NOTE(review): relies on module-level base_dir, N_side, input_format,
    measured_SIGMA_G, IVAR_cutoff and N_bins. The d parameter is accepted
    but not used here.
    """
    dirname = utils.get_dir_name(base_dir, pixel)
    # [:-3] drops the trailing three characters of the generated name
    # (presumably a '.gz' suffix — confirm against utils.get_file_name).
    gaussian_filename = utils.get_file_name(dirname, 'gaussian-colore', N_side,
                                            pixel)[:-3]
    pixel_object = simulation_data.SimulationData.get_skewers_object(
        gaussian_filename,
        None,
        input_format,
        SIGMA_G=measured_SIGMA_G,
        IVAR_cutoff=IVAR_cutoff)

    # N_bins equally spaced bins covering [0, 1].
    edges = np.linspace(0., 1., N_bins + 1)
    return pdf_tuning(pixel_object,
                      tuning_filename,
                      z_values,
                      z_width=z_width,
                      bins=edges)
示例#9
0
def produce_final_skewers(base_out_dir, pixel, N_side, lambda_min,
                          tuning_file):
    """Run the full end-of-pipeline processing for one HEALPix pixel.

    Starting from the pixelised '{skewer_type}-colore' file, this scales
    velocities via the tuning file, optionally adds Lyb/metal absorbers,
    DLAs, small-scale fluctuations and RSDs, and writes the colore,
    picca and transmission output files requested by args.

    Returns the pixel number if trimming leaves no QSOs, otherwise the
    new cosmology data for this pixel.

    NOTE(review): relies on module-level args and SIGMA_G_global.
    """

    t = time.time()

    # Define a random seed for use in this pixel.
    seed = int(pixel * 10**5 + args.seed)

    #We work from the gaussian colore files made in 'pixelise gaussian skewers'.
    location = utils.get_dir_name(base_out_dir, pixel)
    gaussian_filename = utils.get_file_name(location,
                                            '{}-colore'.format(
                                                args.skewer_type),
                                            N_side,
                                            pixel,
                                            compressed=args.compress)

    # Make a pixel object from it.
    file_number = None
    pixel_object = simulation_data.SimulationData.get_skewers_object(
        gaussian_filename,
        file_number,
        args.file_format,
        args.skewer_type,
        IVAR_cutoff=args.rest_frame_weights_cut)
    if args.skewer_type == 'gaussian':
        pixel_object.SIGMA_G = SIGMA_G_global

    # Make a transformation object and add it to the pixel object.
    pixel_object.add_transformation_from_file(tuning_file)

    #Scale the velocities.
    pixel_object.scale_velocities(use_transformation=True)

    #print('{:3.2f} checkpoint object'.format(time.time()-t)); t = time.time()

    #Add Lyb and metal absorbers if needed.
    if args.add_Lyb:
        pixel_object.setup_Lyb_absorber()
    if args.add_metals:
        pixel_object.setup_metal_absorbers(selection=args.metals_selection,
                                           metals_list=args.metals_list)

    #Make some useful headers
    header = fits.Header()
    header['HPXNSIDE'] = N_side
    header['HPXPIXEL'] = pixel
    header['HPXNEST'] = True
    header['LYA'] = utils.lya_rest
    if args.skewer_type == 'gaussian':
        header['SIGMA_G'] = pixel_object.SIGMA_G

    #Save CoLoRe format files.
    if args.transmission_only == False:
        if args.skewer_type == 'gaussian':
            pixel_object.compute_physical_skewers()
        filename = utils.get_file_name(location, 'density-colore', N_side,
                                       pixel)
        pixel_object.save_as_colore('density',
                                    filename,
                                    header,
                                    overwrite=args.overwrite,
                                    compress=args.compress)

    #Trim the skewers (remove low lambda cells). Exit if no QSOs are left.
    #We don't cut too tightly on the low lambda to allow for RSDs.
    lambda_buffer = 100.  #A
    pixel_object.trim_skewers(lambda_min - lambda_buffer,
                              args.min_cat_z,
                              extra_cells=1)
    if pixel_object.N_qso == 0:
        print('\nwarning: no objects left in pixel {} after trimming.'.format(
            pixel))
        return pixel

    #Save picca format files without adding small scale power.
    if args.transmission_only == False:
        if args.skewer_type == 'gaussian':
            filename = utils.get_file_name(location,
                                           'picca-gaussian-colorecell', N_side,
                                           pixel)
            pixel_object.save_as_picca_delta('gaussian',
                                             filename,
                                             header,
                                             overwrite=args.overwrite,
                                             add_QSO_RSDs=args.add_QSO_RSDs,
                                             compress=args.compress)

        filename = utils.get_file_name(location, 'picca-density-colorecell',
                                       N_side, pixel)
        pixel_object.save_as_picca_delta('density',
                                         filename,
                                         header,
                                         overwrite=args.overwrite,
                                         add_QSO_RSDs=args.add_QSO_RSDs,
                                         compress=args.compress)

    #print('{:3.2f} checkpoint colore files'.format(time.time()-t)); t = time.time()

    #Add a table with DLAs in to the pixel object.
    # TODO: in future, we want DLAs all the way down to z=0.
    #That means we need to store skewers all the way down to z=0.
    #May need to adjust how many nodes are used when running.
    if args.add_DLAs:
        pixel_object.add_DLA_table(seed,
                                   dla_bias=args.DLA_bias,
                                   evol=args.DLA_bias_evol,
                                   method=args.DLA_bias_method)

    #print('{:3.2f} checkpoint DLAs'.format(time.time()-t)); t = time.time()

    #Add small scale power to the gaussian skewers:
    if args.add_small_scale_fluctuations:
        generator = np.random.RandomState(seed)
        pixel_object.add_small_scale_fluctuations(
            args.cell_size,
            generator,
            white_noise=False,
            lambda_min=lambda_min,
            IVAR_cutoff=args.rest_frame_weights_cut,
            use_transformation=True)

        if args.skewer_type == 'gaussian':
            #Remove the 'SIGMA_G' header as SIGMA_G now varies with z, so can't be stored in a header.
            del header['SIGMA_G']

    #print('{:3.2f} checkpoint SSF'.format(time.time()-t)); t = time.time()

    #Recompute physical skewers, and then the tau skewers.
    if args.skewer_type == 'gaussian':
        pixel_object.compute_physical_skewers()
    pixel_object.compute_all_tau_skewers()

    if args.transmission_only == False:

        if args.skewer_type == 'gaussian':
            #Picca Gaussian, small cells
            filename = utils.get_file_name(location, 'picca-gaussian', N_side,
                                           pixel)
            pixel_object.save_as_picca_delta('gaussian',
                                             filename,
                                             header,
                                             overwrite=args.overwrite,
                                             add_QSO_RSDs=args.add_QSO_RSDs,
                                             compress=args.compress)

        #Picca density
        filename = utils.get_file_name(location, 'picca-density', N_side,
                                       pixel)
        pixel_object.save_as_picca_delta('density',
                                         filename,
                                         header,
                                         overwrite=args.overwrite,
                                         add_QSO_RSDs=args.add_QSO_RSDs,
                                         compress=args.compress)

        #Picca tau
        filename = utils.get_file_name(location, 'picca-tau-noRSD-notnorm',
                                       N_side, pixel)
        pixel_object.save_as_picca_delta(
            'tau',
            filename,
            header,
            notnorm=True,
            overwrite=args.overwrite,
            add_QSO_RSDs=args.add_QSO_RSDs,
            compress=args.compress,
            all_absorbers=args.picca_all_absorbers)

        #Picca flux
        filename = utils.get_file_name(location, 'picca-flux-noRSD-notnorm',
                                       N_side, pixel)
        pixel_object.save_as_picca_delta(
            'flux',
            filename,
            header,
            notnorm=True,
            overwrite=args.overwrite,
            add_QSO_RSDs=args.add_QSO_RSDs,
            compress=args.compress,
            all_absorbers=args.picca_all_absorbers)
        """
        ## Disable this for the moment.
        #Save the no RSD statistics file for this pixel.
        filename = utils.get_file_name(location,'statistics-noRSD',N_side,pixel)
        statistics = pixel_object.save_statistics(filename,overwrite=args.overwrite,compress=args.compress,all_absorbers=args.picca_all_absorbers)
        """

    #print('{:3.2f} checkpoint noRSD files'.format(time.time()-t)); t = time.time()

    #Add RSDs from the velocity skewers provided by CoLoRe.
    if args.add_RSDs == True:
        pixel_object.add_all_RSDs(thermal=args.include_thermal_effects)

    #print('{:3.2f} checkpoint RSDs'.format(time.time()-t)); t = time.time()

    #Trim the skewers (remove low lambda cells). Exit if no QSOs are left.
    #We now cut hard at lambda min as RSDs have been implemented.
    pixel_object.trim_skewers(lambda_min, args.min_cat_z, extra_cells=1)
    if pixel_object.N_qso == 0:
        print('\nwarning: no objects left in pixel {} after trimming.'.format(
            pixel))
        return pixel

    #Make a variable containing the new cosmology data.
    new_cosmology = pixel_object.return_cosmology()

    #Save the transmission file.
    filename = utils.get_file_name(location, 'transmission', N_side, pixel)
    pixel_object.save_as_transmission(filename,
                                      header,
                                      overwrite=args.overwrite,
                                      wave_min=args.transmission_lambda_min,
                                      wave_max=args.transmission_lambda_max,
                                      wave_step=args.transmission_delta_lambda,
                                      fmt=args.transmission_format,
                                      add_QSO_RSDs=args.add_QSO_RSDs,
                                      compress=args.compress)

    if args.transmission_only == False and args.add_RSDs == True:
        #Picca tau
        filename = utils.get_file_name(location, 'picca-tau-notnorm', N_side,
                                       pixel)
        pixel_object.save_as_picca_delta(
            'tau',
            filename,
            header,
            notnorm=True,
            overwrite=args.overwrite,
            add_QSO_RSDs=args.add_QSO_RSDs,
            compress=args.compress,
            all_absorbers=args.picca_all_absorbers)

        #Picca flux
        filename = utils.get_file_name(location, 'picca-flux-notnorm', N_side,
                                       pixel)
        pixel_object.save_as_picca_delta(
            'flux',
            filename,
            header,
            notnorm=True,
            overwrite=args.overwrite,
            add_QSO_RSDs=args.add_QSO_RSDs,
            compress=args.compress,
            all_absorbers=args.picca_all_absorbers)
        """
        ## Disable this for the moment.
        #Save the final statistics file for this pixel.
        filename = utils.get_file_name(location,'statistics',N_side,pixel)
        statistics = pixel_object.save_statistics(filename,overwrite=args.overwrite,compress=args.compress,all_absorbers=args.picca_all_absorbers)
        """
    else:
        #If transmission_only is not False, remove the gaussian-colore file.
        # NOTE(review): this else also runs when transmission_only is False
        # but add_RSDs is False — confirm the gaussian-colore file should be
        # removed in that case too.
        os.remove(gaussian_filename)

    #print('{:3.2f} checkpoint RSD files'.format(time.time()-t)); t = time.time()

    return new_cosmology
示例#10
0
    def normalise_and_rebin(pixel):
        """Renormalise and rebin this pixel's picca files.

        For each N_merge in N_merge_values: type-1 quantities are only
        rebinned (when N_merge > 1); type-2 quantities are renormalised
        to the per-quantity means from statistics_noRSD / statistics and
        rebinned, for both the noRSD and RSD variants.

        NOTE(review): relies on enclosing-scope base_dir, N_side,
        compressed_input, N_merge_values, type_1_quantities,
        type_2_quantities, stats_quantities, statistics_noRSD,
        statistics, overwrite and compress.
        """

        #Get the directory name.
        dirname = utils.get_dir_name(base_dir, pixel)

        for N_merge in N_merge_values:
            for i, q in enumerate(type_1_quantities):

                #print('rebinning {} file'.format(q))
                t = time.time()
                #Rebin the files.
                filename = utils.get_file_name(dirname,
                                               'picca-' + q,
                                               N_side,
                                               pixel,
                                               compressed=compressed_input)
                if N_merge > 1:
                    out = utils.get_file_name(
                        dirname, 'picca-' + q + '-rebin-{}'.format(N_merge),
                        N_side, pixel)
                    utils.renorm_rebin_picca_file(filename,
                                                  N_merge=N_merge,
                                                  out_filepath=out,
                                                  overwrite=overwrite)
                #print('--> {:1.3f}s'.format(time.time()-t))

            for i, q in enumerate(type_2_quantities):

                #print('getting stats data')
                t = time.time()
                #Get the old mean, and renormalise.
                # TODO: this use of "stats_quantities" is v ugly
                lookup_name = stats_quantities[i] + '_MEAN'
                #print('--> {:1.3f}s'.format(time.time()-t))

                #print('rebin/renorm-ing {} noRSD file'.format(q))
                t = time.time()
                #Renormalise the files without RSDs.
                #old_mean = s_noRSD[1].data[lookup_name]
                old_mean = None
                new_mean = statistics_noRSD[lookup_name]
                filename = utils.get_file_name(dirname,
                                               'picca-' + q + '-noRSD-notnorm',
                                               N_side,
                                               pixel,
                                               compressed=compressed_input)
                if N_merge == 1:
                    out = utils.get_file_name(dirname, 'picca-' + q + '-noRSD',
                                              N_side, pixel)
                else:
                    out = utils.get_file_name(
                        dirname,
                        'picca-' + q + '-noRSD-rebin-{}'.format(N_merge),
                        N_side, pixel)
                #print(out)
                utils.renorm_rebin_picca_file(filename,
                                              old_mean=old_mean,
                                              new_mean=new_mean,
                                              N_merge=N_merge,
                                              out_filepath=out,
                                              overwrite=overwrite,
                                              compress=compress)
                #print('--> {:1.3f}s'.format(time.time()-t))

                #print('rebin/renorm-ing {} RSD file'.format(q))
                t = time.time()
                #Renormalise the files with RSDs.
                #old_mean = s[1].data[lookup_name]
                old_mean = None
                new_mean = statistics[lookup_name]
                filename = utils.get_file_name(dirname,
                                               'picca-' + q + '-notnorm',
                                               N_side,
                                               pixel,
                                               compressed=compressed_input)
                if N_merge == 1:
                    out = utils.get_file_name(dirname, 'picca-' + q, N_side,
                                              pixel)
                else:
                    out = utils.get_file_name(
                        dirname, 'picca-' + q + '-rebin-{}'.format(N_merge),
                        N_side, pixel)
                #print(out)
                utils.renorm_rebin_picca_file(filename,
                                              old_mean=old_mean,
                                              new_mean=new_mean,
                                              N_merge=N_merge,
                                              out_filepath=out,
                                              overwrite=overwrite,
                                              compress=compress)
                #print('--> {:1.3f}s'.format(time.time()-t))

        return
示例#11
0
        'c': colours[3],
        'ls': '-'
    },
}

#Deduced variables.
N_stages = len(stages_1)
N_types = len(plot_types)
dirname = utils.get_dir_name(basedir, pixel)

#Open one file per stage. files_2 gets a None placeholder where stages_2
#has no counterpart for that stage, keeping the two lists index-aligned.
files_1 = []
files_2 = []
for i in range(N_stages):
    filename = utils.get_file_name(dirname,
                                   stages_1[i],
                                   N_side,
                                   pixel,
                                   compressed=True)
    h = fits.open(filename)
    files_1 += [h]
    if stages_2[i] is not None:
        filename = utils.get_file_name(dirname,
                                       stages_2[i],
                                       N_side,
                                       pixel,
                                       compressed=True)
        h = fits.open(filename)
        files_2 += [h]
    else:
        files_2 += [None]
示例#12
0
def convert_picca_tau_to_flux(pixel):
    """Convert a pixel's picca tau skewers to rebinned, unnormalised flux.

    Reads 'picca-tau-notnorm', merges cells in groups of N_merge, drops
    skewers with too few valid cells, exponentiates tau to flux, and
    writes 'picca-flux-notnorm-rebin-{N_merge}-new'. Returns
    (mean_F, weights): the ivar-weighted mean flux per cell and the
    summed ivar weights.

    NOTE(review): relies on module-level basedir, N_merge,
    min_number_cells, small and overwrite.
    """
    #for each pixel
    dirname = utils.get_dir_name(basedir, pixel)
    filepath = utils.get_file_name(dirname, 'picca-tau-notnorm', 16, pixel)
    h = fits.open(filepath)

    #Get data
    header = h[0].header
    skewer_rows = h[0].data.T
    IVAR_rows = h[1].data.T
    LOGLAM_MAP = h[2].data
    CATALOG = h[3].data

    #Rebin data
    # Merged ivar is set to 1 only where merge_cells returns exactly 1 —
    # presumably requiring every constituent cell to be good; confirm
    # merge_cells semantics.
    skewer_rows = utils.merge_cells(skewer_rows, N_merge)
    IVAR_rows = (utils.merge_cells(IVAR_rows, N_merge) == 1).astype('float32')
    LOGLAM_MAP = np.log10(utils.merge_cells(10**LOGLAM_MAP, N_merge))

    Z = (10**LOGLAM_MAP / utils.lya_rest) - 1

    #Filter out QSOs with less than the min number of relevant cells.
    relevant_QSOs = (np.sum(IVAR_rows, axis=1) > min_number_cells)
    skewer_rows = skewer_rows[relevant_QSOs, :]
    IVAR_rows = IVAR_rows[relevant_QSOs, :]
    CATALOG = CATALOG[relevant_QSOs]

    #Exponentiate to flux.
    skewer_rows = np.exp(-skewer_rows)

    #Get the average.
    # 'small' presumably keeps zero-weight columns from raising — confirm.
    mean_F = np.average(skewer_rows, weights=IVAR_rows + small, axis=0)
    weights = np.sum(IVAR_rows, axis=0)
    """
    #Convert from F to delta_F.
    s = fits.open(statistics_file)
    mean_F_full = s[1].data['F_MEAN']
    mean_F_full_z = s[1].data['z']
    mean_F = np.interp(Z,mean_F_full_z,mean_F_full)
    #mean_F = utils.merge_cells(mean_F,N_merge)
    for i in range(skewer_rows.shape[0]):
        cells = Z < CATALOG['Z'][i]
        skewer_rows[i,cells] /= mean_F[cells]
    skewer_rows -= 1.
    s.close()
    """

    #Reconstruct the non-delta HDUs.
    hdu_deltas_new = fits.PrimaryHDU(data=skewer_rows.T, header=header)
    hdu_iv_new = fits.ImageHDU(data=IVAR_rows.T, header=h[1].header, name='IV')
    hdu_LOGLAM_MAP_new = fits.ImageHDU(data=LOGLAM_MAP,
                                       header=h[2].header,
                                       name='LOGLAM_MAP')
    hdu_CATALOG_new = fits.BinTableHDU(CATALOG,
                                       header=h[3].header,
                                       name='CATALOG')

    hdulist = fits.HDUList(
        [hdu_deltas_new, hdu_iv_new, hdu_LOGLAM_MAP_new, hdu_CATALOG_new])
    out_filepath = utils.get_file_name(
        dirname, 'picca-flux-notnorm-rebin-{}-new'.format(N_merge), 16, pixel)
    hdulist.writeto(out_filepath, overwrite=overwrite)
    hdulist.close()
    h.close()

    return (mean_F, weights)
示例#13
0
def measure_pixel_segment(pixel,C0,C1,C2,texp,D0,D1,D2,n,k1,R_kms,a_v,RSD_weights,prep=False):
    """Transform and measure one pixel for a given set of tuning params.

    Builds a pixel object, applies a z-dependent transformation defined by
    (C0,C1,C2), texp and (D0,D1,D2) plus single-valued parameters
    (n, k1, R_kms, a_v), adds small-scale power and computes tau skewers.

    With prep=True, returns (pixel, RSD_weights, b_eta_weights_dict) for
    later reuse. Otherwise applies RSDs and returns a list of
    tuning.function_measurement objects, one per z value.

    NOTE(review): relies on module-level args, lya, d_delta,
    remove_P1D_data, RSD_weights_dict and bias_eta_weights_dict. The
    RSD_weights parameter is never read — the else branch re-fetches the
    weights from RSD_weights_dict[pixel]; confirm whether the parameter
    should be used instead.
    """

    t = time.time()
    seed = int(pixel * 10**5 + args.seed)

    #print('start pixel {} at {}'.format(pixel,time.ctime()))

    #Get the filename of the gaussian skewer.
    location = utils.get_dir_name(args.base_dir,pixel)
    filename = utils.get_file_name(location,'{}-colore'.format(args.skewer_type),args.nside,pixel,compressed=args.compressed_input)

    #Make a pixel object from it.
    data = simulation_data.SimulationData.get_skewers_object(filename,None,args.file_format,args.skewer_type,IVAR_cutoff=args.lambda_rest_max)
    #print('{:3.2f} checkpoint sim_dat'.format(time.time()-t))
    t = time.time()

    #Get the transformation for the current set of input parameters.
    transformation = tuning.Transformation()
    def f_tau0_z(z):
        # z-dependence with coefficients C0, C1, C2 — see get_parameter.
        return get_parameter(z,C0,C1,C2)
    def f_texp_z(z):
        # Constant in z: higher-order coefficients are zero.
        return get_parameter(z,texp,0.,0.)
    def f_seps_z(z):
        return get_parameter(z,D0,D1,D2)
    transformation.add_zdep_parameters_from_functions(f_tau0_z,f_texp_z,f_seps_z)
    transformation.add_singval_parameters(n=n,k1=k1,R_kms=R_kms,a_v=a_v)
    data.add_transformation(transformation)

    #Scale the RSD skewers.
    data.scale_velocities(use_transformation=True)

    #trim skewers to the minimal length
    lambda_buffer = 100. #Angstroms
    z_lower_cut = np.min(args.z_values) - args.z_width/2.
    z_upper_cut = np.max(args.z_values) + args.z_width/2.
    lambda_min_val = np.min([args.lambda_min,lya*(1 + z_lower_cut)]) - lambda_buffer
    lambda_max_val = lya*(1 + z_upper_cut) + lambda_buffer
    data.trim_skewers(lambda_min_val,args.min_cat_z,lambda_max=lambda_max_val,whole_lambda_range=False)

    #Add small scale fluctuations to the skewers.
    generator = np.random.RandomState(seed)
    data.add_small_scale_fluctuations(args.cell_size,generator,white_noise=False,lambda_min=0.0,IVAR_cutoff=args.lambda_rest_max,use_transformation=True,remove_P1D_data=remove_P1D_data)

    #print('{:3.2f} checkpoint extra power'.format(time.time()-t))
    t = time.time()

    #If needed, compute the physical skewers
    if args.skewer_type == 'gaussian':
        data.compute_physical_skewers()

    #Compute the tau skewers and add RSDs
    data.compute_tau_skewers(data.lya_absorber)
    #print('{:3.2f} checkpoint tau'.format(time.time()-t))
    t = time.time()

    if prep:
        data.compute_RSD_weights(thermal=False)

        #print(pixel,'{:3.2f} checkpoint RSD weights measured'.format(time.time()-t))
        t = time.time()

        #b_eta_weights_dict = data.get_bias_eta_RSD_weights(args.z_values,d=d_eta,z_width=args.z_width,lambda_buffer=lambda_buffer)
        b_eta_weights_dict = None

        #print(pixel,'{:3.2f} checkpoint b_eta weights measured'.format(time.time()-t))
        t = time.time()

        return (pixel,data.RSD_weights,b_eta_weights_dict)
    else:
        # NOTE(review): shadows the RSD_weights parameter with the
        # module-level lookup — confirm intent.
        RSD_weights = RSD_weights_dict[pixel]
        # bias_eta_weights is only consumed by the commented-out
        # add_bias_eta_measurement call below.
        bias_eta_weights = bias_eta_weights_dict[pixel]
        data.add_all_RSDs(thermal=False,weights=RSD_weights)

        #Compute and store the transmission skewers
        data.store_all_transmission_skewers()

        #print('{:3.2f} checkpoint RSDs'.format(time.time()-t))
        t = time.time()

        measurements = []
        times_m = np.zeros(6)
        for z_value in args.z_values:
            ID = n
            t_m = time.time()
            measurement = tuning.function_measurement(ID,z_value,args.z_width,data.N_qso,n,k1,C0,C1,C2,texp,D0,D1,D2,pixels=[pixel])
            times_m[0] += time.time() - t_m
            t_m = time.time()
            measurement.add_mean_F_measurement(data)
            times_m[1] += time.time() - t_m
            t_m = time.time()
            measurement.add_Pk1D_measurement(data)
            times_m[2] += time.time() - t_m
            t_m = time.time()
            #measurement.add_sigma_dF_measurement(data)
            times_m[3] += time.time() - t_m
            t_m = time.time()
            measurement.add_bias_delta_measurement(data,d=d_delta,weights=RSD_weights)
            times_m[4] += time.time() - t_m
            t_m = time.time()
            #measurement.add_bias_eta_measurement(data,d=d_eta,weights_dict=bias_eta_weights,lambda_buffer=lambda_buffer)
            times_m[5] += time.time() - t_m
            measurements += [measurement]

        #print('{:3.2f} checkpoint measurements'.format(time.time()-t))
        #print('--> measurement_times: {:3.2f}, {:3.2f}, {:3.2f}, {:3.2f}, {:3.2f}, {:3.2f}'.format(times_m[0],times_m[1],times_m[2],times_m[3],times_m[4],times_m[5]))

        return measurements