Example #1
def main(ns, ns1, ns2):
    if ns.verbose:
        setup_logging('info')

    print('\nCatalogs are:\n%s\n%s\n' % (ns1, ns2))
    cat1 = read_cat(ns1)
    mesh1 = cat1.to_mesh(interlaced=True,
                         compensated=True,
                         window='tsc',
                         Nmesh=ns.nmesh).paint(mode='complex')
    cat2 = read_cat(ns2)

    if ns.unique_k:
        dk = 0
    else:
        dk = None

    rm = FFTPower(mesh1, second=mesh1, mode='1d', dk=dk)
    nmin = numpy.unique(
        numpy.int32(
            numpy.logspace(numpy.log10(ns.nmin),
                           numpy.log10(ns.nmax),
                           ns.nn,
                           endpoint=True)))
    nmin0 = cat1.comm.allreduce(cat2['Length'].min().compute(), MPI.MIN)
    nmax0 = cat1.comm.allreduce(cat2['Length'].max().compute(), MPI.MAX)
    nmin = nmin[nmin >= nmin0]
    nmin = nmin[nmin < nmax0]

    save_bs(ns.output, 'a-matter', rm)

    r = []
    b = []
    a = []
    for nmin1 in nmin:
        cat2 = read_cat(ns2, nmin1)
        mesh2 = cat2.to_mesh(interlaced=True,
                             compensated=True,
                             window='tsc',
                             Nmesh=ns.nmesh)

        r.append(FFTPower(mesh1, second=mesh2, mode='1d', dk=dk))

        save_bs(ns.output, 'x-nmin-%05d' % nmin1, r[-1])
        bias = fit_bias(r[-1], rm)
        abundance = r[-1].attrs['N2'] / cat2.attrs['BoxSize'][0]**3
        b.append(bias)
        a.append(abundance)
        if cat1.comm.rank == 0:
            print('Bias of N=', nmin1, bias, abundance)

    basename = ns.output.rsplit('.', 1)[0]

    if cat1.comm.rank == 0:
        numpy.savetxt(basename + '-bias.txt', numpy.array([nmin, b, a]).T)

    if ns.with_plot:
        if cat1.comm.rank == 0:
            figure = make_plot(rm, r, nmin)
            figure.savefig(basename + '.png')
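
# Hypothetical sketch of the fit_bias helper used above (the original does
# not show it): estimate the linear bias as the mean ratio of the cross
# power to the matter auto power on large scales. The kmax cut of
# 0.1 h/Mpc is an assumption, not taken from the original.
def fit_bias(r_cross, r_auto, kmax=0.1):
    k = r_auto.power['k']
    sel = k < kmax                               # keep quasi-linear scales only
    Pxm = r_cross.power['power'].real[sel]       # cross halo-matter power
    Pmm = r_auto.power['power'].real[sel]        # matter auto power
    return (Pxm / Pmm).mean()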
Example #2
def main(ns, ns1):
    if ns.verbose:
        setup_logging('info')

    cat1 = read_cat(ns1)
    mesh1 = cat1.to_mesh(interlaced=True, compensated=True, window='tsc', Nmesh=ns.nmesh)

    if ns.output_dataset is None:
        ns.output_dataset = 'N%04d' % ns.nmesh

    mesh1.save(ns.output, dataset=ns.output_dataset)
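
# Assumed round trip (not from the original): the field written above by
# mesh1.save() can be read back with BigFileMesh and painted on demand.
# The path and dataset below are placeholders matching ns.output and the
# 'N%04d' naming scheme with nmesh=64.
from nbodykit.lab import BigFileMesh

mesh = BigFileMesh('mesh_output.bigfile', dataset='N0064')
rfield = mesh.paint(mode='real')   # RealField on the Nmesh^3 grid
print(rfield.cmean())              # ~1 for a compensated overdensity mesh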
Example #3
def main(ns, ns1, ns2):

    if ns.output_dataset is None:
        ns.output_dataset = '%sS-HID-%04d' % (ns2.dataset, ns.haloid)

    los = dict(x=[1, 0, 0], y=[0, 1, 0], z=[0, 0, 1])[ns.los]

    if ns.verbose:
        setup_logging('info')

    cat1 = read_cat(ns1)
    cat2 = read_cat(ns2)

    mask = (cat1.Index == ns.haloid).nonzero()[0]
    pos = (cat1['Position'][mask]).compute()
    if len(pos) > 0:
        pos = pos[0]
    else:
        pos = numpy.zeros((3))


    BoxSize = cat2.attrs['BoxSize']

    pos = cat1.comm.allreduce(pos) % BoxSize

    if cat1.comm.rank == 0:
        cat1.logger.info('Center position for halo %d is at %s' % (ns.haloid, str(pos)))

    r = (cat2['Position'] - pos)

    r = r + ((r > BoxSize * 0.5) * -BoxSize
          + (r < -BoxSize * 0.5) * BoxSize)

    r = (r * los).sum(axis=-1) ** 2
    sel = r < (ns.thickness * 0.5) ** 2

    catsel = cat2[sel]

    if cat2.comm.rank == 0:
        cat2.logger.info('Selected %d objects' % catsel.csize)

    catsel.attrs['BoxCenter'] = pos

    columns = sorted(set(catsel.columns) - set(['Weight', 'Selection', 'Value']))

    catsel.save(ns.output, columns=columns, dataset=ns.output_dataset)

    if catsel.comm.rank == 0:
        catsel.logger.info('saved to %s : %s' % (ns.output, ns.output_dataset))
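
# Self-contained illustration (not from the original) of the minimum-image
# wrapping used above: separations are folded into [-BoxSize/2, BoxSize/2)
# before the line-of-sight distance is compared to the slab half-thickness.
import numpy

BoxSize = numpy.array([100.0, 100.0, 100.0])
r = numpy.array([[90.0, -70.0, 10.0]])   # raw separation, may exceed L/2
r = r + ((r > BoxSize * 0.5) * -BoxSize
       + (r < -BoxSize * 0.5) * BoxSize)
print(r)   # [[-10.  30.  10.]]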
Example #4
def main(ns, ns1, ns2):
    if ns.verbose:
        setup_logging('info')

    cat1 = read_cat(ns1)
    mesh1 = cat1.to_mesh(interlaced=True, compensated=True, window='tsc', Nmesh=ns.nmesh)

    if ns1 is not ns2:
        cat2 = read_cat(ns2)
        mesh2 = cat2.to_mesh(interlaced=True, compensated=True, window='tsc', Nmesh=ns.nmesh)
    else:
        mesh2 = None

    if ns1.with_rsd != ns2.with_rsd:
        warnings.warn("Two catalogs have different with-rsd settings, this may not be intended.")

    if ns.mode is None:
        if ns1.with_rsd or ns2.with_rsd:
            ns.mode = '2d'
        else:
            ns.mode = '1d'

    if ns.unique_k:
        dk = 0
    else:
        dk = None

    r = FFTPower(mesh1, second=mesh2, mode=ns.mode, dk=dk)

    basename = ns.output.rsplit('.', 1)[0]
    if ns.output.endswith('.json'):
        r.save(ns.output)
    elif ns.output.endswith('.txt'):
        if cat1.comm.rank == 0:
            for var in r.power.data.dtype.names:
                numpy.savetxt(basename + '-%s.txt' % var,
                    r.power[var].real
                )

    if ns.with_plot:
        if cat1.comm.rank == 0:
            figure = make_plot(r)
            figure.savefig(basename + '.png')
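
# Hypothetical sketch of the make_plot helper referenced above (the original
# does not show it): plot k vs. P(k) for a 1d FFTPower result on log axes.
import matplotlib
matplotlib.use('Agg')   # headless backend for batch jobs
import matplotlib.pyplot as plt

def make_plot(r):
    figure, ax = plt.subplots()
    k = r.power['k']
    P = r.power['power'].real - r.attrs.get('shotnoise', 0.0)
    ax.loglog(k, P)
    ax.set_xlabel(r'$k$ [$h \, \mathrm{Mpc}^{-1}$]')
    ax.set_ylabel(r'$P(k)$ [$h^{-3} \, \mathrm{Mpc}^3$]')
    return figure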
Example #5
def main():
    """
    Calculate BOSS window function multipoles.
    
    See https://nbodykit.readthedocs.io/en/latest/api/_autosummary/nbodykit.algorithms.html#nbodykit.algorithms.SurveyDataPairCount

    and 

    https://arxiv.org/pdf/1607.03150.pdf eq 22
    """

    # Parse command line arguments
    ap = ArgumentParser()

    ap.add_argument('--rmin',
                    default=100.0,
                    type=float,
                    help='Minimum separation r between pairs.')

    ap.add_argument('--rmax',
                    default=500.0,
                    type=float,
                    help='Maximum separation r between pairs.')

    ap.add_argument('--Nr',
                    default=20,
                    type=int,
                    help='Number of bins for separation r between pairs.')

    ap.add_argument('--Nmu',
                    default=10,
                    type=int,
                    help='Number of bins for angle mu w.r.t. line of sight.')

    ap.add_argument('--download_dir',
                    default='$SCRATCH/lss/sdss_data/',
                    help='Where to store/read downloaded data.')

    default_sample = 'DR12v5_CMASSLOWZTOT_South'
    #default_sample = 'DR12v5_LOWZ_South'
    ap.add_argument(
        '--boss_sample',
        default=default_sample,
        help=
        'Which BOSS sample to use. See https://data.sdss.org/sas/dr12/boss/lss/'
    )

    ap.add_argument('--out_dir',
                    default='window/',
                    help='Folder where to store measured window function.')

    ap.add_argument('--out_base',
                    default='paircount',
                    help='Prefix for where to store measured window function.')

    ap.add_argument('--FKP', default=0, type=int, help='Include FKP weight.')

    ap.add_argument('--randoms1_catalog_id',
                    default=0,
                    type=int,
                    help='ID for randoms1 catalog')

    ap.add_argument('--randoms2_catalog_id',
                    default=1,
                    type=int,
                    help='ID for randoms2 catalog')

    ap.add_argument('--subsample_fraction',
                    default=1e-4,
                    type=float,
                    help='If less than 1, use random subsample of randoms.')

    cmd_args = ap.parse_args()

    setup_logging()
    comm = CurrentMPIComm.get()

    # download the data to the current directory
    download_dir = os.path.expandvars(cmd_args.download_dir)
    if comm.rank == 0:
        print('download_dir:', download_dir)
    boss_sample = cmd_args.boss_sample

    if comm.rank == 0:
        download_data(download_dir,
                      boss_sample=boss_sample,
                      random_catalog_id=cmd_args.randoms1_catalog_id)
        download_data(download_dir,
                      boss_sample=boss_sample,
                      random_catalog_id=cmd_args.randoms2_catalog_id)

    # NOTE: change this path if you downloaded the data somewhere else!
    randoms1_path = os.path.join(
        download_dir,
        'random%d_%s.fits' % (cmd_args.randoms1_catalog_id, boss_sample))
    randoms2_path = os.path.join(
        download_dir,
        'random%d_%s.fits' % (cmd_args.randoms2_catalog_id, boss_sample))

    # initialize the FITS catalog objects for data and randoms
    randoms1 = FITSCatalog(randoms1_path)
    randoms2 = FITSCatalog(randoms2_path)

    if comm.rank == 0:
        print('randoms1 columns = ', randoms1.columns)
        print('randoms2 columns = ', randoms2.columns)

    # Select redshift range
    if boss_sample in ['DR12v5_LOWZ_South', 'DR12v5_LOWZ_North']:
        ZMIN = 0.15
        ZMAX = 0.43
    elif boss_sample in ['DR12v5_CMASS_South', 'DR12v5_CMASS_North']:
        ZMIN = 0.43
        ZMAX = 0.7
    elif boss_sample in [
            'DR12v5_CMASSLOWZTOT_South', 'DR12v5_CMASSLOWZTOT_North'
    ]:
        ZMIN = 0.5
        ZMAX = 0.75
    else:
        raise Exception('Must specify ZMIN and ZMAX for boss_sample=%s' %
                        str(boss_sample))

    # slice the randoms
    valid1 = (randoms1['Z'] > ZMIN) & (randoms1['Z'] < ZMAX)
    randoms1 = randoms1[valid1]
    valid2 = (randoms2['Z'] > ZMIN) & (randoms2['Z'] < ZMAX)
    randoms2 = randoms2[valid2]

    if cmd_args.subsample_fraction < 1.0:
        # Create random subsamples
        rng1 = MPIRandomState(randoms1.comm, seed=123, size=randoms1.size)
        rr1 = rng1.uniform(0.0, 1.0, itemshape=(1, ))
        randoms1 = randoms1[rr1[:, 0] < cmd_args.subsample_fraction]
        rng2 = MPIRandomState(randoms2.comm, seed=456, size=randoms2.size)
        rr2 = rng2.uniform(0.0, 1.0, itemshape=(1, ))
        randoms2 = randoms2[rr2[:, 0] < cmd_args.subsample_fraction]

    Nrandoms1 = randoms1.csize
    Nrandoms2 = randoms2.csize
    if comm.rank == 0:
        print('Nrandoms1:', Nrandoms1)
        print('Nrandoms2:', Nrandoms2)

    # weights
    if cmd_args.FKP == 0:
        randoms1['Weight'] = 1.0
        randoms2['Weight'] = 1.0
    else:
        randoms1['Weight'] = randoms1['WEIGHT_FKP']
        randoms2['Weight'] = randoms2['WEIGHT_FKP']

    # the fiducial BOSS DR12 cosmology
    cosmo = cosmology.Cosmology(h=0.676).match(Omega0_m=0.31)

    # bins for separation
    edges_r = np.logspace(np.log10(cmd_args.rmin),
                          np.log10(cmd_args.rmax),
                          num=cmd_args.Nr + 1)
    if comm.rank == 0:
        print('edges_r', edges_r)

    if comm.rank == 0:
        print("Start pair count...")
    paircount = SurveyDataPairCount(mode='2d',
                                    first=randoms1,
                                    second=randoms2,
                                    edges=edges_r,
                                    cosmo=cosmo,
                                    Nmu=cmd_args.Nmu,
                                    pimax=None,
                                    ra='RA',
                                    dec='DEC',
                                    redshift='Z',
                                    weight='Weight',
                                    show_progress=True,
                                    domain_factor=4)

    if comm.rank == 0:
        print("Done pair count")

    if comm.rank == 0:
        print('paircount', paircount)
        for key in paircount.attrs:
            print("%s = %s" % (key, str(paircount.attrs[key])))

    # save results
    if comm.rank == 0:
        if not os.path.exists(cmd_args.out_dir):
            os.makedirs(cmd_args.out_dir)

    # save window to file, in nbodykit format
    out_file_base = os.path.join(
        cmd_args.out_dir,
        '%s_%s_rmin%.1f_rmax%.1f_Nr%d_Nmu%d_randID1%d_randID2%d_SUB%g_FKP%d' %
        (cmd_args.out_base, boss_sample, cmd_args.rmin, cmd_args.rmax,
         cmd_args.Nr, cmd_args.Nmu, cmd_args.randoms1_catalog_id,
         cmd_args.randoms2_catalog_id, cmd_args.subsample_fraction,
         cmd_args.FKP))

    fname = '%s.nbk.dat' % out_file_base
    paircount.save(fname)
    print('Wrote %s' % fname)
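
# Sketch of a possible follow-up step (not in the original): project the
# mu-binned pair counts onto Legendre multipoles, in the spirit of eq. 22
# of arXiv:1607.03150. The attribute names ('npairs', the 'mu' coordinate)
# are assumptions; check them against your nbodykit version.
import numpy as np
from scipy.special import eval_legendre

def window_multipoles(paircount, ells=(0, 2, 4)):
    RR = paircount.pairs['npairs']        # (Nr, Nmu) raw pair counts
    mu = paircount.pairs.coords['mu']     # (Nmu,) mu bin centers
    Q = {}
    for ell in ells:
        # (2*ell + 1) times the mu-average of RR(r, mu) * L_ell(mu)
        Q[ell] = (2 * ell + 1) * np.mean(RR * eval_legendre(ell, mu), axis=-1)
    return Q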
Example #6
def main():
    """
    Measure BOSS power spectrum.
    Based on https://nbodykit.readthedocs.io/en/latest/cookbook/boss-dr12-data.html.
    """

    # Parse command line arguments
    ap = ArgumentParser()

    ap.add_argument('--Nmesh', default=64, type=int,
        help='Nmesh used to compute power spectrum.')

    ap.add_argument('--download_dir', default='$SCRATCH/lss/sdss_data/', 
        help='Where to store/read downloaded data.')

    default_sample = 'DR12v5_CMASSLOWZTOT_South'
    #default_sample = 'DR12v5_LOWZ_South'
    ap.add_argument('--boss_sample', default=default_sample, 
        help='Which BOSS sample to use. See https://data.sdss.org/sas/dr12/boss/lss/')

    ap.add_argument('--out_dir', default='power/', 
        help='Folder where to store measured power spectrum.')

    ap.add_argument('--out_base', default='power', 
        help='Prefix for where to store measured power spectrum.')

    ap.add_argument('--plot', dest='plot', action='store_true', 
        help='Plot power spectra.')

    ap.add_argument('--subtract_shot', dest='subtract_shot', action='store_true',
        help='Subtract shot noise from monopole power spectrum.')

    ap.add_argument('--random_catalog_id', default=0, type=int,
        help='Which random catalog to use.')

    cmd_args = ap.parse_args()


    # Setup things
    if cmd_args.plot:
        from nbodykit import style
        import matplotlib.pyplot as plt
        plt.style.use(style.notebook)

    setup_logging()
    comm = CurrentMPIComm.get()

    # download the data to the current directory
    download_dir = os.path.expandvars(cmd_args.download_dir)
    print('download_dir:', download_dir)
    boss_sample = cmd_args.boss_sample

    if comm.rank == 0:
        download_data(download_dir, boss_sample=boss_sample,
            random_catalog_id=cmd_args.random_catalog_id)

    # NOTE: change this path if you downloaded the data somewhere else!
    data_path = os.path.join(download_dir, 'galaxy_%s.fits' % boss_sample)
    randoms_path = os.path.join(download_dir, 'random%d_%s.fits' % (
        cmd_args.random_catalog_id, boss_sample))

    # initialize the FITS catalog objects for data and randoms
    data = FITSCatalog(data_path)
    randoms = FITSCatalog(randoms_path)

    print('data columns = ', data.columns)
    print('randoms columns = ', randoms.columns)


    # Select redshift range
    if boss_sample in ['DR12v5_LOWZ_South', 'DR12v5_LOWZ_North']:
        ZMIN = 0.15
        ZMAX = 0.43
    elif boss_sample in ['DR12v5_CMASS_South', 'DR12v5_CMASS_North']:
        ZMIN = 0.43
        ZMAX = 0.7
    elif boss_sample in ['DR12v5_CMASSLOWZTOT_South', 'DR12v5_CMASSLOWZTOT_North']:
        ZMIN = 0.5
        ZMAX = 0.75
    else:
        raise Exception('Must specify ZMIN and ZMAX for boss_sample=%s' % str(boss_sample))

    # slice the randoms
    valid = (randoms['Z'] > ZMIN)&(randoms['Z'] < ZMAX)
    randoms = randoms[valid]

    # slice the data
    valid = (data['Z'] > ZMIN)&(data['Z'] < ZMAX)
    data = data[valid]

    Ngalaxies = data.csize
    print('Ngalaxies:', Ngalaxies)

    Nrandoms = randoms.csize
    print('Nrandoms:', Nrandoms)

    # the fiducial BOSS DR12 cosmology
    cosmo = cosmology.Cosmology(h=0.676).match(Omega0_m=0.31)

    # add Cartesian position column
    data['Position'] = transform.SkyToCartesian(data['RA'], data['DEC'], data['Z'], cosmo=cosmo)
    randoms['Position'] = transform.SkyToCartesian(randoms['RA'], randoms['DEC'], randoms['Z'], cosmo=cosmo)


    randoms['WEIGHT'] = 1.0
    data['WEIGHT'] = data['WEIGHT_SYSTOT'] * (data['WEIGHT_NOZ'] + data['WEIGHT_CP'] - 1.0)


    # combine the data and randoms into a single catalog
    fkp = FKPCatalog(data, randoms)


    mesh = fkp.to_mesh(Nmesh=cmd_args.Nmesh, nbar='NZ', fkp_weight='WEIGHT_FKP', comp_weight='WEIGHT', window='tsc')

    # compute the multipoles
    r = ConvolvedFFTPower(mesh, poles=[0,2,4], dk=0.005, kmin=0.)

    for key in r.attrs:
        print("%s = %s" % (key, str(r.attrs[key])))

    # save results
    if not os.path.exists(cmd_args.out_dir):
        os.makedirs(cmd_args.out_dir)

    # save power to file, in nbodykit format
    fname = os.path.join(cmd_args.out_dir, 
        '%s_%s_Nmesh%d.nbk.dat' % (cmd_args.out_base, boss_sample, cmd_args.Nmesh))
    r.save(fname)
    print('Wrote %s' % fname)

    # Also save plain txt file
    poles = r.poles
    Nk = poles['k'].shape[0]
    mat = np.zeros((Nk, 4)) + np.nan
    header = 'boss_sample=%s\n' % boss_sample
    header += 'ZMIN=%g\n' % ZMIN
    header += 'ZMAX=%g\n' % ZMAX
    header += 'Nmesh=%d\n' % cmd_args.Nmesh
    header += 'Ngalaxies=%d\n' % Ngalaxies
    if cmd_args.subtract_shot:
        header += 'subtract_shot=True\n'
    else:
        header += 'subtract_shot=False\n'
    header += 'random_catalog_id=%d\n' % cmd_args.random_catalog_id
    header += 'Columns: k, P_0, P_2, P_4'
    mat[:,0] = poles['k']
    if cmd_args.subtract_shot:
        mat[:,1] = poles['power_0'].real - r.attrs['shotnoise']
    else:
        mat[:,1] = poles['power_0'].real
    mat[:,2] = poles['power_2'].real
    mat[:,3] = poles['power_4'].real
    # save
    out_file_base = os.path.join(cmd_args.out_dir, 
        '%s_%s_Nmesh%d_subtrShot%d_randID%d' % (
            cmd_args.out_base, boss_sample, cmd_args.Nmesh,
            int(cmd_args.subtract_shot),
            cmd_args.random_catalog_id
            ))
    fname = out_file_base + '.txt'
    np.savetxt(fname, mat, header=header)
    print('Wrote %s' % fname)

    if cmd_args.plot:
        # run code with --plot to plot
        for ell in [0, 2, 4]:
            label = r'$\ell=%d$' % (ell)
            P = poles['power_%d' % ell].real
            if cmd_args.subtract_shot:
                if ell == 0: P = P - r.attrs['shotnoise']
            plt.plot(poles['k'], poles['k']*P, label=label)

        # format the axes
        plt.legend(loc=0)
        plt.xlabel(r"$k$ [$h \ \mathrm{Mpc}^{-1}$]")
        plt.ylabel(r"$k \ P_\ell$ [$h^{-2} \ \mathrm{Mpc}^2$]")
        plt.xlim(0.01, 0.25)

        fname = out_file_base + '.pdf'
        plt.savefig(fname)
        print('Made %s' % fname)
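
# Assumed post-processing (not in the original): the .nbk.dat file written
# above can be read back with ConvolvedFFTPower.load; the path below matches
# the script's defaults.
from nbodykit.lab import ConvolvedFFTPower

r = ConvolvedFFTPower.load('power/power_DR12v5_CMASSLOWZTOT_South_Nmesh64.nbk.dat')
P0 = r.poles['power_0'].real - r.attrs['shotnoise']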
Example #7
def main():
    """
    Combine source fields to get proxy of a target field. This is stage 0 of 
    reconstruction, and can be used to quantify Perror of a bias model.

    For example:

      - Combine delta_m, delta_m^2, s^2 to get proxy of target=delta_halo.

      - Combine different mass-weighted delta_h fields to get proxy of
        target=delta_m.

    Usage examples:
      ./run.sh python main_calc_Perr_test.py
    or 
      ./run.sh mpiexec -n 4 python main_calc_Perr_test.py --SimSeed 403
    """

    #####################################
    # PARSE COMMAND LINE ARGS
    #####################################
    ap = ArgumentParser()

    ap.add_argument('--SimSeed',
                    type=int,
                    default=403,
                    help='Simulation seed to load.')

    ap.add_argument('--HaloMassString',
                    default='13.8_15.1',
                    help="Halo mass string, for example '13.8_15.1'.")

    cmd_args = ap.parse_args()

    #####################################
    # OPTIONS
    #####################################
    opts = OrderedDict()

    # Bump this when changing code without changing options. Otherwise pickle
    # loading might wrongly read old pickles.
    opts['main_calc_Perr_test_version'] = '1.5'

    # Simulation options. Will be used by path_utils to get input path, and
    # to compute deltalin at the right redshift.
    seed = cmd_args.SimSeed
    opts['sim_opts'] = parameters_ms_gadget.MSGadgetSimOpts.load_default_opts(
        sim_name='ms_gadget_test_data',
        sim_seed=seed,
        halo_mass_string=cmd_args.HaloMassString)

    # Grid options.
    Ngrid = 64
    opts['grid_opts'] = parameters.GridOpts(
        Ngrid=Ngrid,
        kmax=2.0*np.pi/opts['sim_opts'].boxsize * float(Ngrid)/2.0,
        grid_ptcle2grid_deconvolution=None
        )

    # Options for measuring power spectrum. Use defaults.
    opts['power_opts'] = parameters.PowerOpts()

    # Transfer function options. See lsstools.parameters.py for details.
    opts['trf_fcn_opts'] = parameters.TrfFcnOpts(
        Rsmooth_for_quadratic_sources=0.1,
        Rsmooth_for_quadratic_sources2=0.1,
        N_ortho_iter=1,
        orth_method='CholeskyDecomp',
        interp_kind='manual_Pk_k_bins'
        )

    # External grids to load: deltalin, delta_m, shifted grids
    opts['ext_grids_to_load'] = opts['sim_opts'].get_default_ext_grids_to_load(
        Ngrid=opts['grid_opts'].Ngrid)

    # Catalogs to read
    opts['cats'] = opts['sim_opts'].get_default_catalogs()

    # Specify bias expansions to test
    opts['trf_specs'] = []

    # Quadratic Lagrangian bias: delta_Z + b1 deltalin(q+Psi) + b2 
    # [deltalin^2-<deltalin^2>](q+Psi) + bG2 [G2](q+Psi)
    opts['trf_specs'].append(
        TrfSpec(linear_sources=[
            'deltalin_SHIFTEDBY_deltalin',
            'deltalin_growth-mean_SHIFTEDBY_deltalin',
            'deltalin_G2_SHIFTEDBY_deltalin'
        ],
                fixed_linear_sources=['1_SHIFTEDBY_deltalin'],
                field_to_smoothen_and_square=None,
                quadratic_sources=[],
                target_field='delta_h',
                save_bestfit_field=
                'hat_delta_h_from_1_Tdeltalin2G2_SHIFTEDBY_PsiZ'))

    # Save results
    opts['keep_pickle'] = False
    opts['pickle_file_format'] = 'dill'
    opts['pickle_path'] = '$SCRATCH/perr/pickle/'

    # Save some additional power spectra that are useful for plotting later
    opts['Pkmeas_helper_columns'] = [
        'delta_h', 'delta_m', '1_SHIFTEDBY_deltalin', 'deltalin'
    ]

    # Save grids for 2d slice plots and histograms
    opts['save_grids4plots'] = False
    opts['grids4plots_base_path'] = '$SCRATCH/perr/grids4plots/'
    opts['grids4plots_R'] = 0.0  # Gaussian smoothing applied to grids4plots

    # Cache path
    opts['cache_base_path'] = '$SCRATCH/perr/cache/'


    # Run the program given the above opts.
    outdict = model_error.calculate_model_error(opts)


    # Compare vs expected result.
    residual_key = '[hat_delta_h_from_1_Tdeltalin2G2_SHIFTEDBY_PsiZ]_MINUS_[delta_h]'
    Perr = outdict['Pkmeas'][(residual_key, residual_key)].P
    Perr_expected = np.array([
        9965.6, 17175.8, 22744.4, 19472.3, 19081.2, 19503.4, 19564.9,
        18582.9, 19200.1, 16911.3, 16587.4, 16931.9, 15051.0, 13835.1,
        13683.8, 13109.9, 12353.5, 11900.2, 11085.1, 11018.4, 10154.0,
        9840.7, 8960.6, 8484.1, 7942.2, 7426.7, 6987.8, 6578.1, 6269.1,
        5810.7, 5511.7
    ])

    setup_logging()
    comm = CurrentMPIComm.get()
    logger = logging.getLogger('TestPerrCalc')

    if comm.rank == 0:
        Perr_lst = ['%.1f' % a for a in list(Perr)]
        Perr_expected_lst = ['%.1f' % a for a in list(Perr_expected)]
        logger.info('Perr:\n%s' % str(','.join(Perr_lst)))
        logger.info('Expected Perr:\n%s' % str(','.join(Perr_expected_lst)))
        if np.allclose(Perr, Perr_expected, rtol=1e-3):
            logger.info('TEST Perr: OK')
        else:
            logger.info('TEST Perr: FAILED')
            raise Exception('Test failed')
Example #8


from nbodykit.transform import SkyToCartesian
from nbodykit.cosmology import Cosmology
import nbodykit.lab as nb
from nbodykit import setup_logging, style
setup_logging() # turn on logging to screen

#from mpi4py import MPI
#comm = MPI.COMM_WORLD
#size = comm.Get_size()
#rank = comm.Get_rank()   

from nbodykit import CurrentMPIComm
comm = CurrentMPIComm.get()
rank = comm.rank
size = comm.size

if rank == 0:
    import os
    from argparse import ArgumentParser
    ap = ArgumentParser(description='Power Spectrum (NBODYKIT)')
    ap.add_argument('--galaxy_path',  default='/Volumes/TimeMachine/data/eboss/v6/eBOSS_QSO_clustering_NGC_v6.dat.fits')
    ap.add_argument('--random_path',  default='/Volumes/TimeMachine/data/eboss/v6/eBOSS_QSO_clustering_NGC_v6.ran.fits')
    ap.add_argument('--output_path',  default='/Volumes/TimeMachine/data/eboss/v6/results_ngc/clustering/pk_256_p8_2p2.json')
    ap.add_argument('--nmesh',        default=256, type=int)
    ap.add_argument('--zlim',         nargs='*',   type=float, default=[0.8, 2.2])
    ap.add_argument('--sys_tot',      action='store_true')
    ns = ap.parse_args()
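
# The snippet parses arguments on rank 0 only, so the other ranks still need
# the namespace. A common continuation (an assumption, not shown in the
# original) is to broadcast the parsed namespace to all ranks:
if rank != 0:
    ns = None
ns = comm.bcast(ns, root=0)
zmin, zmax = ns.zlim   # e.g. [0.8, 2.2]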
Example #9
from runtests.mpi import MPITest
from nbodykit.lab import *
from nbodykit.tutorials import DemoHaloCatalog
from nbodykit import setup_logging
import shutil
import pytest


setup_logging()

@MPITest([1, 4])
def test_no_seed(comm):

    halos = DemoHaloCatalog('bolshoi', 'rockstar', 0.5, comm=comm)
    hod = halos.populate(Zheng07Model)

    # seed is set randomly
    assert hod.attrs['seed'] is not None

@MPITest([1, 4])
def test_bad_model(comm):

    halos = DemoHaloCatalog('bolshoi', 'rockstar', 0.5, comm=comm)
    with pytest.raises(TypeError):
        hod = halos.populate('Zheng07Model')


@MPITest([1, 4])
def test_failed_populate(comm):

    # the demo halos
Example #10
import numpy as np
import re, os, sys, yaml
from pmesh.pm import ParticleMesh
from nbodykit.lab import BigFileCatalog, BigFileMesh, MultipleSpeciesCatalog, FFTPower, FieldMesh
from nbodykit import setup_logging
from mpi4py import MPI

sys.path.append('../utils/')
import HImodels
# enable logging so we have some clue what's going on.
setup_logging('info')

#Get parameter file
cfname = sys.argv[1]

with open(cfname, 'r') as ymlfile:
    args = yaml.load(ymlfile, Loader=yaml.FullLoader)

#
nc = args['nc']
bs = args['bs']
alist = args['alist']
#
#
#Global, fixed things
cosmodef = {'omegam': 0.309167, 'h': 0.677, 'omegab': 0.048}
pm = ParticleMesh(BoxSize=bs, Nmesh=[nc, nc, nc])
rank = pm.comm.rank
comm = pm.comm
if rank == 0: print(args)
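
# A minimal parameter file for this script might look like the following
# (an assumed example; only the keys read above are shown):
#
#   nc: 256
#   bs: 400.0
#   alist: [0.2000, 0.3333, 0.5000]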
Example #11
def main(argv):
    """
    Calc cumulants of simulated density.

    Run using 
      ./run_nbk03.sh python test_linear_term_fisher_nbk03.py
    or 
      ./run_nbk03.sh mpiexec -n 2 python test_linear_term_fisher_nbk03.py
    """

    #####################################
    # OPTIONS
    #####################################

    opts = OrderedDict()

    ## ANALYSIS
    opts['Ngrid'] = 64

    # k bin width for power spectra, in units of k_f = 2*pi/L. Must be >= 1.0; usually 1., 2., or 3.
    opts['k_bin_width'] = 1.0

    if True:
        # L=500 ms_gadget sims produced with MP-Gadget, 1536^3 particles
        opts['sim_name'] = 'ms_gadget'
        opts['sim_irun'] = 4
        # use value from cmd line b/c later options depend on this
        opts['sim_seed'] = 403
        opts['ssseed'] = 40000 + opts[
            'sim_seed']  # seed used to draw subsample
        opts['sim_Ntimesteps'] = None  # N-body, so uses thousands of time steps
        opts['sim_Nptcles'] = 1536
        opts['sim_boxsize'] = 500.0
        opts['boxsize'] = opts['sim_boxsize']
        opts['sim_wig_now_string'] = 'wig'
        # scale factor of simulation snapshot (only used to rescale deltalin -- do not change via arg!)
        opts['sim_scale_factor'] = 0.6250
        # halo mass
        opts['halo_mass_string'] = '13.8_15.1'

        # linear density (ICs of the sims)
        opts['ext_grids_to_load'] = OrderedDict()
        if False:
            # deltanonl painted from all 1536^3 DM particles (includes CIC artifacts, small shot noise)
            opts['ext_grids_to_load']['delta_m'] = {
                'dir':
                'snap_%.4f_PtcleDensity_Ng%d' %
                (opts['sim_scale_factor'], opts['Ngrid']),
                'file_format':
                'nbkit_BigFileGrid',
                'dataset_name':
                'Field',
                'scale_factor':
                opts['sim_scale_factor'],
                'nbkit_normalize':
                True,
                'nbkit_setMean':
                0.0
            }

    # ######################################################################
    # Catalogs to read
    # ######################################################################
    opts['cats'] = OrderedDict()

    if opts['sim_name'] == 'ms_gadget':

        tmp_halo_dir = 'nbkit_fof_%.4f/ll_0.200_nmin25' % opts[
            'sim_scale_factor']
        ## nonuniform catalogs without ptcle masses
        if True:
            # halos without mass weight, narrow mass cuts: 10.8..11.8..12.8..13.8..15.1
            opts['cats']['delta_h'] = {
                'in_fname':
                "%s/fof_nbkfmt.hdf5_BOUNDS_log10M_%s.hdf5" %
                (tmp_halo_dir, opts['halo_mass_string']),
                'weight_ptcles_by':
                None
            }
        if False:
            # halos not weighted by mass but including mass info in file, broad mass cut
            opts['cats']['delta_h'] = {
                'in_fname':
                "%s/fof_nbkfmt.hdf5_WithMassCols.hdf5_BOUNDS_log10M_%s.hdf5" %
                (tmp_halo_dir, opts['halo_mass_string']),
                'weight_ptcles_by':
                None
            }
        if False:
            # halos in narrow mass bins, no mass weights
            opts['cats']['delta_h_M10.8-11.8'] = {
                'in_fname':
                "%s/fof_nbkfmt.hdf5_WithMassCols_BOUNDS_log10M_10.8_11.8.hdf5"
                % tmp_halo_dir,
                'weight_ptcles_by':
                None
            }
            opts['cats']['delta_h_M11.8-12.8'] = {
                'in_fname':
                "%s/fof_nbkfmt.hdf5_WithMassCols_BOUNDS_log10M_11.8_12.8.hdf5"
                % tmp_halo_dir,
                'weight_ptcles_by':
                None
            }
            opts['cats']['delta_h_M12.8-13.8'] = {
                'in_fname':
                "%s/fof_nbkfmt.hdf5_WithMassCols_BOUNDS_log10M_12.8_13.8.hdf5"
                % tmp_halo_dir,
                'weight_ptcles_by':
                None
            }
            opts['cats']['delta_h_M13.8-15.1'] = {
                'in_fname':
                "%s/fof_nbkfmt.hdf5_WithMassCols_BOUNDS_log10M_13.8_15.1.hdf5"
                % tmp_halo_dir,
                'weight_ptcles_by':
                None
            }

        if False:
            # halos in broad mass bins, no mass weights
            opts['cats']['delta_h_M12.8-16.0'] = {
                'in_fname':
                "%s/fof_nbkfmt.hdf5_WithMassCols.hdf5_BOUNDS_log10M_12.8_16.0.hdf5"
                % tmp_halo_dir,
                'weight_ptcles_by':
                None
            }

    else:
        raise Exception("Invalid sim_name %s" % opts['sim_name'])

    if False:
        # halos weighted by exact mass
        opts['cats']['delta_h_WEIGHT_M1'] = {
            'in_fname':
            "%s/fof_nbkfmt.hdf5_WithMassCols.hdf5_BOUNDS_log10M_%s.hdf5" %
            (tmp_halo_dir, opts['halo_mass_string']),
            'weight_ptcles_by':
            'Mass[1e10Msun/h]'
        }
    if False:
        # weighted by exact mass^2
        opts['cats']['delta_h_WEIGHT_M2'] = {
            'in_fname': opts['cats']['delta_h']['in_fname'],
            'weight_ptcles_by': 'Mass[1e10Msun/h]^2'
        }
    if False:
        # halos weighted by noisy mass
        #for myscatter in ['0.04dex','0.1dex','0.3dex','0.6dex']:
        for myscatter in ['0.1dex', '0.2dex', '0.4dex']:
            opts['cats']['delta_h_WEIGHT_M%s' % myscatter] = {
                'in_fname': opts['cats']['delta_h']['in_fname'],
                'weight_ptcles_by': 'MassWith%sScatter[1e10Msun/h]' % myscatter
            }

    ## Smoothing types and smoothing scales in Mpc/h
    smoothing_lst = []
    if True:
        # apply Gaussian smoothing
        #for R in [160.,80.,40.,20.,10.,5.,2.5]:
        for R in [20.]:
            #for R in [160.,40.,10.]:
            smoothing_lst.append(dict(type='Gaussian', R=R))
        print("smoothing_lst:", smoothing_lst)

    #### Output pickles and cache
    opts['pickle_path'] = '$SCRATCH/lssbisp2013/BenLT/pickle/'
    opts['cache_base_path'] = '$SCRATCH/lssbisp2013/BenLT/cache/'

    ## ANTI-ALIASING OPTIONS (do not change unless you know what you do)
    # Kmax above which to 0-pad. Should use kmax<=2pi/L*N/2 to avoid
    # unwanted Dirac delta images/foldings when multipling fields.
    opts['kmax'] = 2.0 * np.pi / opts['sim_boxsize'] * float(
        opts['Ngrid']) / 2.0
    #opts['kmax'] = None
    # CIC deconvolution of grid: None or 'grid_non_isotropic'
    opts['grid_ptcle2grid_deconvolution'] = None
    # CIC deconvolution of power: None or 'power_isotropic_and_aliasing'
    opts['Pk_ptcle2grid_deconvolution'] = None

    #####################################
    # START PROGRAM
    #####################################

    ### derived options (do not move above b/c command line args might
    ### overwrite some options!)
    opts['in_path'] = path_utils.get_in_path(opts)
    # for output densities
    opts['out_rho_path'] = os.path.join(opts['in_path'],
                                        'out_rho_Ng%d' % opts['Ngrid'])

    # expand environment names in paths
    paths = {}
    for key in [
            'in_path', 'in_fname', 'in_fname_PTsim_psi_calibration',
            'in_fname_halos_to_displace_by_mchi', 'pickle_path',
            'cache_base_path', 'grids4plots_base_path', 'out_rho_path'
    ]:
        if key in opts:
            if opts[key] is None:
                paths[key] = None
            else:
                paths[key] = os.path.expandvars(opts[key])

                #mslogging.setup_logging(use_mpi=opts['use_mpi'])

    # unique id for cached files so we can run multiple instances at the same time
    file_exists = True
    while file_exists:
        cacheid = ('CACHE%06x' % random.randrange(16**6)).upper()
        paths['cache_path'] = os.path.join(paths['cache_base_path'], cacheid)
        file_exists = (len(glob.glob(paths['cache_path'])) > 0)
    # create cache path
    if not os.path.exists(paths['cache_path']):
        #os.system('mkdir -p %s' % paths['cache_path'])
        os.makedirs(paths['cache_path'])

    # Check some params
    if ((opts['grid_ptcle2grid_deconvolution'] is not None)
            and (opts['Pk_ptcle2grid_deconvolution'] is not None)):
        raise Exception(
            "Must not simultaneously apply ptcle2grid deconvolution to grid and Pk."
        )

    # #################################################################################
    # Init some things
    # #################################################################################

    Nsmoothings = len(smoothing_lst)
    fig_hist, axlst_hist = plt.subplots(Nsmoothings,
                                        1,
                                        sharex=False,
                                        figsize=(6, 4 * Nsmoothings))

    pickle_dicts = OrderedDict()
    pickle_dicts['opts'] = opts.copy()

    # init some things
    cache_fnames = []

    gridx = None
    gridk = None

    gridk_cache_fname = os.path.join(paths['cache_path'],
                                     'gridk_qPk_my_cache.hdf5')
    cache_fnames.append(gridk_cache_fname)
    cached_columns = []
    if os.path.exists(gridk_cache_fname):
        os.remove(gridk_cache_fname)

    from nbodykit import setup_logging
    setup_logging()

    # ################################################################################
    # Compute density of all input catalogs
    # ################################################################################

    cat_infos = OrderedDict()
    for cat_id, cat_opts in opts['cats'].items():

        # default args for painting
        default_paint_kwargs = {
            'gridx': gridx,
            'gridk': gridk,
            'cache_path': paths['cache_path'],
            'Ngrid': opts['Ngrid'],
            'boxsize': opts['boxsize'],
            'grid_ptcle2grid_deconvolution':
            opts['grid_ptcle2grid_deconvolution'],
            'kmax': opts['kmax']
        }

        # nbodykit config
        config_dict = {
            'Nmesh': opts['Ngrid'],
            'output': os.path.join(paths['cache_path'], 'test_paint_baoshift'),
            'DataSource': {
                'plugin':
                'Subsample',
                'path':
                get_full_fname(paths['in_path'], cat_opts['in_fname'],
                               opts['ssseed'])
            },
            'Painter': {
                'plugin': 'DefaultPainter',
                'normalize': True,
                'setMean': 0.0
            }
        }

        if cat_opts['weight_ptcles_by'] is not None:
            config_dict['Painter']['weight'] = cat_opts['weight_ptcles_by']

        # paint deltanonl and save it in gridk.G[cat_id]
        if gridx is None and gridk is None:
            # return gridx and gridk
            gridx, gridk = paint_utils.paint_cat_to_gridk(
                config_dict, column=cat_id, **default_paint_kwargs)
        else:
            # modify existing gridx and gridk
            paint_utils.paint_cat_to_gridk(config_dict,
                                           column=cat_id,
                                           **default_paint_kwargs)

        print("\n\nINFO %s:\n" % cat_id)
        print(gridk.column_infos[cat_id])

        # save info in a more accessible way
        cat_infos[cat_id] = {
            'simple':
            simplify_cat_info(gridk.column_infos[cat_id],
                              weight_ptcles_by=cat_opts['weight_ptcles_by']),
            'full':
            gridk.column_infos[cat_id]
        }
        print("\n\nsimple cat info:")
        print(cat_infos[cat_id]['simple'])
        print("")

        # apply smoothing
        #gridk.apply_smoothing(cat_id, mode='Gaussian', R=20.0)

        # fft to x space
        gridx.append_column(cat_id, gridk.fft_k2x(cat_id, drop_column=False))

        if False:
            # test kappa2
            gridk.append_column('kappa2', gridk.calc_kappa2(cat_id,
                                                            gridx=gridx))
            gridx.append_column('kappa2',
                                gridk.fft_k2x('kappa2', drop_column=True))

        # test quadratic fields
        for quadfield in ['shift']:
            gridx.append_column(
                quadfield,
                gridk.calc_quadratic_field(basefield=cat_id,
                                           quadfield=quadfield))
            gridk.append_column(quadfield,
                                gridx.fft_x2k(quadfield, drop_column=True))

    # test compute_orthogonalized_fields
    # modifies gridk, Pkmeas
    gridk.rename_column('delta_h', 'ORTH s^0_0')
    gridk.rename_column('shift', 'ORTH s^0_1')
    osources, Pkmeas, ortho_rot_matrix_sources, orth_internals_sources = gridk.compute_orthogonalized_fields(
        N_ortho_iter=1,
        orth_method='CholeskyDecomp',
        all_in_fields=['ORTH s^0_0', 'ORTH s^0_1'],
        orth_prefix='ORTH s',
        non_orth_prefix='NON_ORTH s',
        Pkmeas=None,
        Pk_ptcle2grid_deconvolution=None,
        k_bin_width=1.0,
        delete_original_fields=True)

    for osource in osources:
        gridx.append_column(osource, gridk.fft_k2x(osource, drop_column=True))

    # ################################################################################
    # Empty cache
    # ################################################################################
    from shutil import rmtree
    rmtree(paths['cache_path'])

    raise Exception("continue here")
Example #12
def main():
    """ 
    Script to compute HOD galaxies from FOF halo catalog with mvir.

    For batch runs, use e.g.

    for SEED in {0..4}; do python main_run_hod.py --fof_halos_mvir "/data/mschmittfull/lss/ms_gadget/run4/0000040${SEED}-01536-500.0-wig/nbkit_fof_0.6250/ll_0.200_nmin25_mvir/" --RSD 0; done
    """
    setup_logging()

    ap = ArgumentParser()
    ap.add_argument(
        '--fof_halos_mvir',
        help=
        ('Directory of halo catalog with mvir Mass, e.g. '
         '/data/mschmittfull/lss/ms_gadget/run4/00000400-01536-500.0-wig/nbkit_fof_0.6250/ll_0.200_nmin25_mvir/'
         ),
        #default='/data/mschmittfull/lss/ms_gadget/run4/00000400-01536-500.0-wig/nbkit_fof_0.6250/ll_0.200_nmin25_mvir/'
        default=
        '/Users/mschmittfull/scratch_data/lss/ms_gadget/run4/00000400-01536-500.0-wig/nbkit_fof_0.6250/ll_0.200_nmin25_mvir'
    )
    ap.add_argument('--RSD',
                    help='Add RSD to positions if not 0',
                    type=int,
                    default=0)
    ap.add_argument('--HOD_model_name',
                    help='Name of HOD model',
                    default='Zheng07_HandSeljak17_v2')

    args = ap.parse_args()
    RSD_LOS = np.array([0, 0, 1])

    # load input halo catalog
    print('Read halos from %s' % args.fof_halos_mvir)
    cat = BigFileCatalog(args.fof_halos_mvir)

    # run hod to get galaxy catalog
    galcat = run_hod(cat,
                     add_RSD=args.RSD,
                     RSD_LOS=RSD_LOS,
                     HOD_model_name=args.HOD_model_name)

    if True:
        # save to hdf5
        out_fname = os.path.join(args.fof_halos_mvir,
                                 'HOD_%s' % args.HOD_model_name)
        if args.RSD:
            assert np.all(RSD_LOS == np.array([0, 0, 1]))
            out_fname += '_RSD001'
        out_fname += '.hdf5'
        save_galcat_to_hdf5(galcat, out_fname=out_fname)
        print('Wrote %s' % out_fname)

    # save to bigfile
    out_fname = '%s_HOD_%s' % (args.fof_halos_mvir, args.HOD_model_name)
    if args.RSD:
        assert np.all(RSD_LOS == np.array([0, 0, 1]))
        out_fname += '_RSD001'
    out_fname += '.bigfile'
    galcat.save(out_fname, columns=galcat.columns)
    print('Wrote %s' % out_fname)
Example #13
def calculate_model_error(sim_opts=None,
                          grid_opts=None,
                          power_opts=None,
                          trf_fcn_opts=None,
                          ext_grids_to_load=None,
                          xgrids_in_memory=None,
                          kgrids_in_memory=None,
                          cats=None,
                          trf_specs=None,
                          keep_pickle=False,
                          pickle_file_format='dill',
                          pickle_path='$SCRATCH/perr/pickle/',
                          Pkmeas_helper_columns=None,
                          Pkmeas_helper_columns_calc_crosses=False,
                          store_Pkmeas_in_trf_results=False,
                          save_grids4plots=False,
                          grids4plots_base_path=None,
                          grids4plots_R=None,
                          cache_base_path=None,
                          RSDstrings=None,
                          code_version_for_pickles=None,
                          return_fields=None,
                          shifted_fields_Np=None,
                          shifted_fields_Nmesh=None,
                          shifted_fields_RPsi=None):
    """
    Calculate the model error for all models specified by trf_specs.

    Use return_fields=['bestfit'] or ['residual'] or ['bestfit','residual']
    to return the fields as well, as lists in same order as trf_specs.
    """

    # store opts in dict so we can save in pickle later
    opts = dict(
        sim_opts=sim_opts,
        grid_opts=grid_opts,
        power_opts=power_opts,
        trf_fcn_opts=trf_fcn_opts,
        ext_grids_to_load=ext_grids_to_load,
        #xgrids_in_memory=xgrids_in_memory,
        #kgrids_in_memory=kgrids_in_memory,
        cats=cats,
        trf_specs=trf_specs,
        keep_pickle=keep_pickle,
        pickle_file_format=pickle_file_format,
        pickle_path=pickle_path,
        Pkmeas_helper_columns=Pkmeas_helper_columns,
        Pkmeas_helper_columns_calc_crosses=Pkmeas_helper_columns_calc_crosses,
        store_Pkmeas_in_trf_results=store_Pkmeas_in_trf_results,
        save_grids4plots=save_grids4plots,
        grids4plots_base_path=grids4plots_base_path,
        grids4plots_R=grids4plots_R,
        cache_base_path=cache_base_path,
        code_version_for_pickles=code_version_for_pickles)

    #####################################
    # Initialize
    #####################################

    # make sure we keep the pickle if it is a big run and do not plot
    if grid_opts.Ngrid > 256:
        keep_pickle = True

    # load defaults if not set
    if ext_grids_to_load is None:
        ext_grids_to_load = sim_opts.get_default_ext_grids_to_load(
            Ngrid=grid_opts.Ngrid)

    if cats is None:
        cats = sim_opts.get_default_catalogs()

    ### derived options (do not move above b/c command line args might
    ### overwrite some options!)
    opts['in_path'] = path_utils.get_in_path(opts)
    # for output densities
    opts['out_rho_path'] = os.path.join(opts['in_path'],
                                        'out_rho_Ng%d' % grid_opts.Ngrid)

    # expand environment names in paths
    paths = {}
    for key in [
            'in_path', 'in_fname', 'in_fname_PTsim_psi_calibration',
            'in_fname_halos_to_displace_by_mchi', 'pickle_path',
            'cache_base_path', 'grids4plots_base_path', 'out_rho_path'
    ]:
        if key in opts:
            if opts[key] is None:
                paths[key] = None
            else:
                paths[key] = os.path.expandvars(opts[key])

    setup_logging()
    comm = CurrentMPIComm.get()
    logger = logging.getLogger('PerrCalc')

    model_spec.check_trf_specs_consistency(trf_specs)

    # Init Pickler instance to save pickle later (this will init pickle fname)
    pickler = None
    if comm.rank == 0:
        pickler = Pickler(path=paths['pickle_path'],
                          base_fname='main_calc_Perr',
                          file_format=pickle_file_format,
                          rand_sleep=(grid_opts.Ngrid > 128))
        print("Pickler: ", pickler.full_fname)
    pickler = comm.bcast(pickler, root=0)

    # where to save grids for slice and scatter plots
    if save_grids4plots:
        paths['grids4plots_path'] = os.path.join(
            paths['grids4plots_base_path'],
            os.path.basename(pickler.full_fname))
        if comm.rank == 0:
            if not os.path.exists(paths['grids4plots_path']):
                os.makedirs(paths['grids4plots_path'])
            print("grids4plots_path:", paths['grids4plots_path'])

    paths['cache_path'] = utils.make_cache_path(paths['cache_base_path'], comm)

    # Get list of all densities actually needed for trf fcns.
    densities_needed_for_trf_fcns = utils.get_densities_needed_for_trf_fcns(
        trf_specs)
    #opts['densities_needed_for_trf_fcns'] = densities_needed_for_trf_fcns

    # ##########################################################################
    # Run program.
    # ##########################################################################

    #if opts.get('RSDstrings', ['']) != ['']:
    if True or RSDstrings not in [None, ['']]:
        # calculate D and f
        cosmo = CosmoModel(**sim_opts.cosmo_params)
        calc_Da = generate_calc_Da(cosmo=cosmo)
        f_log_growth = calc_f_log_growth_rate(a=sim_opts.sim_scale_factor,
                                              calc_Da=calc_Da,
                                              cosmo=cosmo,
                                              do_test=True)
        # save in opts so we can easily access it throughout code (although strictly
        # speaking it is not a free option but derived from cosmo_params)
        opts['f_log_growth'] = f_log_growth
    else:
        opts['f_log_growth'] = None

    # Compute best-fit model and power spectra
    # TODO: maybe split into method computing field and separate method to compute power spectra.
    # For now, load fields from cache as workaround (see below)
    pickle_dict = combine_fields.paint_combine_and_calc_power(
        trf_specs=trf_specs,
        paths=paths,
        catalogs=cats,
        needed_densities=densities_needed_for_trf_fcns,
        ext_grids_to_load=ext_grids_to_load,
        xgrids_in_memory=xgrids_in_memory,
        kgrids_in_memory=kgrids_in_memory,
        trf_fcn_opts=trf_fcn_opts,
        grid_opts=grid_opts,
        sim_opts=sim_opts,
        power_opts=power_opts,
        save_grids4plots=save_grids4plots,
        grids4plots_R=grids4plots_R,
        Pkmeas_helper_columns=Pkmeas_helper_columns,
        Pkmeas_helper_columns_calc_crosses=Pkmeas_helper_columns_calc_crosses,
        store_Pkmeas_in_trf_results=store_Pkmeas_in_trf_results,
        f_log_growth=opts['f_log_growth'])

    # Load fields from cache if they shall be returned
    # (actually not used anywhere, could delete)
    if return_fields is not None:
        if 'bestfit' in return_fields:
            bestfit_fields = []
            for trf_spec in trf_specs:
                # load bestfit fields from cache
                gridk = ComplexGrid(fname=pickle_dict['gridk_cache_fname'],
                                    read_columns=[trf_spec.save_bestfit_field])
                bestfit_fields.append(gridk.G[trf_spec.save_bestfit_field])
                del gridk

        if 'residual' in return_fields:
            residual_fields = []
            for trf_spec in trf_specs:
                # load residual field from cache
                residual_key = '[%s]_MINUS_[%s]' % (
                    trf_spec.save_bestfit_field, trf_spec.target_field)

                gridk = ComplexGrid(fname=pickle_dict['gridk_cache_fname'],
                                    read_columns=[residual_key])

                residual_fields.append(gridk.G[residual_key])
                del gridk

    # copy over opts so they are saved
    assert 'opts' not in pickle_dict
    pickle_dict['opts'] = opts.copy()

    # save all results to pickle
    if comm.rank == 0:
        pickler.write_pickle(pickle_dict)

    # print path with grids for slice and scatter plotting
    if save_grids4plots:
        print("grids4plots_path: %s" % paths['grids4plots_path'])

    # print save_bestfit_fields
    save_bestfit_fields = [t.save_bestfit_field for t in opts['trf_specs']]
    print('\nsave_bestfit_fields:\n' + '\n'.join(save_bestfit_fields))

    # delete pickle if not wanted any more
    if comm.rank == 0:
        if keep_pickle:
            print("Pickle: %s" % pickler.full_fname)
        else:
            pickler.delete_pickle_file()

        # delete cache dir
        from shutil import rmtree
        rmtree(paths['cache_path'])

    if return_fields in [False, None]:
        return pickle_dict
    elif return_fields == ['bestfit']:
        return bestfit_fields, pickle_dict
    elif return_fields == ['residual']:
        return residual_fields, pickle_dict
    elif return_fields == ['bestfit', 'residual']:
        return bestfit_fields, residual_fields, pickle_dict
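
# Assumed usage sketch, with argument names taken from the signature and
# docstring above (the *_opts objects must be prepared by the caller):
#
#   residual_fields, pickle_dict = calculate_model_error(
#       sim_opts=sim_opts,
#       grid_opts=grid_opts,
#       power_opts=power_opts,
#       trf_fcn_opts=trf_fcn_opts,
#       cats=cats,
#       trf_specs=trf_specs,
#       return_fields=['residual'])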
Example #14
def main():
    ns = ap.parse_args()
    if ns.verbose:
        setup_logging('info')
    cat = BigFileCatalog(ns.catalog, dataset="1")
    params = FastPMParams(''.join(cat.attrs['ParamFile']), cwd=ns.cwd)

    if params['sigma8'] != 0:
        raise ValueError("overriding sigma8 is not supported")

    nmesh = ns.nmesh or params['nc']
    if cat.comm.rank == 0:
        cat.logger.info("Nmesh = %d", nmesh)

    strides = cat.attrs["q.strides"]
    scale = cat.attrs["q.scale"]
    shift = cat.attrs["q.shift"]

    pm = ParticleMesh([nmesh] * 3, cat.attrs['BoxSize'], comm=cat.comm)

    k, Pk = params.read_powerspectrum()

    def pklin(k_):
        return numpy.interp(k_, k, Pk)

    def tf(k_):
        return (pklin(k_.normp(2, zeromode=1.0)**0.5) / pm.BoxSize.prod()**0.5)

    dlin = pm.generate_whitenoise(params['random_seed'],
                                  unitary=params['remove_cosmic_variance'],
                                  type='untransposedcomplex')
    dlin = dlin.apply(lambda k, v: tf(k) * v).c2r()

    if cat.comm.rank == 0:
        cat.logger.info("linear field generated.")

    Nchunks = max(cat.comm.allgather(cat.size)) // (4 * 1024 * 1024)
    Nchunks = max(1, Nchunks)

    if cat.comm.rank == 0:
        cat.logger.info("Nchunks = %d", Nchunks)

    ID = cat['ID']

    delta = numpy.empty(len(ID), dtype='f4')

    if cat.comm.rank == 0:
        cat.logger.info("delta allocated for %d particles.", cat.csize)

    def work(ID, i):
        if cat.comm.rank == 0:
            cat.logger.info("%d / %d", i, Nchunks)
        Q = id2q(ID, strides=strides, scale=scale, shift=shift)
        csize = cat.comm.allreduce(ID.size)
        if csize == 0:
            return numpy.zeros(len(Q), dtype='f4')
        cmin = numpy.min(cat.comm.allgather(Q.min(axis=0)), axis=0)
        cmax = numpy.max(cat.comm.allgather(Q.max(axis=0)), axis=0)
        if cat.comm.rank == 0:
            cat.logger.info("Q = [ %s ] - [ %s ], len(Q) = %d", cmin, cmax,
                            csize)

        layout = pm.decompose(Q)
        if cat.comm.rank == 0:
            cat.logger.info("decompose finished.")
        delta = dlin.readout(Q, layout=layout)
        if cat.comm.rank == 0:
            cat.logger.info("readout done.")
        csum1 = cat.comm.allreduce(delta.sum())
        csum2 = cat.comm.allreduce((delta**2).sum())
        cmean = csum1 / csize
        cstd = (csum2 / csize - (csum1 / csize)**2)**0.5
        if cat.comm.rank == 0:
            cat.logger.info("On rank0, <delta> = %g, std(delta) = %g", cmean,
                            cstd)
        return delta

    for i in range(Nchunks):
        chunk = slice(i * len(ID) // Nchunks, (i + 1) * len(ID) // Nchunks)
        delta[chunk] = work(ID[chunk].compute(), i)

    cat[ns.ocolumn] = delta

    if ns.ocatalog is None:
        ns.ocatalog = ns.catalog

    cat.save(ns.ocatalog,
             columns=[ns.ocolumn],
             header=None,
             dataset=ns.dataset)
    cat.comm.barrier()
    if cat.comm.rank == 0:
        cat.logger.info("done")
Example #15
def main(ns, ns1, ns2):
    if ns.verbose:
        setup_logging('info')

    if ns.unique_k:
        dk = 0
    else:
        dk = None

    cat1 = read_cat(ns1)
    cat2 = read_cat(ns2)

    nmin = numpy.unique(
        numpy.int32(
            numpy.logspace(numpy.log10(ns.nmin),
                           numpy.log10(ns.nmax),
                           ns.nn,
                           endpoint=True)))
    if 'Length' in cat1.columns:
        nmin0 = cat1.comm.allreduce(
            cat1['Length'].min().compute() if cat1.size > 0 else 10000000,
            MPI.MIN)
        nmax0 = cat1.comm.allreduce(
            cat1['Length'].max().compute() if cat1.size > 0 else 0, MPI.MAX)
        nmin = nmin[nmin >= nmin0]
        nmin = nmin[nmin < nmax0]
    else:
        nmin = [0]

    if 'Length' in cat2.columns:
        nmin2 = cat2.comm.allreduce(
            cat2['Length'].min().compute() if cat2.size > 0 else 10000000,
            MPI.MIN)
        nmax2 = cat2.comm.allreduce(
            cat2['Length'].max().compute() if cat2.size > 0 else 0, MPI.MAX)
    else:
        nmin2 = 0
        nmax2 = 1

    if cat1.comm.rank == 0:
        os.makedirs(os.path.dirname(ns.output), exist_ok=True)

    for nmin1 in nmin:
        cat1 = read_cat(ns1, nmin1)
        nsel = cat1.comm.allreduce(cat1['Selection'].sum().compute())
        cat2 = read_cat_nsel(ns2, nsel, nmin2, nmax2)

        mesh1 = cat1.to_mesh(interlaced=True,
                             compensated=True,
                             window='tsc',
                             Nmesh=ns.nmesh,
                             position='RSDPosition')
        mesh2 = cat2.to_mesh(interlaced=True,
                             compensated=True,
                             window='tsc',
                             Nmesh=ns.nmesh,
                             position='RSDPosition')

        r1 = FFTPower(mesh1,
                      second=mesh1,
                      mode='2d',
                      dk=dk,
                      Nmu=10,
                      kmax=ns.kmax)
        r2 = FFTPower(mesh2,
                      second=mesh2,
                      mode='2d',
                      dk=dk,
                      Nmu=10,
                      kmax=ns.kmax)
        rx = FFTPower(mesh1,
                      second=mesh2,
                      mode='2d',
                      dk=dk,
                      Nmu=10,
                      kmax=ns.kmax)

        save_bs(ns.output, 'nmin-%05d-r1' % nmin1, r1)
        save_bs(ns.output, 'nmin-%05d-r2' % nmin1, r2)
        save_bs(ns.output, 'nmin-%05d-rx' % nmin1, rx)
        if cat1.comm.rank == 0:
            print("nmin = ", nmin1, "finished")
Example #16
The xy plane is set to the galactic plane, so that any anomaly due to this
pasting is obscured by the galaxy.
FIXME: the conversion between redshift of source plane and comoving distance of
source plane uses a hardcoded Planck15 cosmology. Since we never talk about redshift
in an accurate way for wlen, this is probably OK for now. 
"""

import nbodykit
from nbodykit.lab import BigFileCatalog
from nbodykit.transform import ConcatenateSources, CartesianToEquatorial
from nbodykit.cosmology import Planck15
import numpy
import bigfile

from mpi4py import MPI
nbodykit.setup_logging()
nbodykit.set_options(dask_chunk_size=1024 * 1024)
nbodykit.set_options(global_cache_size=0)

from nbodykit.utils import DistributedArray, GatherArray

#nbodykit.set_options(global_cache_size=128)

import dask.array as da

# imports needed by main() below (this snippet mixes two scripts; the helper
# catalog_persist is assumed to come from the author's own utility library)
import os
from shutil import rmtree
from argparse import ArgumentParser
import numpy as np
from nbodykit.lab import ArrayCatalog
from nbodykit import setup_logging

# formula (from Sukhdeep Singh):
#
#   int dz_s  P(z_s) [ int dchi_l  Omega_m / Sigma(z_s, z_l) * delta_m(t, chi_l) ]
#
# Sukhdeep swapped the integral order.
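# a sketch of the swapped order (an interpretation, since the original only
# notes the swap; Sigma is read here as the critical surface density):
#
#   int dchi_l  Omega_m * delta_m(t, chi_l) * [ int dz_s  P(z_s) / Sigma(z_s, z_l) ]
#
# the bracket becomes a per-lens-plane kernel that can be tabulated once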
def main():
    """ 
    Script to convert Rockstar halo catalog to bigfile catalog.
    Log in to a single node on helios and run this there on the command line.

    For batch runs, use e.g.

    for SEED in {0..1}; do python main_rockstar_catalog_to_bigfile.py --rockstar_halos "/scratch/mschmittfull/lss/ms_gadget/run4/0000040${SEED}-01536-1500.0-wig/snap_0.6250.gadget3/rockstar_out_0.list" --max_rows 5 --include_parent_ID; done
    """
    setup_logging()

    ap = ArgumentParser()
    ap.add_argument(
        '--rockstar_halos',
        help=
        ('File name of Rockstar halo catalog, e.g.'
         '/data/mschmittfull/lss/ms_gadget/run4/00000400-01536-500.0-wig/snap_0.6250.gadget3/rockstar_out_0.list'
         ),
        default=
        '/scratch/mschmittfull/lss/ms_gadget/run4/00000400-01536-500.0-wig/snap_0.6250.gadget3/rockstar_out_0.list.parents'
    )

    ap.add_argument('--add_RSD',
                    dest='RSD',
                    action='store_true',
                    help='Add RSD to position')

    ap.add_argument('--include_parent_ID',
                    dest='include_parent_ID',
                    action='store_true',
                    help='Include ID and parent ID in bigfile.')

    # ap.add_argument(
    #     '--RSD', help='Add RSD to positions if not 0',
    #     type=int,
    #     default=0)

    ap.add_argument('--max_rows',
                    help='Max number of rows to read. Read all if 0.',
                    type=int,
                    default=0)

    ap.set_defaults(RSD=False, include_parent_ID=False)

    args = ap.parse_args()
    RSD_LOS = np.array([0, 0, 1])

    # load input halo catalog
    print('Read halos from %s' % args.rockstar_halos)

    # read header
    with open(args.rockstar_halos) as myfile:
        header = [next(myfile) for _ in range(16)]
    header = ''.join(header)
    print('Header:')
    print(header)
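    # (Rockstar .list outputs begin with a block of '#'-commented header
    #  lines; 16 such lines are assumed here)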

    # get names of columns
    np_cat1 = np.genfromtxt(args.rockstar_halos, names=True, max_rows=1)
    names = np_cat1.dtype.names
    # keep only a subset
    usecol_names = ['X', 'Y', 'Z', 'VX', 'VY', 'VZ', 'Mvir']
    if args.include_parent_ID:
        usecol_names += ['ID', 'PID']

    usecols = []
    for column_number, name in enumerate(names):
        if name in usecol_names:
            usecols.append(column_number)

    print('usecols:', usecols)
    print([names[usecol] for usecol in usecols])

    # read data.
    print('Reading data')
    if args.max_rows == 0:
        max_rows = None
    else:
        max_rows = args.max_rows

    # TODO: np.loadtxt should be faster, but this now takes ~5 minutes, so probably OK.
    np_cat = np.genfromtxt(args.rockstar_halos,
                           names=True,
                           max_rows=max_rows,
                           usecols=usecols)

    print('Read data:')
    print(np_cat[:5])

    # convert to arraycatalog
    cat = ArrayCatalog(np_cat)

    # fill position and velocity
    pos = np.empty(cat.csize, dtype=[('Position', ('f8', 3))])
    pos['Position'][:, 0] = cat['X']
    pos['Position'][:, 1] = cat['Y']
    pos['Position'][:, 2] = cat['Z']
    cat['Position'] = pos['Position']
    del pos

    vel = np.empty(cat.csize, dtype=[('Velocity', ('f8', 3))])
    vel['Velocity'][:, 0] = cat['VX']
    vel['Velocity'][:, 1] = cat['VY']
    vel['Velocity'][:, 2] = cat['VZ']
    # todo: what units?
    cat['Velocity'] = vel['Velocity']
    del vel
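    # note: cat.csize is the *global* size; it matches the local array shape
    # here only because the script runs on a single rank, as the docstring
    # prescribes. A more direct alternative (sketch) would be nbodykit's
    # column stacker:
    #   from nbodykit import transform
    #   cat['Position'] = transform.StackColumns(cat['X'], cat['Y'], cat['Z'])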

    cat['log10Mvir'] = np.log10(cat['Mvir'])

    # Keep only some columns
    keep_columns = ['Position', 'Velocity', 'log10Mvir']
    if args.include_parent_ID:
        # also keep halo ID and parent ID
        keep_columns += ['ID', 'PID']

    cat = catalog_persist(cat, keep_columns)
    cat.attrs['rockstar_header'] = header

    if args.RSD:
        raise NotImplementedError('RSD not implemented')

    print('Will write data:')
    for c in keep_columns:
        print('%s:' % c, cat[c])

    # save to bigfile
    if max_rows is None:
        out_fname = '%s.bigfile' % args.rockstar_halos
    else:
        out_fname = '%s_max_rows%d.bigfile' % (args.rockstar_halos, max_rows)

    if os.path.exists(out_fname):
        rmtree(out_fname)

    if cat.comm.rank == 0:
        print('Writing to %s' % out_fname)
    cat.save(out_fname, columns=keep_columns)
    if cat.comm.rank == 0:
        print('Wrote %s' % out_fname)
Ejemplo n.º 18
def calc_and_save_model_errors_at_cat_pos(
        sim_opts=None,
        grid_opts=None,
        power_opts=None,
        trf_fcn_opts=None,
        ext_grids_to_load=None,
        cat_specs=None,
        trf_specs=None,
        keep_pickle=False,
        pickle_file_format='dill',
        pickle_path='$SCRATCH/perr/pickle/',
        Pkmeas_helper_columns=None,
        Pkmeas_helper_columns_calc_crosses=False,
        cache_base_path=None,
        code_version_for_pickles=None,
        shifted_fields_Np=None,
        shifted_fields_Nmesh=None):
    """
    Calculate the model error for all models specified by trf_specs.

    Do this by reading out the model at the positions of objects in the catalog.
    """

    # store opts in dict so we can save in pickle later
    opts = dict(
        sim_opts=sim_opts,
        grid_opts=grid_opts,
        power_opts=power_opts,
        trf_fcn_opts=trf_fcn_opts,
        ext_grids_to_load=ext_grids_to_load,
        #xgrids_in_memory=xgrids_in_memory,
        #kgrids_in_memory=kgrids_in_memory,
        cat_specs=cat_specs,
        trf_specs=trf_specs,
        keep_pickle=keep_pickle,
        pickle_file_format=pickle_file_format,
        pickle_path=pickle_path,
        Pkmeas_helper_columns=Pkmeas_helper_columns,
        cache_base_path=cache_base_path,
        code_version_for_pickles=code_version_for_pickles,
        shifted_fields_Np=shifted_fields_Np,
        shifted_fields_Nmesh=shifted_fields_Nmesh)

    #####################################
    # Initialize
    #####################################

    # for big runs, always keep the pickle (and skip plotting)
    if grid_opts.Ngrid > 256:
        keep_pickle = True

    # load defaults if not set
    if ext_grids_to_load is None:
        ext_grids_to_load = sim_opts.get_default_ext_grids_to_load(
            Ngrid=grid_opts.Ngrid)

    if cat_specs is None:
        cat_specs = {}

    ### derived options (do not move above b/c command line args might
    ### overwrite some options!)
    opts['in_path'] = path_utils.get_in_path(opts)
    # for output densities
    opts['out_rho_path'] = os.path.join(opts['in_path'],
                                        'out_rho_Ng%d' % grid_opts.Ngrid)

    # expand environment names in paths
    paths = {}
    for key in [
            'in_path', 'in_fname', 'in_fname_PTsim_psi_calibration',
            'in_fname_halos_to_displace_by_mchi', 'pickle_path',
            'cache_base_path', 'grids4plots_base_path', 'out_rho_path'
    ]:
        if key in opts:
            if opts[key] is None:
                paths[key] = None
            else:
                paths[key] = os.path.expandvars(opts[key])
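    # e.g. (hypothetical) pickle_path = '$SCRATCH/perr/pickle/' expands to
    # something like '/scratch/<user>/perr/pickle/', depending on $SCRATCH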

    setup_logging()
    comm = CurrentMPIComm.get()
    logger = logging.getLogger('PerrCalc')

    # make sure there are no duplicate save_bestfit_field entries
    model_spec.check_trf_specs_consistency(trf_specs)

    # Init Pickler instance to save pickle later (this will init pickle fname)
    pickler = None
    if comm.rank == 0:
        pickler = Pickler(path=paths['pickle_path'],
                          base_fname='main_calc_vel_at_halopos_Perr',
                          file_format=pickle_file_format,
                          rand_sleep=(grid_opts.Ngrid > 128))
        print("Pickler: ", pickler.full_fname)
    pickler = comm.bcast(pickler, root=0)

    paths['cache_path'] = utils.make_cache_path(paths['cache_base_path'], comm)

    # Get list of all densities actually needed for trf fcns.
    #densities_needed_for_trf_fcns = utils.get_densities_needed_for_trf_fcns(
    #    trf_specs)

    # ##########################################################################
    # Run program.
    # ##########################################################################

    # calculate D and f
    cosmo = CosmoModel(**sim_opts.cosmo_params)
    calc_Da = generate_calc_Da(cosmo=cosmo)
    f_log_growth = calc_f_log_growth_rate(a=sim_opts.sim_scale_factor,
                                          calc_Da=calc_Da,
                                          cosmo=cosmo,
                                          do_test=True)
    # save in opts so we can easily access it throughout code (although strictly
    # speaking it is not a free option but derived from cosmo_params)
    opts['f_log_growth'] = f_log_growth
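    # (f_log_growth is f = dln D / dln a, the logarithmic growth rate,
    #  evaluated at the simulation scale factor)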

    # Compute model at catalog positions, and residual to target.
    pickle_dict = calc_model_errors_at_cat_pos(
        trf_specs=trf_specs,
        paths=paths,
        cat_specs=cat_specs,
        ext_grids_to_load=ext_grids_to_load,
        trf_fcn_opts=trf_fcn_opts,
        grid_opts=grid_opts,
        sim_opts=sim_opts,
        power_opts=power_opts,
        Pkmeas_helper_columns=Pkmeas_helper_columns,
        Pkmeas_helper_columns_calc_crosses=Pkmeas_helper_columns_calc_crosses,
        f_log_growth=opts['f_log_growth'])

    # copy over opts so they are saved
    assert 'opts' not in pickle_dict
    pickle_dict['opts'] = opts.copy()

    # save all results to pickle
    if comm.rank == 0:
        pickler.write_pickle(pickle_dict)

    # print save_bestfit_fields
    save_bestfit_fields = [t.save_bestfit_field for t in opts['trf_specs']]
    print('\nsave_bestfit_fields:\n' + '\n'.join(save_bestfit_fields))

    # delete pickle if not wanted any more
    if comm.rank == 0:
        if keep_pickle:
            print("Pickle: %s" % pickler.full_fname)
        else:
            pickler.delete_pickle_file()

        # delete cache dir
        from shutil import rmtree
        rmtree(paths['cache_path'])

    return pickle_dict
Ejemplo n.º 19
#### produces an nbodykit catalog from websky or a similar source (in development)
#### contact: [email protected]
##### Project the field onto a healpix map: 'maps_field'.
from nbodykit.lab import *
from nbodykit import setup_logging
setup_logging() # log output to stdout
import numpy as np
import matplotlib.pyplot as plt
import scipy
from scipy.interpolate import *
import sys, platform, os
from numpy import genfromtxt
import healpy as hp
from IPython.display import display
import six
import warnings
import fitsio
from nbodykit.source.catalog import FITSCatalog
import random
## user selections
size = 10000000  # size of the catalog
chi_bins = np.linspace(0, 5000, 10)  # equal-chi bins (chi is the comoving distance)
# if known, the size of the intended healpix map that will be used for the
# redshift-binned velocity or density maps. each halo maps onto a single
# pixel in total. (many halos can map onto the same pixel, of course)
NSIDE = 2**6
## end of user selections
NPIX = 12*NSIDE**2
# initialize the map
maps_overdensity = dict()
maps_numbercount = dict()
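# A minimal sketch (not from the original, which is truncated here) of how
# the per-chi-bin number-count maps declared above could be filled, assuming
# halo angular positions theta/phi (radians) and comoving distances chi are
# available as numpy arrays:
def fill_numbercount_maps(theta, phi, chi):
    maps = dict()
    for i in range(len(chi_bins) - 1):
        sel = (chi >= chi_bins[i]) & (chi < chi_bins[i + 1])
        pix = hp.ang2pix(NSIDE, theta[sel], phi[sel])  # one pixel per halo
        maps[i] = np.bincount(pix, minlength=NPIX).astype(np.float64)
    return maps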
Ejemplo n.º 20
'''
    Updates:
    Sep 3, 2019: modify ``/home/mehdi/miniconda3/envs/\
                        py3p6/lib/python3.7/site-packages/\
                        nbodykit/algorithms/pair_counters/base.py''
                        to include self pairs

'''
import sys
import numpy as np
import nbodykit.lab as nb  # Import Nbodykit
#from nbodykit.cosmology import Planck15 as cosmo
from nbodykit.cosmology import Cosmology
from nbodykit import setup_logging
from time import time

setup_logging("info")  # Info

from nbodykit import CurrentMPIComm
comm = CurrentMPIComm.get()
rank = comm.rank
size = comm.size

#  I/O
if rank == 0:
    space = sys.argv[1]
    zmin = float(sys.argv[2])
    zmax = float(sys.argv[3])
    print(space, zmin, zmax)
else:
    space = None
    zmin = None
    zmax = None
Ejemplo n.º 21
from runtests.mpi import MPITest
from nbodykit.lab import *
from nbodykit import setup_logging
from numpy.testing import assert_allclose, assert_array_equal
import os

setup_logging("debug")

# The test result data (threeptcf_sim_result.dat) is computed with
# Daniel Eisenstein's
# C++ implementation on the same input data set for poles up to l=11;
# We shall agree with it to high precision.
#
# If we need to reproduce these files:
# Nick Hand sent the code and instructions to Yu Feng on Aug-20-2018.

data_dir = os.path.join(os.path.split(os.path.abspath(__file__))[0], 'data')

@MPITest([4])
def test_sim_threeptcf(comm):

    import tempfile

    BoxSize = 400.0

    # load the test data
    filename = os.path.join(data_dir, 'threeptcf_sim_data.dat')
    cat = CSVCatalog(filename, names=['x', 'y', 'z', 'w'], comm=comm)
    cat['Position'] = transform.StackColumns(cat['x'], cat['y'], cat['z'])
    cat['Position'] *= BoxSize
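    # the positions in the test file are evidently stored in the unit cube,
    # hence the rescaling to BoxSize here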
Ejemplo n.º 22
from astropy.cosmology import Planck15 as pycosmo
from astropy.io import fits
from sys import argv
from logmocks import assign_randzs, getopts
from math import pi
from scipy.constants import speed_of_light as lightspeed
from Corrfunc.mocks import DDrppi_mocks

import matplotlib.pyplot as plt
import pylab as pl
import numpy as np
import healpy as hp
import dask.array as da
import dask
import os
'''
setup_logging()


args       = getopts(argv)

try:
    seed   = np.int(args['-i'])
except:
    seed   = 1                                      ## Default argument

redshift    = 0.3

home_dir    = os.environ['HOME']
scratch_dir = os.environ['SCRATCH']
Ejemplo n.º 23
from __future__ import print_function
from runtests.mpi import MPITest

from numpy.testing import assert_allclose
from numpy.testing.decorators import skipif
import fastpm

try:
    import nbodykit
    nbodykit.setup_logging('debug')
except ImportError:
    nbodykit = None


@MPITest([1, 4])
@skipif(True or nbodykit is None, "nbodykit test doesn't work on travis, or nbodykit is not installed")
def test_nbkit(comm):
    from fastpm.nbkit import FastPMCatalogSource
    from nbodykit.lab import cosmology, FOF, LinearMesh
    cosmo = cosmology.Planck15
    power = cosmology.LinearPower(cosmo, 0)

    linear = LinearMesh(power, 256., 64, seed=400, comm=comm)
    sim = FastPMCatalogSource(linear, boost=2, Nsteps=5, cosmo=cosmo)
    fof = FOF(sim, 0.2, 8)
    sim['Labels'] = fof.labels
    sim.save('nbkit-%d' % comm.size, ['Position', 'InitialPosition', 'Displacement', 'Labels'])
    features = fof.find_features()
    features.save('nbkit-fof-%d' % comm.size, ['CMPosition', 'Length'])
    #print(features._size, features._csize)
    assert_allclose(features.csize, 719, rtol=0.01)
Ejemplo n.º 24
def main(ns, ns1, ns2):
    if ns.verbose:
        setup_logging('info')

    cat1 = read_cat1(ns, ns1)

    mesh1 = cat1.paint(mode='complex')

    cat2 = read_cat2(ns, ns2)

    if ns.unique_k:
        dk = 0
    else:
        dk = None

    rm = FFTPower(mesh1,
                  second=mesh1,
                  mode='2d',
                  dk=dk,
                  Nmu=10,
                  kmax=ns.kmax * 10)
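    # note: the power is measured out to 10 x kmax, but the fit below only
    # uses modes with k < ns.kmax (see the Nmodes count further down)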
    nmin = numpy.unique(
        numpy.int32(
            numpy.logspace(numpy.log10(ns.nmin),
                           numpy.log10(ns.nmax),
                           ns.nn,
                           endpoint=True)))
    if 'Length' in cat2.columns:
        nmin0 = cat1.comm.allreduce(
            cat2['Length'].min().compute() if cat2.size > 0 else 10000000,
            MPI.MIN)
        nmax0 = cat1.comm.allreduce(
            cat2['Length'].max().compute() if cat2.size > 0 else 0, MPI.MAX)
        nmin = nmin[nmin >= nmin0]
        nmin = nmin[nmin < nmax0]
    else:
        nmin = [0]

    Nmodes = (rm.power['modes'] * (rm.power['k'] < ns.kmax)).sum()

    if cat1.comm.rank == 0:
        print('Using %d modes to estimate bias and growth rate' % Nmodes)

        dirname = os.path.dirname(ns.output)
        if len(dirname) > 0:
            os.makedirs(dirname, exist_ok=True)

    save_bs(ns.output, 'a-matter', rm)

    r = []
    b = []
    a = []
    f = []
    if cat1.comm.rank == 0:
        print('# Nmin bias growthrate abundance')
    for nmin1 in nmin:
        cat2 = read_cat2(ns, ns2, nmin1)
        mesh2 = cat2.to_mesh(interlaced=True,
                             compensated=True,
                             window='tsc',
                             Nmesh=ns.nmesh,
                             position='RSDPosition')
        mesh3 = cat2.to_mesh(interlaced=True,
                             compensated=True,
                             window='tsc',
                             Nmesh=ns.nmesh,
                             position='Position')

        r_rsd = FFTPower(mesh1,
                         second=mesh2,
                         mode='2d',
                         dk=dk,
                         Nmu=10,
                         kmax=ns.kmax * 10)
        r_real = FFTPower(mesh1,
                          second=mesh3,
                          mode='2d',
                          dk=dk,
                          Nmu=10,
                          kmax=ns.kmax * 10)

        r.append(r_rsd)

        save_bs(ns.output, 'x-nmin-%05d' % nmin1, r[-1])
        bias, gr = fit_bias(r_rsd, r_real, rm, ns.kmax)
        abundance = r[-1].attrs['N2'] / cat2.attrs['BoxSize'][0]**3
        b.append(bias)
        a.append(abundance)
        f.append(gr)
        if cat1.comm.rank == 0:
            print(nmin1, bias, gr, abundance)

    basename = ns.output.rsplit('.', 1)[0]

    if cat1.comm.rank == 0:
        numpy.savetxt(basename + '-bias.txt', numpy.array([nmin, b, f, a]).T)

    if ns.with_plot:
        if cat1.comm.rank == 0:
            figure = make_plot(rm, r, nmin, b, f, ns.kmax)
            figure.savefig(basename + '.png')
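# fit_bias itself is not shown in these examples. Below is a minimal sketch
# of a low-k bias estimator of the kind used above (an assumption, not the
# author's code): weight the ratio of the halo x matter cross power to the
# matter auto power by the number of modes below kmax. The real fit_bias
# here also returns a growth rate from the mu dependence (Kaiser effect),
# which this sketch omits. rx and rm are FFTPower results on the same grid.
def fit_bias_sketch(rx, rm, kmax):
    import numpy
    pk_x = rx.power['power'].real            # halo x matter cross power
    pk_m = rm.power['power'].real            # matter auto power
    mask = (rx.power['k'] < kmax) & numpy.isfinite(pk_x) & (pk_m != 0)
    w = rx.power['modes'][mask]              # number of Fourier modes per bin
    ratio = pk_x[mask] / pk_m[mask]          # bias estimate per bin
    return (ratio * w).sum() / w.sum()       # mode-weighted average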
Ejemplo n.º 25
from runtests.mpi import MPITest
from nbodykit.lab import *
from nbodykit import setup_logging

# debug logging
setup_logging("debug")


@MPITest([1, 4])
def test_fibercolls(comm):

    from scipy.spatial.distance import pdist, squareform
    from nbodykit.utils import ScatterArray, GatherArray

    CurrentMPIComm.set(comm)
    N = 10000

    # generate the initial data
    numpy.random.seed(42)
    if comm.rank == 0:
        ra = 10. * numpy.random.random(size=N)
        dec = 5. * numpy.random.random(size=N) - 5.0
    else:
        ra = None
        dec = None

    ra = ScatterArray(ra, comm)
    dec = ScatterArray(dec, comm)

    # compute the fiber collisions
    r = FiberCollisions(ra, dec, degrees=True, seed=42)
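    # the results are stored in r.labels (column names as in the nbodykit
    # docs, e.g. 'Label', 'Collided', 'NeighborID'; verify for your version);
    # a sketch of the local collided fraction:
    #   fcoll = r.labels['Collided'].compute().sum() / r.labels.size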
Ejemplo n.º 26
'''
    Use Nbodykit to compute P0


    - Jan 16: add nmodes to output
    - Nov 28: The functionality for reading multiple randoms does not work
'''
import sys
import nbodykit.lab as nb
import numpy as np

from nbodykit.transform import SkyToCartesian
from scipy.interpolate import InterpolatedUnivariateSpline
from nbodykit.cosmology import Planck15 as cosmo
from nbodykit import setup_logging

setup_logging("info")

from nbodykit import CurrentMPIComm
comm = CurrentMPIComm.get()
rank = comm.rank
size = comm.size

#  I/O
if rank == 0:
    from argparse import ArgumentParser
    ap = ArgumentParser(description='Power Spectrum')
    ap.add_argument(
        '--data',
        default='/B/Shared/Shadab/FA_LSS/FA_EZmock_desi_ELG_v0_15.fits')
    #ap.add_argument('--randoms', nargs='*', type=str, default='/B/Shared/Shadab/FA_LSS/FA_EZmock_desi_ELG_v0_rand_0*.fits')
    ap.add_argument(