Example #1
def mk_mock_coords(radeczfile, outfile, simul_cosmo):

    if simul_cosmo == "Planck":
        Planck13.__init__(100.0, Planck13.Om0)
        cosmo = Planck13
    elif simul_cosmo == "WMAP":
        WMAP5.__init__(100.0, WMAP5.Om0)
        cosmo = WMAP5

    rad = np.arange(1.0, 67.0, 5.0)

    radecz = h5_arr(radeczfile, "radecz")

    cart = np.zeros(radecz.shape)

    for i, rdz in enumerate(radecz):

        ra = Angle(rdz[0], u.deg)
        dec = Angle(rdz[1], u.deg)

        losd = cosmo.comoving_distance(rdz[2])
        dis = Distance(losd, u.Mpc)

        coord = ICRSCoordinates(ra, dec, distance=dis)

        cart[i, :] = np.array([coord.x, coord.y, coord.z])

    arr2h5(cart, outfile, "coords", mode='w')
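A note on the Planck13.__init__(100.0, Planck13.Om0) pattern that recurs throughout these examples: it re-initializes the built-in cosmology in place with H0 = 100 km/s/Mpc so that distances come out in h^-1 Mpc. Recent astropy versions make the built-in cosmologies immutable, so a minimal sketch of the same idea there (assuming a version where clone() is available) is:

# hedged sketch: an h-free cosmology without mutating the shared Planck13 object
from astropy.cosmology import Planck13

cosmo = Planck13.clone(H0=100.0)     # copy of Planck13 with H0 = 100 km/s/Mpc
print(cosmo.comoving_distance(0.5))  # effectively h^-1 Mpc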
Example #2
def mk_mock_coords(radeczfile, outfile, simul_cosmo):

    if simul_cosmo == "Planck":
        Planck13.__init__(100.0, Planck13.Om0)
        cosmo = Planck13
    elif simul_cosmo == "WMAP":
        WMAP5.__init__(100.0, WMAP5.Om0)
        cosmo = WMAP5

    rad = np.arange(1.0, 67.0, 5.0)

    radecz = h5_arr(radeczfile, "radecz")

    cart = np.zeros(radecz.shape)

    for i, rdz in enumerate(radecz):

        ra = Angle(rdz[0], u.deg)
        dec = Angle(rdz[1], u.deg)

        losd = cosmo.comoving_distance(rdz[2])
        dis = Distance(losd)

        coord = ICRSCoordinates(ra, dec, distance=dis)

        cart[i, :] = np.array([coord.x.value, coord.y.value, coord.z.value])

    np.savetxt(outfile, cart)
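ICRSCoordinates comes from a very old astropy (it was renamed ICRS in 0.4 and later absorbed into SkyCoord). A self-contained sketch of the same ra/dec/z to Cartesian conversion against the modern API; the helper name radecz_to_cart is illustrative:

import numpy as np
import astropy.units as u
from astropy.coordinates import Distance, SkyCoord
from astropy.cosmology import Planck13

def radecz_to_cart(radecz, cosmo=Planck13):
    """Convert an (N, 3) array of [ra_deg, dec_deg, z] to Cartesian Mpc."""
    dist = Distance(cosmo.comoving_distance(radecz[:, 2]))
    c = SkyCoord(ra=radecz[:, 0] * u.deg, dec=radecz[:, 1] * u.deg,
                 distance=dist)
    return c.cartesian.xyz.to(u.Mpc).value.T  # shape (N, 3)

print(radecz_to_cart(np.array([[150.0, 2.0, 0.5]])))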
Example #3
def mk_coords(radecfile, outfile, cosmology):

    # Set the cosmology with h free
    if cosmology == "Planck":
        Planck13.__init__(100.0, Planck13.Om0)
        cosmo = Planck13
    elif cosmology == "WMAP":
        WMAP5.__init__(100.0, WMAP5.Om0)
        cosmo = WMAP5

    f_in = h5.File(radecfile)
    radecz = f_in["radecz"]

    f_out = h5.File(outfile)
    cart = f_out.create_dataset("cart_pts", shape=(radecz.shape[0], 3),
                                dtype='float64')

    for i in range(radecz.shape[0]):
        ra = Angle(radecz[i, 0], u.deg)
        dec = Angle(radecz[i, 1], u.deg)

        losd = cosmo.comoving_distance(radecz[i, 2])
        dis = Distance(losd)

        coord = ICRSCoordinates(ra, dec, distance=dis)

        cart[i, :] = np.array([coord.x, coord.y, coord.z])

    f_in.close()
    f_out.close()
Example #5
def generate_tracks(mt, aexp_list, z0_clusters):
    """tracks most massive progenitors for reach halo"""

    mt.add_column("is_main_line", "BOOLEAN", table="mergertree", default=0)

    properties = [
        "x", "y", "z", "vx", "vy", "vz", "M_hc", "r_hc", "num_particles"
    ]
    z0_halos = mt.get_halo_properties(z0_clusters, properties, aexp_list[0])

    for cluster in z0_clusters:
        print "Finding progenitor line for z0 id: " + str(cluster)

        tree = mt.get_full_tree(cluster)

        parent = z0_halos[z0_halos["id"] == cluster].squeeze()

        progenitor_line = []

        for i, aexp in enumerate(aexp_list[1:]):

            parent_id = parent["id"]

            progenitors = tree[(tree['aexp'] == aexp)
                               & (tree['parent_id'] == parent_id)]

            #this can be pandas-ized
            # note: aexp here is aexp_list[i + 1], so the parent epoch is aexp_list[i]
            t1 = WMAP5.age(1. / aexp_list[i] - 1.).value
            t2 = WMAP5.age(1. / aexp - 1.).value
            dt = math.fabs(t1 - t2) * 1.0e9 * 3.15569e7  #convert Gyr to seconds

            #apply smaller searchbox to exclude far off merger candidates
            searchbox = ut.define_searchbox(parent,
                                            dt,
                                            mt.boxsize,
                                            search_distance=1.5)

            #identify most massive progenitor
            if not progenitors.empty:
                main_progenitor = identify_most_massive_progenitor(
                    progenitors, searchbox=searchbox)
                if main_progenitor is None:
                    break

                mt.mark_main_line(main_progenitor['aexp'],
                                  main_progenitor['id'],
                                  parent_id=main_progenitor['parent_id'])
                parent = main_progenitor
                progenitor_line.append(parent["id"])

            else:
                break

        print "end of tree for cluster " + str(cluster)
        print progenitor_line

        mt.commit()
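The dt computation above converts Gyr to seconds with hand-coded factors (1.0e9 yr/Gyr, 3.15569e7 s/yr). A small sketch of the same quantity letting astropy's unit machinery do the conversion; the helper name dt_seconds is illustrative:

import astropy.units as u
from astropy.cosmology import WMAP5

def dt_seconds(aexp_parent, aexp_child):
    """Cosmic time between two expansion factors, in seconds (WMAP5 ages)."""
    t1 = WMAP5.age(1.0 / aexp_parent - 1.0)
    t2 = WMAP5.age(1.0 / aexp_child - 1.0)
    return abs((t2 - t1).to(u.s).value)

print(dt_seconds(0.95, 1.0))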
Example #6
def get_inv_efunc(cosmology):

    if cosmology == "Planck":
        Planck13.__init__(100.0, Planck13.Om0)
        cosmo = Planck13
    elif cosmology == "WMAP":
        WMAP5.__init__(100.0, WMAP5.Om0)
        cosmo = WMAP5

    return cosmo.inv_efunc
Example #7
def get_comv(cosmology):

    if cosmology == "Planck":
        Planck13.__init__(100.0, Planck13.Om0)
        cosmo = Planck13
    elif cosmology == "WMAP":
        WMAP5.__init__(100.0, WMAP5.Om0)
        cosmo = WMAP5

    return cosmo.comoving_distance
Example #8
def mk_mock_srch(radecfile, nzdictfile, Nsph, simul_cosmo):

    if simul_cosmo == "Planck":
        # First make h free
        Planck13.__init__(100.0, Planck13.Om0)
        cosmo = Planck13
    elif simul_cosmo == "WMAP":
        WMAP5.__init__(100.0, WMAP5.Om0)
        cosmo = WMAP5
    comv = cosmo.comoving_distance

    radecarr = h5_arr(radecfile, "good_pts")
    nzdict = json.load(open(nzdictfile))

    Nrands = radecarr.shape[0]
    Narrs = Nsph / Nrands
    remain = Nsph % Nrands

    radecz = np.zeros((Nsph, 3))

    for i in range(Narrs):

        start = Nrands * i
        stop = Nrands * (i + 1)
        radecz[start:stop, :2] = radecarr[:, :]

    endchunk = Nrands * (Narrs)
    radecz[endchunk:, :2] = radecarr[:remain, :]

    rad = np.arange(1.0, 67.0, 5.0)
    zlo = nzdict["zlo"]
    zhi = nzdict["zhi"]

    radeczlist = len(rad) * [radecz]

    for r_i, r in enumerate(rad):

        dis_near = Distance(comv(zlo).value + r, u.Mpc)
        dis_far = Distance(comv(zhi).value - r, u.Mpc)

        z_a = dis_near.compute_z(cosmology=cosmo)

        z_b = dis_far.compute_z(cosmology=cosmo)

        randz = (z_a ** 3 + \
                 (z_b ** 3 - z_a ** 3) * np.random.rand(Nsph)) ** (1. / 3.)

        radeczlist[r_i][:, 2] = randz[:]

        arr2h5(
            radeczlist[r_i], "{0}/{1}/mocks/mock_srch_pts.hdf5".format(
                os.path.dirname(radecfile), simul_cosmo),
            "radecz_{0}".format(str(r_i * 5 + 1)))
Example #10
def mk_mock_srch(radecfile, nzdictfile, Nsph, simul_cosmo):

    if simul_cosmo == "Planck":
        # First make h free
        Planck13.__init__(100.0, Planck13.Om0)
        cosmo = Planck13
    elif simul_cosmo == "WMAP":
        WMAP5.__init__(100.0, WMAP5.Om0)
        cosmo = WMAP5
    comv = cosmo.comoving_distance

    radecarr = h5_arr(radecfile, "good_pts")
    nzdict = json.load(open(nzdictfile))

    Nrands = radecarr.shape[0]
    Narrs = Nsph / Nrands
    remain = Nsph % Nrands

    radecz = np.zeros((Nsph, 3))

    for i in range(Narrs):

        start = Nrands * i
        stop = Nrands * (i + 1)
        radecz[start:stop, :2] = radecarr[:, :]

    endchunk = Nrands * (Narrs)
    radecz[endchunk:, :2] = radecarr[:remain, :]

    rad = np.arange(1.0, 67.0, 5.0)
    zlo = nzdict["zlo"]
    zhi = nzdict["zhi"]

    radeczlist = len(rad) * [radecz]

    for r_i, r in enumerate(rad):

        dis_near = Distance(comv(zlo).value + r, u.Mpc)
        dis_far = Distance(comv(zhi).value - r, u.Mpc)

        z_a = dis_near.compute_z(cosmology=cosmo)

        z_b = dis_far.compute_z(cosmology=cosmo)

        randz = (z_a ** 3 + \
                 (z_b ** 3 - z_a ** 3) * np.random.rand(Nsph)) ** (1. / 3.)

        radeczlist[r_i][:, 2] = randz[:]

        arr2h5(radeczlist[r_i], "{0}/{1}/mocks/mock_srch_pts.hdf5".format(os.path.dirname(radecfile), simul_cosmo), "radecz_{0}".format(str(r_i * 5 + 1)))
def mock_vpf(mock_cart_coords, spheresfile, simul_cosmo, rad):

    gals = h5_arr(mock_cart_coords, "coords")

    print gals

    name = mock_cart_coords.split("/")[-1].split(".")[0]

    if simul_cosmo == "Planck":
        # First make h free
        Planck13.__init__(100.0, Planck13.Om0)
        cosmo = Planck13
    elif simul_cosmo == "WMAP":
        WMAP5.__init__(100.0, WMAP5.Om0)
        cosmo = WMAP5
    comv = cosmo.comoving_distance

    gal_baum = cKDTree(gals)

    spheres = h5_arr(spheresfile, "radecz_{0}".format(str(int(rad))))

    print spheres

    for i, sphere in enumerate(spheres):

        rang = Angle(sphere[0], u.deg)
        decang = Angle(sphere[1], u.deg)

        dis = Distance(comv(sphere[2]), u.Mpc)

        coord = ICRSCoordinates(rang, decang, distance=dis)

        sph_cen = np.array([coord.x, coord.y, coord.z])

        nn = gal_baum.query(sph_cen)

        print "rad: ", rad, ", sphere: ", i

        f = open(
            "{0}/vpf_out/ascii/{1}_{2}.dat".format(
                os.path.dirname(spheresfile), name, str(int(rad))), 'a')

        if not nn[0] < rad:
            f.write("1\n")
        else:
            f.write("0\n")

        f.close()
Example #12
def load_tmerger_from_file(sim, dir, aexp=1.0):

    last_merger = sim.load_last_merger_epochs(dir,aexp)

    tmerger = {}

    for id in last_merger.keys():
        merger_z = (1./float(last_merger[id])-1)
        merger_time = WMAP5.age(merger_z)
        cosmic_age = WMAP5.age(1./float(aexp)-1)

#        merger_time = cosmocalc(merger_z)["zage_Gyr"]
#        cosmic_age = cosmocalc(1./float(aexp)-1)["zage_Gyr"]

        tmerger[id] = cosmic_age - merger_time

    return tmerger
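Since WMAP5.age() returns a Quantity in Gyr, the values stored in tmerger above carry Gyr units. A one-off sketch of a single entry, assuming a last-merger epoch of aexp = 0.8 evaluated at aexp = 1.0:

from astropy.cosmology import WMAP5

t_since = WMAP5.age(1. / 1.0 - 1) - WMAP5.age(1. / 0.8 - 1)
print(t_since)  # time elapsed since the z = 0.25 merger, in Gyr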
Example #15
def mock_vpf(mock_cart_coords, spheresfile, simul_cosmo):

    gals = h5_arr(mock_cart_coords, "coords")
    name = mock_cart_coords.split("/")[-1].split(".")[0]

    if simul_cosmo == "Planck":
        # First make h free
        Planck13.__init__(100.0, Planck13.Om0)
        cosmo = Planck13
    elif simul_cosmo == "WMAP":
        WMAP5.__init__(100.0, WMAP5.Om0)
        cosmo = WMAP5
    comv = cosmo.comoving_distance

    gal_baum = cKDTree(gals)

    rad = np.arange(1.0, 67.0, 5.0)

    for r_i, r in enumerate(rad):

        spheres = h5_arr(spheresfile, "radecz_{0}".format(str(r_i * 5 + 1)))
        voids = np.zeros(spheres.shape[0])

        for i, sphere in enumerate(spheres):

            rang = Angle(sphere[0], u.deg)
            decang = Angle(sphere[1], u.deg)

            dis = Distance(comv(sphere[2]), u.Mpc)

            coord = ICRSCoordinates(rang, decang, distance=dis)

            sph_cen = np.array([coord.x.value, coord.y.value, coord.z.value])

            nn = gal_baum.query(sph_cen)

            print "rad: ", r, ", sphere: ", i

            if not nn[0] < r:

                voids[i] = 1

        arr2h5(voids,
                "{0}/vpf_out/{1}.hdf5".format(os.path.dirname(spheresfile), name),
                "voids_{0}".format(str(r_i * 5 + 1)))
Example #16
def get_theta(z_max):
    # set theta_min and theta_max for each redshift to a physical range of 0.3 Mpc - 2.5 Mpc
    theta_min = []
    theta_max = []
    for red in range(0, 25):
        d1 = cosmo.comoving_distance([z_max[red]])
        thetamin = 0.3 * 180 / (numpy.pi * d1.value)
        thetamax = 2.5 * 180 / (numpy.pi * d1.value)
        theta_min.append(thetamin)
        theta_max.append(thetamax)
    return (theta_min, theta_max)
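The conversion in get_theta is the small-angle relation theta = L / D_C, recast from radians to degrees. A quick cross-check against astropy's built-in comoving scale; Planck13 here is just an assumed stand-in for the cosmo object the function uses:

import numpy as np
import astropy.units as u
from astropy.cosmology import Planck13 as cosmo

d = cosmo.comoving_distance(1.0).value                 # Mpc
theta_deg = 0.3 * 180.0 / (np.pi * d)                  # get_theta's formula
theta_check = (0.3 * u.Mpc / cosmo.kpc_comoving_per_arcmin(1.0)).to(u.deg)
print(theta_deg, theta_check)                          # the two should agree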
Example #17
def _initialize_cosmology():
    """Initlized internal __core__ variables storing the trend of redshift vs
    comoving distance. Default cosmology is from WMAP5.
    ----------------------------------------------------------------------------
    Args:
        None
    Returns:
        None
    """
    # TODO:
    #     Talk to someone about this. Are globals the best way to do this.
    redshift_array = np.linspace(0.0, 10.0, 1000)
    comov_array = WMAP5.comoving_distance(redshift_array)
    global _comov_dist_to_redshift_spline
    _comov_dist_to_redshift_spline = iu_spline(comov_array, redshift_array)
    global _initilized_cosmology
    _initilized_cosmology = True
    return None
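A self-contained version of the spline setup above, assuming iu_spline is scipy's InterpolatedUnivariateSpline (comoving distance is monotonic in redshift, so the inversion is well defined):

import numpy as np
from scipy.interpolate import InterpolatedUnivariateSpline
from astropy.cosmology import WMAP5

redshift_array = np.linspace(0.0, 10.0, 1000)
comov_array = WMAP5.comoving_distance(redshift_array).value  # Mpc
comov_dist_to_redshift = InterpolatedUnivariateSpline(comov_array, redshift_array)

print(comov_dist_to_redshift(1000.0))  # redshift at a comoving distance of 1000 Mpc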
Example #18
def process_nbar(nbarfile, nz_dict_file, cosmology, radeczfile=None):
    """
    Parameters
    ----------

    nbarfile : str
        the path to and name of the corrected nbar file
    nz_dict_file : str
        path to and name of the json file with the nbar dict
    cosmology : str, "WMAP" or "Planck"
        the cosmology to compute shell volumes with
    radeczfile : str, optional
        path to the data or mock file to process
    """

    # magic number for width around maximum
    Q = 0.65
    # magic number for shell vol computation
    Nfrac = (6769.0358 * np.pi) / 129600

    if cosmology == "Planck":
        Planck13.__init__(100.0, Planck13.Om0)
        cosmo = Planck13
    elif cosmology == "WMAP":
        WMAP5.__init__(100.0, WMAP5.Om0)
        cosmo = WMAP5
    comv = cosmo.comoving_distance

    nbar_corr = np.loadtxt(nbarfile)
    nz_dict = {"tophat height for zrange": Q}

    # Cut out the first bit of crap (works for CMASS, dunno about LOWZ)
    ind03 = np.abs(nbar_corr[:, 0] - 0.3).argmin()

    nbar_corr = nbar_corr[ind03:, :]

    zcen = nbar_corr[:, 0]
    z_near = nbar_corr[:, 1]
    z_far = nbar_corr[:, 2]
    corr_gal_counts = nbar_corr[:, 6]

    nbar = []
    shell_vols = []

    for i in range(len(zcen)):

        shell_vols.append(Nfrac * calc_shell_vol(comv, z_near[i], z_far[i], zcen[i]))
        nbar.append(corr_gal_counts[i] / shell_vols[i])

    nbar = np.array(nbar)
    shell_vols = np.array(shell_vols)

    # Find nbar peak and index
    max_nbar = np.max(nbar)
    max_i = int(np.where(nbar == max_nbar)[0])

    nz_dict["max_nbar_corr"] = max_nbar
    nz_dict["nbar_corr_tophat"] = Q * max_nbar
    nz_dict["z_nbar_max"] = zcen[max_i]

    # get the interval edge indices
    L = np.abs(nbar[:max_i] - max_nbar * Q).argmin()
    R = max_i + np.abs(nbar[max_i:] - max_nbar * Q).argmin()

    nbar = nbar[L:R + 1]
    shell_vols = shell_vols[L:R + 1]

    nz_dict["zlo"] = zcen[L]
    nz_dict["zhi"] = zcen[R]

    nz_dict["avg_nbar_corr"] = np.average(nbar)
    nz_dict["total_shell_vol"] = np.sum(shell_vols)

    if radeczfile:

        radecz = h5_arr(radeczfile, "radecz")

        # Make the redshift cut in the nbar array with right cosmology
        nbar_corr = nbar_corr[(nz_dict["zlo"] <= nbar_corr[:, 0]) * \
                            (nbar_corr[:, 0] <= nz_dict["zhi"])]

        # Get the bin edges for binning the observed galaxies
        zbinedges = np.append(nbar_corr[0, 1], nbar_corr[:, 2])

        # Find the counts per bin and convert to nbar
        H = np.histogram(radecz[:, 2], bins=zbinedges)
        hist_nbar = H[0] / shell_vols

        if not radeczfile.split('/')[-2] == "mocks_hierarchical":
            # save the average downsampled value if it's the data file
            nz_dict["avg_nbar_down"] = np.average(hist_nbar)

            # The number to downsample to in each bin
            # (multiply bin number by the relative fraction determined from
            #  corrected distribution of nbar)
            nz_dict["nbar_data_tophat"] = 0.95 * nz_dict["nbar_corr_tophat"] * (nz_dict["avg_nbar_down"] / nz_dict["avg_nbar_corr"])
            factor_arr = nz_dict["nbar_data_tophat"] / hist_nbar

        # if we are dealing with a mockfile, then there is an extra factor to make
        # the average equal that of the data
        elif radeczfile.split('/')[-2] == "mocks_hierarchical":

            # have to open the existing json file
            jf = open(nz_dict_file)
            nz_dict = json.load(jf)

            factor_arr = nz_dict["nbar_data_tophat"] / hist_nbar

            jf.close()

        num_down = np.rint(factor_arr * H[0])
        num_down = num_down.astype(int)

        # make a mask for the final array for analysis within the redshift limits
        finmask = np.array(radecz.shape[0] * [False])

        for i, nd in enumerate(num_down):
            """Turn on the right amount of galaxies in each bin."""
            zbin_ids = np.where(((zbinedges[i] < radecz[:, 2]) * (radecz[:, 2] <= zbinedges[i + 1])) == True)

            if zbin_ids[0].shape[0] == 0:
                continue

            keep = np.random.choice(zbin_ids[0], size=nd, replace=False)

            finmask[keep] = True

        radecz = radecz[finmask]

        if not radeczfile.split('/')[-2] == "mocks_hierarchical":
            # and save downsampled array to a hdf5 file
            arr2h5(radecz, "{0}/radecz_down.hdf5".format(os.path.dirname(nz_dict_file)), "radecz", mode='w')

        elif radeczfile.split('/')[-2] == "mocks_hierarchical":
            # save mocks to a hdf5 file
            mock_no = radeczfile.split('/')[-1].split('.')[0]
            arr2h5(radecz, "{0}/mocks/rdz_down/{1}.hdf5".format(os.path.dirname(nz_dict_file), mock_no), "radecz", mode='w')


    if radeczfile is None or radeczfile.split('/')[-2] != "mocks_hierarchical":
        # don't save the json if we're working on a mock
        nf = open(nz_dict_file, 'w')

        json.dump(nz_dict, nf, sort_keys=True, indent=4, separators=(',', ':\t'))

        nf.close()
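calc_shell_vol is not shown in this example. A hedged guess at what it computes (ignoring its zcen argument): the full-sky comoving volume between z_near and z_far, which the Nfrac prefactor then scales down to the survey footprint:

import numpy as np
from astropy.cosmology import WMAP5

def shell_vol(comv, z_near, z_far):
    """Full-sky comoving volume between two redshift shells, in Mpc^3."""
    r1 = comv(z_near).value
    r2 = comv(z_far).value
    return 4.0 * np.pi / 3.0 * (r2 ** 3 - r1 ** 3)

print(shell_vol(WMAP5.comoving_distance, 0.43, 0.45))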
Example #19
def generate_links(mt, aexp_list, z0_clusters, particles_dir):
    """Generates links from halo particle data"""

    if restart:
        phalos, last_aexp = mt.restart_mergertree()
        print "restarting merger tree code from %0.4f" % last_aexp
        particles_file = "%s/halo_particles_a%0.4f.dat" % (particles_dir, last_aexp)

    else:
        phalos = z0_clusters
        z0id_0 = dict((phalo, phalo) for phalo in phalos)
        particles_file = "%s/halo_particles_a%0.4f.dat" % (particles_dir, aexp_list[0])
    
    particles_parent = cart_io.read_halo_particles(particles_file, clusters=phalos)

    for i0 in range(len(aexp_list)-1):

        i1 = i0 + 1 
        parent_aexp = aexp_list[i0]
        child_aexp = aexp_list[i1]

        if restart and child_aexp >= last_aexp:
            continue

        t1 = WMAP5.age(1./parent_aexp-1.).value
        t2 = WMAP5.age(1./child_aexp-1.).value
        dt = math.fabs(t1 - t2) * 1.0e9 * 3.15569e7 #convert Gyr to seconds

        print "Finding matches for %0.4f, %0.4f" % (parent_aexp, child_aexp)
        print str(len(phalos))+" halos identified for matching"

        #get halo properties from database
        properties = ["x","y","z", "vx","vy","vz", "M_hc", "r_hc", "num_particles"]
        halos = mt.get_halo_properties(phalos, properties, parent_aexp)

        #load child_particles
        particles_file = "%s/halo_particles_a%0.4f.dat" % (particles_dir, child_aexp)
        particles_child = cart_io.read_halo_particles(particles_file, min_np = 0)

        links_data = []
        z0id_1 = {}
        #loop through parent halos

        for phalo_id in phalos:
            
            phalo = halos[halos["id"] == phalo_id].squeeze()

            logging.debug( "Checking cluster,", parent_aexp, phalo )

            searchbox = ut.define_searchbox(phalo, dt,  mt.boxsize, 
                                            search_distance=search_distance_multiplier)

            chalos = mt.get_halos_within_distance(child_aexp, searchbox, ["x","y","z"],
                            mass_cut = 0.01*phalo["M_hc"])


            prog_found = False
            if not chalos.empty:            

                z0id = z0id_0[phalo_id]

                this_parent_particles_set = set(map(itemgetter(0),particles_parent[phalo_id]))
                min_joint_particles = min_joint_particles_factor * len(this_parent_particles_set)

                #loop through satellites within search box
                for i, chalo in chalos.iterrows():

                    chalo_id = chalo["id"]

                    #consider redoing with np.intersect1d(parent, child)
                    shared_particles = this_parent_particles_set.intersection(map(itemgetter(0),particles_child[chalo_id]))
                
                    if len(shared_particles) > min_joint_particles:

                        ratio = float(len(particles_child[chalo_id]))/float(len(particles_parent[phalo_id])) 
                        logging.debug( "Possible progenitor found: ", chalo_id, len(shared_particles), ratio)

                        distance = ut.distance_between_halos(chalo, phalo, mt.boxsize)

                        links_data.append((parent_aexp, child_aexp, phalo_id, chalo_id, len(shared_particles),
                                           ratio, distance, z0id, 0 ))
                        z0id_1[chalo_id] = z0id
                        prog_found = True

            if not prog_found:
                logging.debug("No matches found for halo "+str(phalo_id)+" at aexp = "+str(child_aexp))
                mt.mark_leaf(parent_aexp, phalo_id)
        
        mt.insert("mergertree", links_data)            
        mt.commit()

        z0id_0 = z0id_1
        phalos = z0id_0.keys()
        particles_parent = particles_child
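ut.define_searchbox is external to this example. A heavily hedged sketch of what it presumably does: bound the progenitor search to the region the halo could have crossed in time dt, position +/- (|v| * dt * multiplier + halo radius), wrapped into the periodic box. All names and unit conventions below are illustrative, not the actual ut implementation:

import numpy as np

KM_PER_MPC = 3.0857e19

def define_searchbox(halo, dt, boxsize, search_distance=1.5):
    """Per-axis (min, max) bounds; positions/radius in Mpc, velocity km/s, dt s."""
    pos = np.array([halo["x"], halo["y"], halo["z"]])
    vel = np.array([halo["vx"], halo["vy"], halo["vz"]])
    reach = np.abs(vel) * dt / KM_PER_MPC * search_distance + halo["r_hc"]
    return np.stack([(pos - reach) % boxsize, (pos + reach) % boxsize], axis=1)

# e.g. define_searchbox({"x": 5., "y": 5., "z": 5., "vx": 300., "vy": 0.,
#                        "vz": 0., "r_hc": 1.}, dt=3.2e16, boxsize=64.)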
Example #21
def vpf(dat_dir, Nsph, simul_cosmo, rad):

    # Grab the data coordinates
    gals = h5_arr("./dat/out/{0}/{1}/gals_cart_coords.hdf5".
                      format(dat_dir, simul_cosmo), "cart_pts")

    # Get details about the redshift interval being considered
    nbar_dict = json.load(open("./dat/out/{0}/{1}/nbar_zrange.json".
                                   format(dat_dir, simul_cosmo)))
    zlo = nbar_dict["zlo"]
    zhi = nbar_dict["zhi"]

    # Get the search points
    good_pts = h5_arr("./dat/out/{0}/srch_radec.hdf5".format(dat_dir), "good_pts")
    bad_pts = h5_arr("./dat/out/{0}/veto.hdf5".format(dat_dir),
                     "bad_pts")

    # Set angular radius of effective area around bad points
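    # (the magic number 9.8544099e-05 is the per-point area in deg^2: solving
    #  2*pi*(1 - cos(r)) = area * (pi/180)^2 gives the equal-area cap radius)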
    bad_r = np.arccos(1.0 - (np.pi * 9.8544099e-05) / (2 * 180 ** 2))
    bad_r_deg = np.rad2deg(bad_r)

    # Set the cosmology with h free
    # Here the cosmology is based on WMAP (for first MultiDark simulation)
    if simul_cosmo == "Planck":
        # First make h free
        Planck13.__init__(100.0, Planck13.Om0)
        cosmo = Planck13
    elif simul_cosmo == "WMAP":
        WMAP5.__init__(100.0, WMAP5.Om0)
        cosmo = WMAP5
    comv = cosmo.comoving_distance

    # Build the trees

    # galaxy tree
    gal_baum = cKDTree(gals)

    # tree of bad points (angular coordinates on unit sphere)
    bad_xyz = radec2xyz(bad_pts)
    veto_baum = cKDTree(bad_xyz)

    # Initialise final output arrays
#    rad = np.arange(1.0, 67.0, 5.0)  doing it one radius at a time
#    P_0 = np.zeros(rad.shape)

    # No. of spheres and norm
#     Nsph_arr = Nsph * np.array(4 * [0.01] + 4 * [0.1] + 4 * [1.0])
#     norm = 1. / Nsph_arr
#    norm = 1. / Nsph

    rand_i = 0

    for r_i, r in enumerate(rad):

        # start the count of successful voids
        count = 0

        # Custom zrange for sphere size
        dis_near = Distance(comv(zlo).value + r, u.Mpc)
        dis_far = Distance(comv(zhi).value - r, u.Mpc)

        z_a = dis_near.compute_z(cosmology=cosmo)

        z_b = dis_far.compute_z(cosmology=cosmo)

        for i in range(Nsph):  # _arr[r_i]):

            # compensate for finite length of mask file
            rand_i = rand_i % 999999

            radec = good_pts[rand_i, :]

            rang = Angle(radec[0], u.deg)
            decang = Angle(radec[1], u.deg)

            randz = (z_a ** 3 + \
                     (z_b ** 3 - z_a ** 3) * np.random.rand(1)[0]) ** (1. / 3.)
            dis = Distance(comv(randz), u.Mpc)

            coord = ICRSCoordinates(rang, decang, distance=dis)

            sph_cen = np.array([coord.x.value, coord.y.value, coord.z.value])

            nn = gal_baum.query(sph_cen)

            print "rad: ", r, ", sphere: ", i

            if not nn[0] < r:

                # add instance to probability count
                count += 1

                # record quality of sphere using spline values for intersection
                # with bad points

                # Get radius of circular projection of sphere
                R = np.arcsin(r / np.sqrt(np.sum(sph_cen[:] ** 2)))

                # Get coordinates of circle centre on unit sphere
                crc_cen = radec2xyz(radec)[0]

                # Compute tree search radius from Cosine rule
                # (include points extending beyond sphere edge to account for
                # finite area around bad points)
                l_srch = np.sqrt(2. - 2. * np.cos(R))

                # Run search
                pierce_l = veto_baum.query_ball_point(crc_cen, l_srch)

                bad_vol = 0.

                R = np.degrees(R)  # need in degrees for bad_vol computation

                for pt in pierce_l:

                    pt_ang = bad_pts[pt]
                    dis = np.degrees(central_angle(pt_ang, radec))
                    l = dis / R

                    bad_vol += 1.5 * (bad_r_deg / R) ** 2 \
                                   * np.sqrt(1.0 - l ** 2)

                f_r = open("./dat/out/{0}/{1}/vpf_out/volfrac_{2}.dat".
                               format(dat_dir, simul_cosmo, r),
                           'a')
                f_r.write("{0}\n".format(bad_vol))
                f_r.close()

            rand_i += 1
Example #22
Create a custom cosmology object
>>> from astropy.cosmology import FlatLambdaCDM
>>> cosmo = FlatLambdaCDM(H0=70, Om0=0.3)
>>> cosmo
FlatLambdaCDM(H0=70, Om0=0.3, Ode0=0.7)

Compute the comoving volume to z=6.5 in cubic Mpc using
this cosmology
>>> cosmo.comoving_volume(6.5)
2521696198211.6924

Compute the age of the universe in Gyr using the
pre-defined WMAP 5-year and WMAP 9-year cosmologies
>>> from astropy.cosmology import WMAP5, WMAP9
>>> WMAP5.age(0)
13.723782349795023
>>> WMAP9.age(0)
13.768899510689097

Create a cosmology with a varying `w`
>>> from astropy.cosmology import Flatw0waCDM
>>> cosmo = Flatw0waCDM(H0=70, Om0=0.3, w0=-1, wa=0.2)

Find the separation in proper kpc at z=4 corresponding to
10 arcsec in this cosmology compared to a WMAP9 cosmology
>>> cosmo.kpc_proper_per_arcmin(4) * 10 / 60.
68.87214405278925
>>> WMAP9.kpc_proper_per_arcmin(4) * 10 / 60.
71.21374615575363
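Note that the outputs above are from an older astropy; current versions return Quantity objects rather than bare floats:
>>> from astropy.cosmology import WMAP9
>>> WMAP9.age(0)
<Quantity 13.76889951 Gyr>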
Example #23
def calc_age_from_aexp(aexp):

    return WMAP5.age(1./float(aexp)-1).value
Example #24
from scipy.spatial import cKDTree


simul_cosmo = "WMAP"


def spherical_cap(h):
    return 0.75 * (h ** 2) * (1 - h / 3)
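# spherical_cap(h) is the volume of a cap of height h (in units of the sphere
# radius) as a fraction of the full sphere:
# V_cap / V_sph = h^2 (3 - h) / 4 = 0.75 * h^2 * (1 - h / 3)
# Sanity check: a cap of height r (half the sphere) holds half the volume.
assert abs(spherical_cap(1.0) - 0.5) < 1e-12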


if simul_cosmo == "Planck":
    # First make h free
    Planck13.__init__(100.0, Planck13.Om0)
    cosmo = Planck13
elif simul_cosmo == "WMAP":
    WMAP5.__init__(100.0, WMAP5.Om0)
    cosmo = WMAP5

Nsph = 10000000
rad = np.arange(5.0, 66.0, 5.0)

As = np.arange(0.0, 1.0, 0.05)
Bs = np.arange(0.0, 1.0, 0.05)

splarr = np.loadtxt("test_dat/edge_splarr.dat")
A, B = np.meshgrid(As, Bs)

inty = interp2d(A[0, :], B[:, 0], splarr)

for r_i, r in enumerate(rad):
Example #25
bad_pts = np.loadtxt("in/CMASS_DATA/north_block_outside.dat", usecols=(0, 1))

nbar_vals = json.load(
    open("out/{0}/{1}/nbar_zrange.json".format(survey_cap, simul_cosmo)))
zlo = nbar_vals["zlo"]
zhi = nbar_vals["zhi"]

bad_r = np.arccos(1.0 - (np.pi * 9.8544099e-05) / (2 * 180**2))
bad_r_deg = np.rad2deg(bad_r)

if simul_cosmo == "Planck":
    Planck13.__init__(100.0, Planck13.Om0)
    cosmo = Planck13
elif simul_cosmo == "WMAP":
    WMAP5.__init__(100.0, WMAP5.Om0)
    cosmo = WMAP5
comv = cosmo.comoving_distance


def radec2xyz(radecarr):

    radecarr = np.atleast_2d(radecarr)
    xyzarr = np.zeros((radecarr.shape[0], 3))
    xyzarr[:, 0] = np.cos(np.radians(radecarr[:, 1])) * \
                   np.cos(np.radians(radecarr[:, 0]))
    xyzarr[:, 1] = np.cos(np.radians(radecarr[:, 1])) * \
                   np.sin(np.radians(radecarr[:, 0]))
    xyzarr[:, 2] = np.sin(np.radians(radecarr[:, 1]))

    return xyzarr
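# Quick sanity check: radec2xyz maps onto the unit sphere, so the north
# celestial pole should land at (0, 0, 1).
print(radec2xyz(np.array([[45.0, 90.0]])))  # ~[[0., 0., 1.]]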
Example #26
def process_nbar(nbarfile, nz_dict_file, cosmology, radeczfile=None):
    """
    Parameters
    ----------

    nbarfile : str
        the path to and name of the corrected nbar file
    nz_dict_file : str
        path to and name of the json file with the nbar dict
    cosmology : str, "WMAP" or "Planck"
        the cosmology to compute shell volumes with
    radeczfile : str, optional
        path to the data or mock file to process
    """

    # magic number for width around maximum
    Q = 0.65
    # magic number for shell vol computation
    Nfrac = (6769.0358 * np.pi) / 129600

    if cosmology == "Planck":
        Planck13.__init__(100.0, Planck13.Om0)
        cosmo = Planck13
    elif cosmology == "WMAP":
        WMAP5.__init__(100.0, WMAP5.Om0)
        cosmo = WMAP5
    comv = cosmo.comoving_distance

    nbar_corr = np.loadtxt(nbarfile)
    nz_dict = {"tophat height for zrange": Q}

    # Cut out the first bit of crap (works for CMASS, dunno about LOWZ)
    ind03 = np.abs(nbar_corr[:, 0] - 0.3).argmin()

    nbar_corr = nbar_corr[ind03:, :]

    zcen = nbar_corr[:, 0]
    z_near = nbar_corr[:, 1]
    z_far = nbar_corr[:, 2]
    corr_gal_counts = nbar_corr[:, 6]

    nbar = []
    shell_vols = []

    for i in range(len(zcen)):

        shell_vols.append(Nfrac * calc_shell_vol(comv, z_near[i], z_far[i], zcen[i]))
        nbar.append(corr_gal_counts[i] / shell_vols[i])

    nbar = np.array(nbar)

    # Find nbar peak and index
    max_nbar = np.max(nbar)
    max_i = int(np.where(nbar == max_nbar)[0])

    nz_dict["max_nbar_corr"] = max_nbar
    nz_dict["nbar_corr_tophat"] = Q * max_nbar
    nz_dict["z_nbar_max"] = zcen[max_i]

    # get the interval edge indices
    L = np.abs(nbar[:max_i] - max_nbar * Q).argmin()
    R = max_i + np.abs(nbar[max_i:] - max_nbar * Q).argmin()

    nbar = nbar[L:R + 1]
    shell_vols = shell_vols[L:R + 1]

    nz_dict["zlo"] = zcen[L]
    nz_dict["zhi"] = zcen[R]

    nz_dict["avg_nbar_corr"] = np.average(nbar)
    nz_dict["total_shell_vol"] = np.sum(shell_vols)

    if radeczfile:

        radecz = h5_arr(radeczfile, "radecz")

        # Make the redshift cut in the nbar array with right cosmology
        nbar_corr = nbar_corr[(nz_dict["zlo"] <= nbar_corr[:, 0]) * \
                            (nbar_corr[:, 0] <= nz_dict["zhi"])]

        # Get the bin edges for binning the observed galaxies
        zbinedges = np.append(nbar_corr[0, 1], nbar_corr[:, 2])

        # Find the counts per bin
        H = np.histogram(radecz[:, 2], bins=zbinedges)

        # The number to downsample to in each bin
        # (multiply bin number by the relative fraction determined from
        #  corrected distribution of nbar)
        num_down = np.rint((nz_dict["nbar_corr_tophat"] / nbar[:]) * H[0])
        num_down = num_down.astype(int)

        # make a mask for the final array for analysis within the redshift limits
        finmask = np.array(radecz.shape[0] * [False])

        for i, nd in enumerate(num_down):
            """Turn on the right amount of galaxies in each bin."""
            zbin_ids = np.where(((zbinedges[i] < radecz[:, 2]) * (radecz[:, 2] <= zbinedges[i + 1])) == True)

            if zbin_ids[0].shape[0] == 0:
                continue

            keep = np.random.choice(zbin_ids[0], size=nd, replace=False)

            finmask[keep] = True

        radecz = radecz[finmask]

        if not radeczfile.split('/')[-2] == "mocks_hierarchical":
            # now get nbar for the downsampled data for use in mock processing and simulation
            gal_counts = np.histogram(radecz[:, 2], bins=zbinedges)[0]

            nbar_down = []

            for i in range(len(gal_counts)):

                nbar_down.append(gal_counts[i] / shell_vols[i])

            nbar_down = np.array(nbar_down)

            # save the average downsampled value
            nz_dict["avg_nbar_down"] = np.average(nbar_down)

            # and save downsampled array to a hdf5 file
            arr2h5(radecz, "{0}/radecz_down.hdf5".format(os.path.dirname(nz_dict_file)), "radecz")

        # if we are dealing with a mockfile, then there is an extra factor to make
        # the average equal that of the data
        if radeczfile.split('/')[-2] == "mocks_hierarchical":

            # have to open the existing json file
            jf = open(nz_dict_file)
            nz_dict = json.load(jf)

            gal_counts = np.histogram(radecz[:, 2], bins=zbinedges)[0]

            nbar_mock = []

            for i in range(len(gal_counts)):

                nbar_mock.append(gal_counts[i] / shell_vols[i])

            nbar_mock = np.array(nbar_mock)

            num_down = np.rint((nz_dict["avg_nbar_down"] / np.average(nbar_mock)) * H[0])
            num_down = num_down.astype(int)

            finmask = np.array(radecz.shape[0] * [False])

            for i, nd in enumerate(num_down):
                """Turn on the right amount of galaxies in each bin."""
                zbin_ids = np.where(((zbinedges[i] < radecz[:, 2]) * \
                                     (radecz[:, 2] <= zbinedges[i + 1])) == True)

                keep = np.random.choice(zbin_ids[0], size=nd, replace=False)

                finmask[keep] = True

            radecz = radecz[finmask]

            # and save to a hdf5 file
            mock_no = radeczfile.split('/')[-1].split('.')[0]
            arr2h5(radecz, "{0}/mocks/rdz_down/{1}.hdf5".format(os.path.dirname(nz_dict_file), mock_no), "radecz")

            jf.close()

    if radeczfile is None or radeczfile.split('/')[-2] != "mocks_hierarchical":
        # don't save the json if we're working on a mock
        nf = open(nz_dict_file, 'w')

        json.dump(nz_dict, nf, sort_keys=True, indent=4, separators=(',', ':\t'))

        nf.close()
Example #27
            if len(expected_matches) > 0:
                matches = mt.get_halo_properties(expected_matches, properties, child_aexp)
                for match in matches:
                    print match
                    print "x distance: ", halo["x"] - match["x"]
                    print "y distance: ", halo["y"] - match["y"]
                    print "z distance: ", halo["z"] - match["z"]
                    shared_particles = list( set(map(itemgetter(0),particles_parent[phalo]))
                                    & set(map(itemgetter(0),particles_child[match["id"]])) )
                    print "shared particles: ", len(shared_particles )
                    print "mass ratio: ", float(match["M_hc"])/float(halos["M_hc"])

     #       print "Checking cluster,", parent_aexp, phalo

            #determine time difference between the two epochs in seconds
            t1 = WMAP5.age(1./parent_aexp-1.).value
            t2 = WMAP5.age(1./child_aexp-1.).value
            dt = math.fabs(t1 - t2) * 1.0e9 * 3.15569e7 #convert to seconds

            #search cube determined by parent halo velocity + parent halo radius
            pos = np.array([halo["x"], halo["y"], halo["z"]])
            vel = np.array([halo["vx"], halo["vy"], halo["vz"]])
            searchbox = mt.define_searchbox(pos, vel, dt, halo["r_hc"], search_distance = search_distance_multiplier)
            print searchbox

            chalos = mt.get_halos_within_distance(child_aexp,searchbox, [],
                                masscut = 0.01*halo["M_hc"])
            print sorted(chalos)
            
            #loop through satellites within search box
            prog_found = False
Example #28
#       Planck13.comoving_distance(ar_z))
#
# plt.plot(ar_z, Planck13.comoving_distance(ar_z) / Planck13.comoving_distance(ar_z))
# plt.plot(ar_z, Planck13.comoving_distance(ar_z + ar_delta_z_planck13 - ar_delta_z_wmap7) /
#          Planck13.comoving_distance(ar_z))
# plt.show()

# print(scipy.misc.derivative(func=Planck13.comoving_distance, x0=2, dx=0.1))
# ar_dcmv_dz_planck13 = np.array([scipy.misc.derivative(
#     func=lambda x: Planck13.comoving_distance(x).value, x0=z, dx=0.01) for z in ar_z])
# ar_dcmv_dz_wmap7 = np.array([scipy.misc.derivative(
#     func=lambda x: WMAP7.comoving_distance(x).value, x0=z, dx=0.01) for z in ar_z])
# plt.plot(ar_z, -(ar_dcmv_dz_planck13 - ar_dcmv_dz_wmap7) * ar_delta_z_planck13)
# plt.show()
del scipy.misc

ar_base_cmvd_planck13 = Planck13.comoving_distance(ar_z)
ar_true_planck13_cmvd = Planck13.comoving_distance(ar_z + ar_delta_z_planck13)
ar_base_cmvd_wmap5 = WMAP5.comoving_distance(ar_z)
ar_wmap5_apparent_cmvd = WMAP5.comoving_distance(ar_z + ar_delta_z_planck13)
ar_base_cmvd_wmap7 = WMAP7.comoving_distance(ar_z)
ar_wmap7_apparent_cmvd = WMAP7.comoving_distance(ar_z + ar_delta_z_planck13)
ar_base_cmvd_wmap9 = WMAP9.comoving_distance(ar_z)
ar_wmap9_apparent_cmvd = WMAP9.comoving_distance(ar_z + ar_delta_z_planck13)
plt.plot(ar_z, ar_true_planck13_cmvd - ar_base_cmvd_planck13)
plt.plot(ar_z, ar_wmap5_apparent_cmvd - ar_base_cmvd_wmap5)
plt.plot(ar_z, ar_wmap7_apparent_cmvd - ar_base_cmvd_wmap7)
plt.plot(ar_z, ar_wmap9_apparent_cmvd - ar_base_cmvd_wmap9)
# plt.plot(ar_z, ar_wmap7_apparent_cmvd - ar_true_planck13_cmvd)
plt.show()
Example #29
def box_completeness(Nsph, simul_cosmo):

    if simul_cosmo == "Planck":
        # First make h free
        Planck13.__init__(100.0, Planck13.Om0)
        cosmo = Planck13
    elif simul_cosmo == "WMAP":
        WMAP5.__init__(100.0, WMAP5.Om0)
        cosmo = WMAP5

    rad = np.arange(5.0, 66.0, 5.0)

    As = np.arange(0.0, 1.0, 0.05)
    Bs = np.arange(0.0, 1.0, 0.05)

    splarr = np.loadtxt("test_dat/edge_splarr.dat")
    A, B = np.meshgrid(As, Bs)

    inty = interp2d(A[0, :], B[:, 0], splarr)

    # this number is from the survey, I think
    # should be 2% of the "sky" area
    # (had 138621 for some reason; now use 2% of the area to get the count below)
    bad_pts = 1000 * np.random.rand(34834737093, 2)
    bad_r = 0.0004275  # determined from quick calculation for now
    bad_A = np.pi * bad_r ** 2

    badbaum = cKDTree(bad_pts)

    for r_i, r in enumerate(rad):

        spheres = 1000 * np.random.rand(Nsph, 2)

        bound_bool = (spheres[:, 0] < r) + (spheres[:, 1] < r) + \
                     ((1000 - spheres[:, 0]) < r) + \
                     ((1000 - spheres[:, 1]) < r)
        bad_inds = np.where(bound_bool == True)[0]
        badsphs = spheres[bound_bool]

        # spheres overlapping a corner (compare squared distances to r ** 2)
        pickle_bool = ((badsphs[:, 0] ** 2 + badsphs[:, 1] ** 2) < r ** 2) + \
               (((1000 - badsphs[:, 0]) ** 2 + (1000 - badsphs[:, 1]) ** 2)
                   < r ** 2)
        pickle_inds = bad_inds[pickle_bool]

        for i, sph in enumerate(spheres):

            badvol = 0.

            pierce_pts = badbaum.query_ball_point(sph, r)

            for pt in pierce_pts:
                # retrieve coordinates of points within sphere
                pt_coord = bad_pts[pt]
                # calculate fractional projected distance from centre
                dis = np.sqrt((sph[0] - pt_coord[0]) ** 2 + \
                              (sph[1] - pt_coord[1]) ** 2) / r
                # calculate length pierced through sphere
                l = 2 * np.sqrt(1 - dis ** 2)

                badvol += l * bad_A

            # check if sphere at boundary
            if i in bad_inds:

                if i in pickle_inds:

                    badvol += inty(sph[0] / r, sph[1] / r)

                else:
                    if sph[0] < r:
                        badvol += spherical_cap(1 - sph[0] / r)
                    elif 1000 - sph[0] < r:
                        badvol += spherical_cap(1 - (1000 - sph[0]) / r)

                    if sph[1] < r:
                        badvol += spherical_cap(1 - sph[1] / r)
                    elif 1000 - sph[1] < r:
                        badvol += spherical_cap(1 - (1000 - sph[1]) / r)

            f = open("test_dat/simul_badvol.dat", 'a')
            f.write("{0}\n".format(badvol))
            f.close()
Example #30
    def M0_to_kg(M0):
        return M0 * 1.98847e30

    if action == 'redshiftcalc':

        H0 = 70
        distance = float(form.getfirst("distance", ""))
        redshift = round((distance * H0) / (const.c * 1e-3),
                         4)  # const.c is in meters, converting to km

        print(json.dumps({'redshift': redshift}))

    elif action == 'distancecalc':

        redshift = float(form.getfirst("redshift", ""))
        distance = WMAP5.comoving_distance(redshift).value  # plain float in Mpc, so json.dumps works

        print(json.dumps({'distance': distance}))

    elif action == 'orbitalsepcalc':

        orbital_p = float(form.getfirst("orbital_p", ""))
        total_m = float(form.getfirst("total_m", ""))
        orbital_s = s_to_yrs(
            np.sqrt((4 * (const.pi**2) * (pc_to_m(orbital_p) / 2)**3) /
                    (const.G * M0_to_kg(total_m))))

        print(json.dumps({'orbital_s': orbital_s}))

    elif action == 'orbitalpercalc':
Example #31
vlos = region['z-velocity']*1.e-5 ##km/s

T,P,rho = region['Temperature'], region['Pressure'], region['H_NumberDensity']
#T = [K], P = [dyne/cm^2], rho = [atoms/cm^3]

emiss_HI = region['Emission_LyA']*(4.*np.pi*1.63e-11)
emiss_OVI = region['Emission_OVI']*(4.*np.pi*1.92e-11)
emiss_CIV = region['Emission_CIV']*(4.*np.pi*1.28e-11)

nHI,ne= region['HI_NumberDensity'], region['Electron_NumberDensity']

Z = region['Metallicity'] / 0.02


#setting the ra and dec
dlum_0 = cosmo.luminosity_distance(z_cos)
dlum_0 = dlum_0.to(u.Mpc).value


## this should maybe be using the physical distances...
dra = ((region['x']-pos[0])*pf['Mpc'])/dlum_0
ddec = ((region['y']-pos[1])*pf['Mpc'])/dlum_0

ra_0 = ra_los*u.degree.to(u.radian)
dec_0 = dec_los*u.degree.to(u.radian)

ra = np.zeros(len(x))+ra_0+dra
dec = np.zeros(len(y))+dec_0+ddec


#now the dlum worries about the z component
Example #32
def give_proper_kpc(z,angle,unit):
	if len(angle) == len(unit):
		out = [cosmo.kpc_proper_per_arcmin(z)*(angle[x]*unit[x]).to(u.arcmin) for x in range(len(angle))]
	else:
		out = None
	return out
Example #34
def give_angular_size(z,length,unit):
	if len(length) == len(unit):
		out = [cosmo.arcsec_per_kpc_proper(z)*(length[x]*unit[x]).to(u.kpc) for x in range(len(length))]
	else:
		out = None
	return out
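Example use of the relation give_angular_size wraps, assuming WMAP5 stands in for the cosmo object:

import astropy.units as u
from astropy.cosmology import WMAP5 as cosmo

# angular size of 10 proper kpc at z = 0.5 (roughly 1.6 arcsec)
print(cosmo.arcsec_per_kpc_proper(0.5) * (10.0 * u.kpc))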