#Module-level imports assumed by these excerpts; project-specific helpers such as
#ut, cart_io, and the mt merger-tree interface are not shown on this page.
import math
import logging
from operator import itemgetter

import numpy as np
from astropy.cosmology import WMAP5


def generate_tracks(mt, aexp_list, z0_clusters):
    """Tracks the most massive progenitor line for each z=0 halo."""

    mt.add_column("is_main_line", "BOOLEAN", table="mergertree", default=0)

    properties = [
        "x", "y", "z", "vx", "vy", "vz", "M_hc", "r_hc", "num_particles"
    ]
    z0_halos = mt.get_halo_properties(z0_clusters, properties, aexp_list[0])

    for cluster in z0_clusters:
        print "Finding progenitor line for z0 id: " + str(cluster)

        tree = mt.get_full_tree(cluster)

        parent = z0_halos[z0_halos["id"] == cluster].squeeze()

        progenitor_line = []

        for i, aexp in enumerate(aexp_list[1:]):

            parent_id = parent["id"]

            progenitors = tree[(tree['aexp'] == aexp)
                               & (tree['parent_id'] == parent_id)]

            #time difference between the parent epoch (aexp_list[i]) and this
            #epoch (aexp = aexp_list[i + 1]); this can be pandas-ized
            t1 = WMAP5.age(1. / aexp_list[i] - 1.).value
            t2 = WMAP5.age(1. / aexp - 1.).value
            dt = math.fabs(t1 - t2) * 1.0e9 * 3.15569e7  #convert Gyr to seconds

            #apply smaller searchbox to exclude far off merger candidates
            searchbox = ut.define_searchbox(parent,
                                            dt,
                                            mt.boxsize,
                                            search_distance=1.5)

            #identify most massive progenitor
            if not progenitors.empty:
                main_progenitor = identify_most_massive_progenitor(
                    progenitors, searchbox=searchbox)
                if main_progenitor is None:
                    break

                mt.mark_main_line(main_progenitor['aexp'],
                                  main_progenitor['id'],
                                  parent_id=main_progenitor['parent_id'])
                parent = main_progenitor
                progenitor_line.append(parent["id"])

            else:
                break

        print "end of tree for cluster " + str(cluster)
        print progenitor_line

        mt.commit()
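
A note on the helper used above: identify_most_massive_progenitor is not shown on this
page. Below is a minimal sketch of what such a helper could look like; it assumes
progenitors is a pandas DataFrame with x, y, z, and M_hc columns, assumes the search
box is a (min, max) pair per axis, and ignores periodic wrapping. The real helper (and
the exact format returned by ut.define_searchbox) may differ.

def identify_most_massive_progenitor(progenitors, searchbox=None):
    """Return the most massive progenitor candidate, or None if none qualify.

    Sketch only: assumes searchbox = ((xmin, xmax), (ymin, ymax), (zmin, zmax)).
    """
    candidates = progenitors
    if searchbox is not None:
        for axis, (lo, hi) in zip(("x", "y", "z"), searchbox):
            candidates = candidates[(candidates[axis] >= lo) &
                                    (candidates[axis] <= hi)]
    if candidates.empty:
        return None
    #pick the surviving candidate with the largest halo mass
    return candidates.loc[candidates["M_hc"].idxmax()]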
def load_tmerger_from_file(sim, dir, aexp=1.0):
    """Return the time since the last merger (in Gyr) for each halo id."""

    last_merger = sim.load_last_merger_epochs(dir,aexp)

    tmerger = {}

    for id in last_merger.keys():
        merger_z = (1./float(last_merger[id])-1)
        merger_time = WMAP5.age(merger_z)
        cosmic_age = WMAP5.age(1./float(aexp)-1)

#        merger_time = cosmocalc(merger_z)["zage_Gyr"]
#        cosmic_age = cosmocalc(1./float(aexp)-1)["zage_Gyr"]

        tmerger[id] = cosmic_age - merger_time

    return tmerger
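
The values stored in tmerger are look-back times since the last recorded merger, in
the units returned by WMAP5.age (Gyr). For example, a halo whose last merger epoch is
aexp = 0.5 (z = 1), evaluated at the aexp = 1.0 output, gives roughly:

from astropy.cosmology import WMAP5

merger_z = 1. / 0.5 - 1.                        # = 1.0
t_since = WMAP5.age(0) - WMAP5.age(merger_z)    #~7.8 Gyr for the WMAP5 parameters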
def calc_age_from_aexp(aexp):
    """Age of the universe (in Gyr) at expansion factor aexp."""
    return WMAP5.age(1. / float(aexp) - 1.).value
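
Usage is straightforward; at aexp = 1.0 (z = 0) this returns the present age of the
universe for the WMAP5 parameters, matching the astropy example further down:

age_now = calc_age_from_aexp(1.0)   #~13.72 Gyr
age_z1 = calc_age_from_aexp(0.5)    #age at z = 1, ~5.9 Gyr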
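#Excerpt from a longer linking/diagnostic routine: halo, phalo, expected_matches,
#particles_parent, particles_child, and the *_aexp variables are defined earlier in
#that routine and are not shown here.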
            if len(expected_matches) > 0:
                matches = mt.get_halo_properties(expected_matches, properties, child_aexp)
                for match in matches:
                    print match
                    print "x distance: ", halo["x"] - match["x"]
                    print "y distance: ", halo["y"] - match["y"]
                    print "z distance: ", halo["z"] - match["z"]
                    shared_particles = list( set(map(itemgetter(0),particles_parent[phalo]))
                                    & set(map(itemgetter(0),particles_child[match["id"]])) )
                    print "shared particles: ", len(shared_particles )
                    print "mass ratio: ", float(match["M_hc"])/float(halos["M_hc"])

     #       print "Checking cluster,", parent_aexp, phalo

            #determine time difference between the two epochs in seconds
            t1 = WMAP5.age(1./parent_aexp-1.).value
            t2 = WMAP5.age(1./child_aexp-1.).value
            dt = math.fabs(t1 - t2) * 1.0e9 * 3.15569e7 #convert to seconds

            #search cube determined by parent halo velocity + parent halo radius
            pos = np.array([halo["x"], halo["y"], halo["z"]])
            vel = np.array([halo["vx"], halo["vy"], halo["vz"]])
            searchbox = mt.define_searchbox(pos, vel, dt, halo["r_hc"], search_distance = search_distance_multiplier)
            print searchbox

            chalos = mt.get_halos_within_distance(child_aexp,searchbox, [],
                                masscut = 0.01*halo["M_hc"])
            print sorted(chalos)
            
            #loop through satellites within search box
            prog_found = False
def generate_links(mt, aexp_list, z0_clusters, particles_dir):
    """Generates links from halo particle data"""

    if restart:
        phalos, last_aexp = mt.restart_mergertree()
        print "restarting merger tree code from %0.4f" % last_aexp
        particles_file = "%s/halo_particles_a%0.4f.dat" % (particles_dir,
                                                           last_aexp)

    else:
        phalos = z0_clusters
        z0id_0 = dict((phalo, phalo) for phalo in phalos)
        particles_file = "%s/halo_particles_a%0.4f.dat" % (particles_dir,
                                                           aexp_list[0])

    particles_parent = cart_io.read_halo_particles(particles_file,
                                                   clusters=phalos)

    for i0 in range(len(aexp_list) - 1):

        i1 = i0 + 1
        parent_aexp = aexp_list[i0]
        child_aexp = aexp_list[i1]

        if restart and child_aexp >= last_aexp:
            continue

        t1 = WMAP5.age(1. / parent_aexp - 1.).value
        t2 = WMAP5.age(1. / child_aexp - 1.).value
        dt = math.fabs(t1 - t2) * 1.0e9 * 3.15569e7  #convert Gyr to seconds

        print "Finding matches for %0.4f, %0.4f" % (parent_aexp, child_aexp)
        print str(len(phalos)) + " halos identified for matching"

        #get halo properties from database
        properties = [
            "x", "y", "z", "vx", "vy", "vz", "M_hc", "r_hc", "num_particles"
        ]
        halos = mt.get_halo_properties(phalos, properties, parent_aexp)

        #load child_particles
        particles_file = "%s/halo_particles_a%0.4f.dat" % (particles_dir,
                                                           child_aexp)
        particles_child = cart_io.read_halo_particles(particles_file, min_np=0)

        links_data = []
        z0id_1 = {}
        #loop through parent halos

        for phalo_id in phalos:

            phalo = halos[halos["id"] == phalo_id].squeeze()

            logging.debug("Checking cluster,", parent_aexp, phalo)

            searchbox = ut.define_searchbox(
                phalo,
                dt,
                mt.boxsize,
                search_distance=search_distance_multiplier)

            chalos = mt.get_halos_within_distance(child_aexp, searchbox,
                                                  ["x", "y", "z"],
                                                  mass_cut=0.01 * phalo["M_hc"])

            prog_found = False
            if not chalos.empty:

                z0id = z0id_0[phalo_id]

                this_parent_particles_set = set(
                    map(itemgetter(0), particles_parent[phalo_id]))
                min_joint_particles = min_joint_particles_factor * len(
                    this_parent_particles_set)

                #loop through satellites within search box
                for i, chalo in chalos.iterrows():

                    chalo_id = chalo["id"]

                    #consider redoing with np.intersect1d(parent, child)
                    shared_particles = this_parent_particles_set.intersection(
                        map(itemgetter(0), particles_child[chalo_id]))

                    if len(shared_particles) > min_joint_particles:

                        ratio = float(len(particles_child[chalo_id])) / float(
                            len(particles_parent[phalo_id]))
                        logging.debug("Possible progenitor found: ", chalo_id,
                                      len(shared_particles), ratio)

                        distance = ut.distance_between_halos(
                            chalo, phalo, mt.boxsize)

                        links_data.append(
                            (parent_aexp, child_aexp, phalo_id, chalo_id,
                             len(shared_particles), ratio, distance, z0id, 0))
                        z0id_1[chalo_id] = z0id
                        prog_found = True

            if not prog_found:
                logging.debug("No matches found for halo " + str(phalo_id) +
                              " at aexp = " + str(child_aexp))
                mt.mark_leaf(parent_aexp, phalo_id)

        mt.insert("mergertree", links_data)
        mt.commit()

        z0id_0 = z0id_1
        phalos = z0id_0.keys()
        particles_parent = particles_child
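
The core matching criterion in generate_links is the overlap between the particle ID
sets of a parent halo and each candidate child halo. Stripped of the database and
search-box machinery, the test reduces to something like the sketch below; the
function name and the min_fraction threshold are illustrative stand-ins for
min_joint_particles_factor, not part of the original code.

from operator import itemgetter

def particle_overlap(parent_particles, child_particles, min_fraction=0.1):
    """Return (n_shared, ratio) if the child qualifies as a progenitor link,
    otherwise None.  Particle records are (id, ...) tuples as read by
    cart_io.read_halo_particles."""
    parent_ids = set(map(itemgetter(0), parent_particles))
    child_ids = set(map(itemgetter(0), child_particles))
    shared = parent_ids & child_ids
    if len(shared) <= min_fraction * len(parent_ids):
        return None
    ratio = float(len(child_particles)) / float(len(parent_particles))
    return len(shared), ratio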
For reference, the astropy.cosmology usage examples below show the WMAP5 helper these
functions rely on:
Create a custom cosmology object
>>> from astropy.cosmology import FlatLambdaCDM
>>> cosmo = FlatLambdaCDM(H0=70, Om0=0.3)
>>> cosmo
FlatLambdaCDM(H0=70, Om0=0.3, Ode0=0.7)

Compute the comoving volume to z=6.5 in cubic Mpc using
this cosmology
>>> cosmo.comoving_volume(6.5)
2521696198211.6924

Compute the age of the universe in Gyr using the
pre-defined WMAP 5-year and WMAP 9-year cosmologies
>>> from astropy.cosmology import WMAP5, WMAP9
>>> WMAP5.age(0)
13.723782349795023
>>> WMAP9.age(0)
13.768899510689097

Create a cosmology with a varying equation of state `w`
>>> from astropy.cosmology import Flatw0waCDM
>>> cosmo = Flatw0waCDM(H0=70, Om0=0.3, w0=-1, wa=0.2)

Find the separation in proper kpc at z=4 corresponding to
10 arcsec in this cosmology compared to a WMAP9 cosmology
>>> cosmo.kpc_proper_per_arcmin(4) * 10 / 60.
68.87214405278925
>>> WMAP9.kpc_proper_per_arcmin(4) * 10 / 60.
71.21374615575363
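
The merger-tree functions above convert the age difference between two epochs to
seconds by hand (Gyr * 1.0e9 * 3.15569e7 s/yr). If WMAP5.age returns a Quantity, as in
current astropy releases, the same conversion can be written with astropy units
instead of a hard-coded factor:

import astropy.units as u
from astropy.cosmology import WMAP5

parent_aexp, child_aexp = 1.0, 0.9
t1 = WMAP5.age(1. / parent_aexp - 1.)
t2 = WMAP5.age(1. / child_aexp - 1.)
dt = abs(t1 - t2).to(u.s).value   #seconds between the two epochs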