Example #1
    def get_halo_mask(self, mask_file=None, map_file=None, **kwargs):
        """
        Gets a halo mask from a file or makes a new one.
        :param mask_file (str): specify an hdf5 file to output the halo mask.
        :param map_file (str): specify a text file to output the halo map 
               (locations in image of halos).
        """

        # Get halo map if map_file given.
        if map_file is not None and not os.path.exists(map_file):
            light_cone_halo_map(self, map_file=map_file, **kwargs)

        # Check if file already exists.
        if mask_file is not None and os.path.exists(mask_file):
            mylog.info('Reading halo mask from %s.' % mask_file)
            input = h5py.File(mask_file, 'r')
            self.halo_mask = input['HaloMask'].value
            input.close()

        # Otherwise, make a halo mask.
        else:
            halo_mask_cube = light_cone_halo_mask(self,
                                                  mask_file=mask_file,
                                                  **kwargs)
            # Collapse cube into final mask.
            if ytcfg.getint("yt", "__parallel_rank") == 0:
                self.halo_mask = na.ones(shape=(self.pixels, self.pixels),
                                         dtype=bool)
                for mask in halo_mask_cube:
                    self.halo_mask *= mask
            del halo_mask_cube
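
# --- Usage sketch (illustrative addition, not part of the original listing) ---
# A minimal call to the method above; `lc` stands for an already constructed
# light cone object (its construction is not shown in these examples) and the
# file names are placeholders.
lc.get_halo_mask(mask_file='halo_mask.h5',   # HDF5 cache of the collapsed mask
                 map_file='halo_map.out')    # text list of halo image positions
# Afterwards the mask is available as a 2D boolean array of shape
# (lc.pixels, lc.pixels) in lc.halo_mask.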
Example #2
def light_cone_halo_mask(lightCone, cube_file=None, mask_file=None, **kwargs):
    "Make a boolean mask to cut clusters out of light cone projections."

    pixels = int(lightCone.field_of_view_in_arcminutes * 60.0 /
                 lightCone.image_resolution_in_arcseconds)

    light_cone_mask = []

    # Loop through files in light cone solution and get virial quantities.
    for slice in lightCone.light_cone_solution:
        halo_list = _get_halo_list(slice['filename'], **kwargs)
        light_cone_mask.append(_make_slice_mask(slice, halo_list, pixels))

    # Write out cube of masks from each slice.
    if cube_file is not None and ytcfg.getint("yt", "__parallel_rank") == 0:
        mylog.info("Saving halo mask cube to %s." % cube_file)
        output = h5py.File(cube_file, 'a')
        output.create_dataset('haloMaskCube', data=na.array(light_cone_mask))
        output.close()

    # Write out final mask.
    if mask_file is not None and ytcfg.getint("yt", "__parallel_rank") == 0:
        # Final mask is simply the product of the mask from each slice.
        mylog.info("Saving halo mask to %s." % mask_file)
        finalMask = na.ones(shape=(pixels, pixels))
        for mask in light_cone_mask:
            finalMask *= mask

        output = h5py.File(mask_file, 'a')
        output.create_dataset('HaloMask', data=na.array(finalMask))
        output.close()

    return light_cone_mask
Example #3
def _write_seed_file(seedList, filename):
    "Write list of random seeds to a file."

    mylog.info("Writing random seed list to %s." % filename)

    f = open(filename, 'w')
    for seed in seedList:
        if seed['recycle'] is None:
            f.write("%s\n" % seed['master'])
        else:
            f.write("%s, %s\n" % (seed['master'], seed['recycle']))
    f.close()
Example #4
    def _find_likely_children(self, parentfile, childfile):
        # For each halo in the parent list, identify likely children in the 
        # list of children.
        
        # First, read in the locations of the child halos.
        child_pf = lagos.EnzoStaticOutput(childfile)
        child_t = child_pf['CurrentTimeIdentifier']
        line = "SELECT SnapHaloID, CenMassX, CenMassY, CenMassZ FROM \
        Halos WHERE SnapCurrentTimeIdentifier = %d" % child_t
        self.cursor.execute(line)
        
        mylog.info("Finding likely parents for z=%1.5f child halos." % \
            child_pf["CosmologyCurrentRedshift"])
        
        # Build the kdtree for the children by looping over the fetched rows.
        child_points = []
        for row in self.cursor:
            p = Point()
            p.data = [row[1], row[2], row[3]]
            p.ID = row[0]
            child_points.append(p)
        child_kd = buildKdHyperRectTree(child_points[:],10)

        # Make these just once.
        neighbors = Neighbors()
        neighbors.k = 5

        # Find the parent points from the database.
        parent_pf = lagos.EnzoStaticOutput(parentfile)
        parent_t = parent_pf['CurrentTimeIdentifier']
        line = "SELECT SnapHaloID, CenMassX, CenMassY, CenMassZ FROM \
        Halos WHERE SnapCurrentTimeIdentifier = %d" % parent_t
        self.cursor.execute(line)

        # Loop over the returned rows, and find the likely neighbors for the
        # parents.
        candidates = {}
        for row in self.cursor:
            neighbors.points = []
            neighbors.minDistanceSquared = 100. # should make this a function of the simulation
            cm = [row[1], row[2], row[3]]
            getKNN(cm, child_kd, neighbors, 0., [1.]*3)
            nIDs = []
            for n in neighbors.points:
                nIDs.append(n[1].ID)
            if len(nIDs) < 5:
                # We need to fill in fake halos if there aren't enough halos,
                # which can happen at high redshifts.
                while len(nIDs) < 5:
                    nIDs.append(-1)
            candidates[row[0]] = nIDs
        
        self.candidates = candidates
Example #5
 def _run_halo_finder_add_to_db(self):
     for file in self.restart_files:
         pf = lagos.EnzoStaticOutput(file)
         # If the halos are already found, skip this one.
         dir = os.path.dirname(file)
         if os.path.exists(os.path.join(dir, 'MergerHalos.out')) and \
                 os.path.exists(os.path.join(dir, 'MergerHalos.txt')) and \
                 glob.glob(os.path.join(dir, 'MergerHalos*h5')) != []:
             pass
         else:
             # Run the halo finder.
             if self.halo_finder_function == yt.lagos.HaloFinding.FOFHaloFinder:
                 halos = self.halo_finder_function(pf,
                     link=self.FOF_link_length, dm_only=True)
             else:
                 halos = self.halo_finder_function(pf,
                     threshold=self.halo_finder_threshold, dm_only=True)
             halos.write_out(os.path.join(dir, 'MergerHalos.out'))
             halos.write_particle_lists(os.path.join(dir, 'MergerHalos'))
             halos.write_particle_lists_txt(os.path.join(dir, 'MergerHalos'))
             del halos
         # Now add halo data to the db if it isn't already there by
         # checking the first halo.
         currt = pf['CurrentTimeIdentifier']
         line = "SELECT GlobalHaloID from Halos where SnapHaloID=0\
         and SnapCurrentTimeIdentifier=%d;" % currt
         self.cursor.execute(line)
         result = self.cursor.fetchone()
         if result != None:
             continue
         red = pf['CosmologyCurrentRedshift']
         # Read the halos off the disk using the Halo Profiler tools.
         hp = HP.HaloProfiler(
             file, halo_list_file='MergerHalos.out',
             halo_list_format={'id': 0, 'mass': 1, 'numpart': 2,
                               'center': [7, 8, 9],
                               'velocity': [10, 11, 12],
                               'r_max': 13})
         mylog.info("Entering halos into database for z=%f" % red)
         if self.mine == 0:
             for ID,halo in enumerate(hp.all_halos):
                 numpart = int(halo['numpart'])
                 values = (None, currt, red, ID, halo['mass'], numpart,
                 halo['center'][0], halo['center'][1], halo['center'][2],
                 halo['velocity'][0], halo['velocity'][1], halo['velocity'][2],
                 halo['r_max'],
                 -1,0.,-1,0.,-1,0.,-1,0.,-1,0.)
                 # 23 question marks for 23 data columns.
                 line = ''
                 for i in range(23):
                     line += '?,'
                 # Pull off the last comma.
                 line = 'INSERT into Halos VALUES (' + line[:-1] + ')'
                 self.cursor.execute(line, values)
             self.conn.commit()
         self._barrier()
         del hp
Example #6
    def imagine_minimal_splice(
            self,
            initial_redshift,
            final_redshift,
            decimals=3,
            filename=None,
            redshift_output_string='CosmologyOutputRedshift',
            start_index=0):
        """
        Create imaginary list of redshift outputs to maximally span a redshift interval.
        :param decimals (int): The decimal place to which the output redshift will be rounded.  
               If the decimal place in question is nonzero, the redshift will be rounded up to 
               ensure continuity of the splice.  Default: 3.
        :param filename (str): If provided, a file will be written with the redshift outputs in 
               the form in which they should be given in the enzo parameter file.  Default: None.
        :param redshift_output_string (str): The parameter accompanying the redshift outputs in the 
               enzo parameter file.  Default: "CosmologyOutputRedshift".
        :param start_index (int): The index of the first redshift output.  Default: 0.
        """

        z = initial_redshift
        outputs = []

        while z > final_redshift:
            rounded = na.round(z, decimals=decimals)
            if rounded - z < 0:
                rounded += na.power(10.0, (-1.0 * decimals))
            z = rounded

            deltaz_max = deltaz_forward(
                self.cosmology, z,
                self.enzoParameters['CosmologyComovingBoxSize'])
            outputs.append({'redshift': z, 'deltazMax': deltaz_max})
            z -= deltaz_max

        mylog.info(
            "imagine_maximal_splice: Needed %d data dumps to get from z = %f to %f."
            % (len(outputs), initial_redshift, final_redshift))

        if filename is not None:
            mylog.info("Writing redshift dump list to %s." % filename)
            f = open(filename, 'w')
            for q, output in enumerate(outputs):
                z_string = "%%s[%%d] = %%.%df" % decimals
                f.write(("%s[%d] = %." + str(decimals) + "f\n") %
                        (redshift_output_string,
                         (q + start_index), output['redshift']))
            f.close()

        return outputs
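
# --- Usage sketch (illustrative addition, not part of the original listing) ---
# `es` stands for an instance of the simulation/splice class that defines the
# method above, with self.cosmology and self.enzoParameters already set up.
outputs = es.imagine_minimal_splice(10.0, 0.0,
                                    decimals=3,
                                    filename='redshift_dumps.txt',
                                    start_index=0)
for entry in outputs:
    print entry['redshift'], entry['deltazMax']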
Example #7
    def _read_halo_list(self, listFile):
        """
        Read halo list from a text file.
        Allow for columnar data in varying formats.
        """
        def __isE(arg):
            parts = arg.lower().split('e')
            if len(parts) != 2: return False
            return not (True in [q.isalpha() for q in ''.join(parts)])

        def __get_num(arg):
            if __isE(arg):
                return float(arg)
            if arg != arg.swapcase():
                return arg
            return float(arg)

        mylog.info("Reading halo information from %s." % listFile)
        haloList = []
        listLines = file(listFile)

        fields = self.halo_list_format.keys()
        getID = not 'id' in fields
        getR_max = not 'r_max' in fields

        for line in listLines:
            line = line.strip()
            if not (line.startswith('#')):
                halo = {}
                onLine = line.split()
                for field in fields:
                    if isinstance(self.halo_list_format[field],
                                  types.ListType):
                        halo[field] = [
                            __get_num(onLine[q])
                            for q in self.halo_list_format[field]
                        ]
                    else:
                        halo[field] = __get_num(
                            onLine[self.halo_list_format[field]])
                if getID: halo['id'] = len(haloList)
                if getR_max:
                    halo['r_max'] = self.halo_radius * self.pf.units['mpc']
                else:
                    halo['r_max'] *= self.pf.units['mpc']
                haloList.append(halo)

        mylog.info("Loaded %d halos." % (len(haloList)))
        return haloList
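
# --- Illustrative note (addition, not part of the original listing) ---
# halo_list_format maps field names to 0-based column indices; a list of
# indices marks a vector quantity.  The dictionary passed to HP.HaloProfiler
# in Example #5 is one concrete layout:
halo_list_format = {'id': 0, 'mass': 1, 'numpart': 2,
                    'center': [7, 8, 9],
                    'velocity': [10, 11, 12],
                    'r_max': 13}
# If 'id' is absent, halos are numbered by their position in the file; if
# 'r_max' is absent, self.halo_radius (converted to Mpc) is used instead.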
Example #8
    def _write_filtered_halo_list(self, filename, format="%s"):
        """
        Write out list of filtered halos along with any quantities 
        picked up during the filtering process.
        """

        if len(self.filtered_halos) == 0:
            mylog.error("No halos in filtered list.")
            return

        filename = "%s/%s" % (self.pf.fullpath, filename)
        mylog.info("Writing filtered halo list to %s." % filename)
        file = open(filename, "w")
        fields = [field for field in sorted(self.filtered_halos[0])]
        halo_fields = []
        for halo_field in self.filter_quantities:
            if halo_field in fields:
                fields.remove(halo_field)
                halo_fields.append(halo_field)
        # Make it so number of fields in header is same as number of data columns.
        header_fields = []
        for halo_field in halo_fields:
            if isinstance(self.filtered_halos[0][halo_field], types.ListType):
                header_fields.extend([
                    "%s[%d]" % (halo_field, q)
                    for q in range(len(self.filtered_halos[0][halo_field]))
                ])
            else:
                header_fields.append(halo_field)
        file.write("# ")
        file.write("\t".join(header_fields + fields + ["\n"]))

        for halo in self.filtered_halos:
            for halo_field in halo_fields:
                if isinstance(halo[halo_field], types.ListType):
                    field_data = na.array(halo[halo_field])
                    field_data.tofile(file, sep="\t", format=format)
                else:
                    if halo_field == 'id':
                        file.write("%04d" % halo[halo_field])
                    else:
                        file.write("%s" % halo[halo_field])
                file.write("\t")
            field_data = na.array([halo[field] for field in fields])
            field_data.tofile(file, sep="\t", format=format)
            file.write("\n")
        file.close()
Example #9
 def _check_for_needed_profile_fields(self):
     "Make sure CellVolume and TotalMass fields are added so virial quantities can be calculated."
     all_profile_fields = [hp['field'] for hp in self.profile_fields]
     if not 'CellVolume' in all_profile_fields:
         mylog.info(
             "Adding CellVolume field to so virial quantities can be calculated"
         )
         self.add_profile('CellVolume',
                          weight_field=None,
                          accumulation=True)
     if not 'TotalMassMsun' in all_profile_fields:
         mylog.info(
             "Adding TotalMassMsun field to so virial quantities can be calculated"
         )
         self.add_profile('TotalMassMsun',
                          weight_field=None,
                          accumulation=True)
Example #10
    def _load_halo_data(self, filename=None):
        "Read hop output file or run hop if it doesn't exist."

        # Don't run if hop data already loaded.
        if self.all_halos:
            return

        if filename is None:
            filename = self.halo_list_file

        hopFile = "%s/%s" % (self.pf.fullpath, filename)

        if not (os.path.exists(hopFile)):
            mylog.info("Hop file not found, running hop to get halos.")
            self._run_hop(hopFile)

        self.all_halos = self._read_halo_list(hopFile)
Example #11
 def find_max_cell_location(self, field, finest_levels=True):
     if finest_levels is True:
         gi = (self.grid_levels >= self.max_level - NUMTOCHECK).ravel()
         source = self.grid_collection([0.0] * 3, self.grids[gi])
     else:
         source = self.all_data()
     mylog.debug("Searching %s grids for maximum value of %s",
                 len(source._grids), field)
     max_val, maxi, mx, my, mz, mg = \
         source.quantities["MaxLocation"]( field, lazy_reader=True)
     max_grid = self.grids[mg]
     mc = na.unravel_index(maxi, max_grid.ActiveDimensions)
     mylog.info("Max Value is %0.5e at %0.16f %0.16f %0.16f in grid %s at level %s %s", \
           max_val, mx, my, mz, max_grid, max_grid.Level, mc)
     self.parameters["Max%sValue" % (field)] = max_val
     self.parameters["Max%sPos" % (field)] = "%s" % ((mx, my, mz), )
     return max_grid, mc, max_val, na.array((mx, my, mz), dtype='float64')
Example #12
def _read_seed_file(filename):
    "Read list of random seeds from a file."

    mylog.info("Reading random seed list from %s." % filename)

    seedList = []

    lines = file(filename)
    for line in lines:
        if line[0] != '#':
            line = line.strip()
            onLine = line.split(',')
            if (len(onLine) == 1):
                seedList.append({'master': onLine[0], 'recycle': None})
            else:
                seedList.append({'master': onLine[0], 'recycle': onLine[1]})

    return seedList
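
# --- File format note (illustrative addition, not part of the original listing) ---
# _read_seed_file and the _write_seed_file in Example #3 agree on a simple
# text format: one master seed per line, optionally followed by
# ",<recycle seed>", with '#' lines skipped on read.  A hypothetical file
#
#   123456789
#   987654321,314159
#
# would be returned by _read_seed_file as (seeds are kept as strings):
#   [{'master': '123456789', 'recycle': None},
#    {'master': '987654321', 'recycle': '314159'}]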
Example #13
def light_cone_halo_map(lightCone, map_file='halo_map.out', **kwargs):
    "Make a text list of location of halos in a light cone image with virial quantities."

    haloMap = []

    # Loop through files in light cone solution and get virial quantities.
    for slice in lightCone.light_cone_solution:
        halo_list = _get_halo_list(slice['filename'], **kwargs)
        haloMap.extend(_make_slice_halo_map(slice, halo_list))

    # Write out file.
    if ytcfg.getint("yt", "__parallel_rank") == 0:
        mylog.info("Saving halo map to %s." % map_file)
        f = open(map_file, 'w')
        f.write("#z       x         y        M [Msun]  R [Mpc]   R [image]\n")
        for halo in haloMap:
            f.write("%7.4f %9.6f %9.6f %9.3e %9.3e %9.3e\n" % \
                        (halo['redshift'], halo['x'], halo['y'],
                         halo['mass'], halo['radiusMpc'], halo['radiusImage']))
        f.close()
Example #14
 def read_haloes(self):
     """
     Read in the virial masses of the haloes.
     """
     mylog.info("Reading halo masses from %s" % self.halo_file)
     f = open(self.halo_file, 'r')
     line = f.readline()
     if line == "":
         self.haloes = na.array([])
         return
     while line[0] == '#':
         line = f.readline()
     self.haloes = []
     while line:
         line = line.split()
         mass = float(line[self.mass_column])
         if mass > 0:
             self.haloes.append(float(line[self.mass_column]))
         line = f.readline()
     f.close()
     self.haloes = na.array(self.haloes)
Example #15
 def find_min(self, field):
     """
     Returns (value, center) of location of minimum for a given field
     """
     gI = na.where(self.grid_levels >= 0)  # Slow but pedantic
     minVal = 1e100
     for grid in self.grids[gI[0]]:
         mylog.debug("Checking %s (level %s)", grid.id, grid.Level)
         val, coord = grid.find_min(field)
         if val < minVal:
             minCoord = coord
             minVal = val
             minGrid = grid
     mc = na.array(minCoord)
     pos = minGrid.get_position(mc)
     mylog.info("Min Value is %0.5e at %0.16f %0.16f %0.16f in grid %s at level %s", \
           minVal, pos[0], pos[1], pos[2], minGrid, minGrid.Level)
     self.center = pos
     self.parameters["Min%sValue" % (field)] = minVal
     self.parameters["Min%sPos" % (field)] = "%s" % (pos)
     return minVal, pos
Example #16
 def __init__(self, halos=None, database='halos.db',
         dotfile='MergerTree.gv', current_time=None, link_min=0.2):
     self.database = database
     self.link_min = link_min
     if halos is None:
         mylog.error("Please provide at least one halo to start the tree. Exiting.")
         return None
     result = self._open_database()
     if not result:
         return None
     if type(halos) == types.IntType:
         halos = [halos]
     if current_time is not None:
         halos = self._translate_haloIDs(halos, current_time)
     newhalos = set(halos)
     # A key is the GlobalHaloID for this halo, and the content is a
     # Node object.
     self.nodes = {}
     # A key is the GlobalHaloID for the parent in the relationship,
      # and the content is a Link object.
     self.links = defaultdict(Link)
     # Record which halos are at the same z level for convenience.
      # The key is a z value, and the content is a list of co-leveled halo IDs.
     self.levels = defaultdict(list)
     # For the first set of halos.
     self._add_nodes(newhalos)
     # Recurse over parents.
     while len(newhalos) > 0:
         mylog.info("Finding parents for %d children." % len(newhalos))
         newhalos = self._find_parents(newhalos)
         self._add_nodes(newhalos)
     mylog.info("Writing out to disk.")
     self._open_dot(dotfile)
     self._write_nodes()
     self._write_links()
     self._write_levels()
     self._close_dot()
     self._close_database()
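
# --- Usage sketch (illustrative addition, not part of the original listing) ---
# The class is referred to as MergerTree here only for illustration; it needs
# a halo database such as the one built in Example #5.
MergerTree(halos=0,                  # a single SnapHaloID, or a list of them
           database='halos.db',
           dotfile='MergerTree.gv',  # Graphviz output file
           current_time=None,        # pass a CurrentTimeIdentifier to translate the IDs
           link_min=0.2)             # stored as self.link_min on the tree object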
Example #17
    def _save_light_cone_solution(self, filename="light_cone.dat"):
        "Write out a text file with information on light cone solution."

        mylog.info("Saving light cone solution to %s." % filename)

        f = open(filename, 'w')
        if self.recycleSolution:
            f.write("Recycled Solution\n")
            f.write("OriginalRandomSeed = %s\n" % self.originalRandomSeed)
            f.write("RecycleRandomSeed = %s\n" % self.recycleRandomSeed)
        else:
            f.write("Original Solution\n")
            f.write("OriginalRandomSeed = %s\n" % self.originalRandomSeed)
        f.write("EnzoParameterFile = %s\n" % self.EnzoParameterFile)
        f.write("\n")
        for q, output in enumerate(self.light_cone_solution):
            f.write(
                "Proj %04d, %s, z = %f, depth/box = %f, width/box = %f, axis = %d, center = %f, %f, %f\n"
                %
                (q, output['filename'], output['redshift'],
                 output['DepthBoxFraction'], output['WidthBoxFraction'],
                 output['ProjectionAxis'], output['ProjectionCenter'][0],
                 output['ProjectionCenter'][1], output['ProjectionCenter'][2]))
        f.close()
Example #18
from DataReadingFuncs import *
from ClusterFiles import *
from ContourFinder import *
from Clump import *
from ParticleIO import *
from BaseDataTypes import *
from BaseGridType import *
from EnzoRateData import *
from ObjectFindingMixin import *
from HierarchyType import *
from OutputTypes import *
from Profiles import *

from HaloFinding import *

# We load plugins.  Keep in mind, this can be fairly dangerous -
# the primary purpose is to allow people to have a set of functions
# that get used every time that they don't have to *define* every time.
# This way, other command-line tools can be used very simply.
# Unfortunately, for now, I think the easiest and simplest way of doing
# this is also the most dangerous way.
if ytcfg.getboolean("lagos", "loadfieldplugins"):
    my_plugin_name = ytcfg.get("lagos", "pluginfilename")
    # We assume that it is with respect to the $HOME/.yt directory
    fn = os.path.expanduser("~/.yt/%s" % my_plugin_name)
    if os.path.isfile(fn):
        mylog.info("Loading plugins from %s", fn)
        execfile(fn)

log_fields = []  # @todo: GET RID OF THIS
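
# --- Illustrative addition (not part of the original listing) ---
# The plugin file loaded above is plain Python run with execfile(), so any
# definitions it contains become available after this module is imported.
# A hypothetical ~/.yt plugin file might simply add a helper, for example:
#
#   def my_center_of_box(pf):
#       # Geometric center of the domain (same pattern as in Example #21).
#       return [0.5 * (pf.parameters['DomainLeftEdge'][w] +
#                      pf.parameters['DomainRightEdge'][w])
#               for w in range(pf.parameters['TopGridRank'])]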
Example #19
    def _compute_child_fraction(self, parentfile, childfile):
        # Given a parent and child snapshot, and a list of child candidates,
        # compute what fraction of the parent halo goes to each of the children.
        
        parent_pf = lagos.EnzoStaticOutput(parentfile)
        child_pf = lagos.EnzoStaticOutput(childfile)
        parent_currt = parent_pf['CurrentTimeIdentifier']
        child_currt = child_pf['CurrentTimeIdentifier']
        
        mylog.info("Computing fractional contribututions of particles to z=%1.5f halos." % \
            child_pf['CosmologyCurrentRedshift'])
        
        child_percents = {}
        for i,halo in enumerate(self.candidates):
            if i%self.size != self.mine:
                continue
            # Read in its particle IDs
            parent_IDs = na.array([], dtype='int64')
            parent_masses = na.array([], dtype='float64')
            for h5name in self.h5files[parent_currt][halo]:
                # Get the correct time dict entry, and then the correct h5 file
                # from that snapshot, and then choose this parent's halo
                # group, and then the particle IDs. How's that for a long reach?
                new_IDs = self.h5fp[parent_currt][h5name]['Halo%08d' % halo]['particle_index']
                new_masses = self.h5fp[parent_currt][h5name]['Halo%08d' % halo]['ParticleMassMsun']
                parent_IDs = na.concatenate((parent_IDs, new_IDs[:]))
                parent_masses = na.concatenate((parent_masses, new_masses[:]))
            # Loop over its children.
            temp_percents = {}
            for child in self.candidates[halo]:
                if child == -1:
                    # If this is a fake child, record zero contribution and move
                    # on.
                    temp_percents[-1] = 0.
                    continue
                child_IDs = na.array([], dtype='int64')
                for h5name in self.h5files[child_currt][child]:
                    new_IDs = self.h5fp[child_currt][h5name]['Halo%08d' % child]['particle_index']
                    child_IDs = na.concatenate((child_IDs, new_IDs[:]))
                # The IDs shared by both halos.
                intersect = na.intersect1d(parent_IDs, child_IDs)
                # Pick out the parent particles that go to the child.
                select = na.in1d(parent_IDs, intersect)
                # The fraction by mass of the parent that goes to the child.
                temp_percents[child] = parent_masses[select].sum() / parent_masses.sum()
            child_percents[halo] = temp_percents
        
        # Now we prepare a big list of writes to put in the database.
        write_values = []
        for i,halo in enumerate(self.candidates):
            if i%self.size != self.mine:
                continue
            child_IDs = []
            child_per = []
            for child in self.candidates[halo]:
                if child == -1:
                    # Account for fake children.
                    child_IDs.append(-1)
                    child_per.append(0.)
                    continue
                # We need to get the GlobalHaloID for this child.
                line = 'SELECT GlobalHaloID FROM Halos WHERE \
                SnapCurrentTimeIdentifier=? AND SnapHaloID=?'
                values = (child_currt, child)
                self.cursor.execute(line, values)
                child_globalID = self.cursor.fetchone()[0]
                child_IDs.append(child_globalID)
                child_per.append(child_percents[halo][child])
            # Sort by percentages, descending.
            child_per, child_IDs = zip(*sorted(zip(child_per, child_IDs), reverse=True))
            values = []
            for pair in zip(child_IDs, child_per):
                values.extend(pair)
            values.extend([parent_currt, halo])
            # This has the child ID, child percent listed five times, followed
            # by the currt and this parent halo ID (SnapHaloID).
            values = tuple(values)
            write_values.append(values)

        # Now we do the actual writing, but making sure that parallel tasks
        # don't try to write at the same time.
        line = 'UPDATE Halos SET ChildHaloID0=?, ChildHaloFrac0=?,\
        ChildHaloID1=?, ChildHaloFrac1=?,\
        ChildHaloID2=?, ChildHaloFrac2=?,\
        ChildHaloID3=?, ChildHaloFrac3=?,\
        ChildHaloID4=?, ChildHaloFrac4=?\
        WHERE SnapCurrentTimeIdentifier=? AND SnapHaloID=?;'
        for i in range(self.size):
            # There's a _barrier inside _ensure_db_sync,
            # so the loops are strictly sequential and only one task writes
            # at a time.
            self._ensure_db_sync()
            if i == self.mine:
                for values in write_values:
                    self.cursor.execute(line, values)
                self.conn.commit()
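
# --- Standalone sketch (illustrative addition, not part of the original listing) ---
# The core of the fractional-contribution calculation above, in isolation.
# `na` is the numpy alias used throughout these examples; the IDs and masses
# are made up.
import numpy as na

parent_IDs = na.array([1, 2, 3, 4, 5], dtype='int64')
parent_masses = na.array([1.0, 1.0, 2.0, 2.0, 4.0], dtype='float64')
child_IDs = na.array([3, 4, 5, 6], dtype='int64')

# Particle IDs shared by both halos, then the parent particles among them.
intersect = na.intersect1d(parent_IDs, child_IDs)
select = na.in1d(parent_IDs, intersect)

# Fraction by mass of the parent that ends up in the child: (2+2+4)/10 = 0.8
fraction = parent_masses[select].sum() / parent_masses.sum()
print fraction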
Example #20
    def create_cosmology_splice(self,
                                minimal=True,
                                deltaz_min=0.0,
                                initial_redshift=None,
                                final_redshift=None):
        """
        Create list of datasets to be used for LightCones or LightRays.
        :param minimal (bool): if True, the minimum number of datasets is used to connect the initial and final 
               redshift.  If false, the list will contain as many entries as possible within the redshift 
               interval.  Default: True.
        :param deltaz_min (float): specifies the minimum delta z between consecutive datasets in the returned 
               list.  Default: 0.0.
        :param initial_redshift (float): the initial (highest) redshift in the cosmology splice list.  If none 
               given, the highest redshift dataset present will be used.  Default: None.
        :param final_redshift (float): the final (lowest) redshift in the cosmology splice list.  If none given, 
               the lowest redshift dataset present will be used.  Default: None.
        """

        if initial_redshift is None: initial_redshift = self.InitialRedshift
        if final_redshift is None: final_redshift = self.FinalRedshift

        # Calculate maximum delta z for each data dump.
        self._calculate_deltaz_max()

        # Calculate minimum delta z for each data dump.
        self._calculate_deltaz_min(deltaz_min=deltaz_min)

        cosmology_splice = []

        # Use minimum number of datasets to go from z_i to z_f.
        if minimal:

            z_Tolerance = 1e-3
            z = initial_redshift

            # fill redshift space with datasets
            while ((z > final_redshift)
                   and (na.fabs(z - final_redshift) > z_Tolerance)):

                # For first data dump, choose closest to desired redshift.
                if (len(cosmology_splice) == 0):
                    # Sort data outputs by proximity to current redshift.
                    self.allOutputs.sort(
                        key=lambda obj: na.fabs(z - obj['redshift']))
                    cosmology_splice.append(self.allOutputs[0])

                # Move forward from last slice in stack until z > z_max.
                else:
                    current_slice = cosmology_splice[-1]
                    while current_slice['next'] is not None and \
                            (z < current_slice['next']['redshift'] or \
                                 na.abs(z - current_slice['next']['redshift']) < z_Tolerance):
                        current_slice = current_slice['next']

                    if current_slice is cosmology_splice[-1]:
                        final_redshift = cosmology_splice[-1][
                            'redshift'] - cosmology_splice[-1]['deltazMax']
                        mylog.error(
                            "Cosmology splice incomplete due to insufficient data outputs."
                        )
                        break
                    else:
                        cosmology_splice.append(current_slice)

                z = cosmology_splice[-1]['redshift'] - cosmology_splice[-1][
                    'deltazMax']

        # Make light ray using maximum number of datasets (minimum spacing).
        else:
            # Sort data outputs by proximity to current redshift.
            self.allOutputs.sort(
                key=lambda obj: na.fabs(initial_redshift - obj['redshift']))
            # For first data dump, choose closest to desired redshift.
            cosmology_splice.append(self.allOutputs[0])

            nextOutput = cosmology_splice[-1]['next']
            while (nextOutput is not None):
                if (nextOutput['redshift'] <= final_redshift):
                    break
                if ((cosmology_splice[-1]['redshift'] - nextOutput['redshift'])
                        > cosmology_splice[-1]['deltazMin']):
                    cosmology_splice.append(nextOutput)
                nextOutput = nextOutput['next']
            if (cosmology_splice[-1]['redshift'] -
                    cosmology_splice[-1]['deltazMax']) > final_redshift:
                mylog.error(
                    "Cosmology splice incomplete due to insufficient data outputs."
                )
                final_redshift = cosmology_splice[-1][
                    'redshift'] - cosmology_splice[-1]['deltazMax']

        mylog.info(
            "create_cosmology_splice: Used %d data dumps to get from z = %f to %f."
            % (len(cosmology_splice), initial_redshift, final_redshift))

        return cosmology_splice
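
# --- Usage sketch (illustrative addition, not part of the original listing) ---
# `es` stands for an instance of the class defining the method above, with its
# dataset list (self.allOutputs) already populated.
splice = es.create_cosmology_splice(minimal=True,
                                    initial_redshift=1.0,
                                    final_redshift=0.0)
for output in splice:
    print output['redshift']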
Example #21
    def make_projections(self,
                         axes=[0, 1, 2],
                         halo_list='filtered',
                         save_images=False,
                         save_cube=True,
                         **kwargs):
        "Make projections of all halos using specified fields."

        # Get list of halos for projecting.
        if halo_list == 'filtered':
            self._halo_projection_list = self.filtered_halos
        elif halo_list == 'all':
            self._halo_projection_list = self.all_halos
        elif isinstance(halo_list, types.StringType):
            self._halo_projection_list = self._read_halo_list(halo_list)
        elif isinstance(halo_list, types.ListType):
            self._halo_projection_list = halo_list
        else:
            mylog.error(
                "Keyword, halo_list', must be 'filtered', 'all', a filename, or an actual list."
            )
            return

        if len(self._halo_projection_list) == 0:
            mylog.error("Halo list for projections is empty.")
            return

        # Set resolution for fixed resolution output.
        if save_cube:
            if self.project_at_level == 'max':
                proj_level = self.pf.h.max_level
            else:
                proj_level = int(self.project_at_level)
            proj_dx = self.pf.units[self.projection_width_units] / self.pf.parameters['TopGridDimensions'][0] / \
                (self.pf.parameters['RefineBy']**proj_level)
            projectionResolution = int(self.projection_width / proj_dx)

        outputDir = "%s/%s" % (self.pf.fullpath, self.projection_output_dir)
        self.__check_directory(outputDir)

        center = [
            0.5 * (self.pf.parameters['DomainLeftEdge'][w] +
                   self.pf.parameters['DomainRightEdge'][w])
            for w in range(self.pf.parameters['TopGridRank'])
        ]

        # Create a plot collection.
        pc = raven.PlotCollection(self.pf, center=center)

        for halo in self._get_objs('_halo_projection_list', round_robin=True):
            if halo is None:
                continue
            # Check if region will overlap domain edge.
            # Using non-periodic regions is faster than using periodic ones.
            leftEdge = [(halo['center'][w] - 0.5 * self.projection_width /
                         self.pf.units[self.projection_width_units])
                        for w in range(len(halo['center']))]
            rightEdge = [(halo['center'][w] + 0.5 * self.projection_width /
                          self.pf.units[self.projection_width_units])
                         for w in range(len(halo['center']))]

            mylog.info(
                "Projecting halo %04d in region: [%f, %f, %f] to [%f, %f, %f]."
                % (halo['id'], leftEdge[0], leftEdge[1], leftEdge[2],
                   rightEdge[0], rightEdge[1], rightEdge[2]))

            need_per = False
            for w in range(len(halo['center'])):
                if ((leftEdge[w] < self.pf.parameters['DomainLeftEdge'][w]) or
                    (rightEdge[w] > self.pf.parameters['DomainRightEdge'][w])):
                    need_per = True
                    break

            if need_per:
                region = self.pf.h.periodic_region(halo['center'], leftEdge,
                                                   rightEdge)
            else:
                region = self.pf.h.region(halo['center'], leftEdge, rightEdge)

            # Make projections.
            if not isinstance(axes, types.ListType): axes = list([axes])
            for w in axes:
                # YT projections do not follow the right-hand rule.
                coords = range(3)
                del coords[w]
                x_axis = coords[0]
                y_axis = coords[1]

                for hp in self.projection_fields:
                    pc.add_projection(hp['field'],
                                      w,
                                      weight_field=hp['weight_field'],
                                      source=region,
                                      lazy_reader=False,
                                      serialize=False,
                                      **kwargs)

                # Set x and y limits, shift image if it overlaps domain boundary.
                if need_per:
                    pw = self.projection_width / self.pf.units[
                        self.projection_width_units]
                    shift_projections(self.pf, pc, halo['center'], center, w)
                    # Projection has now been shifted to center of box.
                    proj_left = [
                        center[x_axis] - 0.5 * pw, center[y_axis] - 0.5 * pw
                    ]
                    proj_right = [
                        center[x_axis] + 0.5 * pw, center[y_axis] + 0.5 * pw
                    ]
                else:
                    proj_left = [leftEdge[x_axis], leftEdge[y_axis]]
                    proj_right = [rightEdge[x_axis], rightEdge[y_axis]]

                pc.set_xlim(proj_left[0], proj_right[0])
                pc.set_ylim(proj_left[1], proj_right[1])

                # Save projection data to hdf5 file.
                if save_cube:
                    axis_labels = ['x', 'y', 'z']
                    dataFilename = "%s/Halo_%04d_%s_data.h5" % \
                            (outputDir, halo['id'], axis_labels[w])
                    mylog.info("Saving projection data to %s." % dataFilename)

                    output = h5py.File(dataFilename, "a")
                    # Create fixed resolution buffer for each projection and write them out.
                    for e, hp in enumerate(self.projection_fields):
                        frb = raven.FixedResolutionBuffer(
                            pc.plots[e].data, (proj_left[0], proj_right[0],
                                               proj_left[1], proj_right[1]),
                            (projectionResolution, projectionResolution),
                            antialias=False)
                        dataset_name = "%s_%s" % (hp['field'],
                                                  hp['weight_field'])
                        if dataset_name in output.listnames():
                            del output[dataset_name]
                        output.create_dataset(dataset_name,
                                              data=frb[hp['field']])
                    output.close()

                if save_images:
                    pc.save("%s/Halo_%04d" % (outputDir, halo['id']),
                            force_save=True)

                pc.clear_plots()
            del region
        del pc
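
# --- Usage sketch (illustrative addition, not part of the original listing) ---
# `hp` stands for a halo profiler object on which the projection fields
# (self.projection_fields) have already been registered; setup is not shown.
hp.make_projections(axes=[0, 1, 2],
                    halo_list='filtered',
                    save_images=True,
                    save_cube=True)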
Example #22
    def _get_halo_profile(self,
                          halo,
                          filename,
                          virial_filter=True,
                          force_write=False):
        """
        Profile a single halo and write profile data to a file.
        If file already exists, read profile data from file.
        Return a dictionary of id, center, and virial quantities if virial_filter is True.
        """

        # Read profile from file if it already exists.
        # If not, profile will be None.
        profile = self._read_profile(filename)

        # Make profile if necessary.
        newProfile = profile is None
        if newProfile:

            r_min = 2 * self.pf.h.get_smallest_dx() * self.pf['mpc']
            if (halo['r_max'] / r_min < PROFILE_RADIUS_THRESHOLD):
                mylog.error("Skipping halo with r_max / r_min = %f." %
                            (halo['r_max'] / r_min))
                return None

            sphere = self.pf.h.sphere(halo['center'],
                                      halo['r_max'] / self.pf.units['mpc'])
            if len(sphere._grids) == 0: return None
            new_sphere = False

            if self.use_density_center:
                dc_x = sphere.quantities['WeightedAverageQuantity'](
                    'x', 'MatterDensityXTotalMass')
                dc_y = sphere.quantities['WeightedAverageQuantity'](
                    'y', 'MatterDensityXTotalMass')
                dc_z = sphere.quantities['WeightedAverageQuantity'](
                    'z', 'MatterDensityXTotalMass')
                mylog.info("Moving halo center from %s to %s." %
                           (halo['center'], [dc_x, dc_y, dc_z]))
                halo['center'] = [dc_x, dc_y, dc_z]
                new_sphere = True

            if self.use_field_max_center is not None:
                ma, maxi, mx, my, mz, mg = sphere.quantities['MaxLocation'](
                    self.use_field_max_center)
                mylog.info("Moving halo center from %s to %s." %
                           (halo['center'], [mx, my, mz]))
                halo['center'] = [mx, my, mz]
                new_sphere = True

            if new_sphere:
                # Temporary solution to memory leak.
                for g in self.pf.h.grids:
                    g.clear_data()
                sphere.clear_data()
                del sphere
                sphere = self.pf.h.sphere(halo['center'],
                                          halo['r_max'] / self.pf.units['mpc'])

            if self._need_bulk_velocity:
                # Set bulk velocity to zero out radial velocity profiles.
                if self.velocity_center[0] == 'bulk':
                    if self.velocity_center[1] == 'halo':
                        sphere.set_field_parameter('bulk_velocity',
                                                   halo['velocity'])
                    elif self.velocity_center[1] == 'sphere':
                        sphere.set_field_parameter(
                            'bulk_velocity',
                            sphere.quantities['BulkVelocity']())
                    else:
                        mylog.error("Invalid parameter: VelocityCenter.")
                elif self.velocity_center[0] == 'max':
                    max_grid, max_cell, max_value, max_location = self.pf.h.find_max_cell_location(
                        self.velocity_center[1])
                    sphere.set_field_parameter('bulk_velocity', [
                        max_grid['x-velocity'][max_cell],
                        max_grid['y-velocity'][max_cell],
                        max_grid['z-velocity'][max_cell]
                    ])

            profile = lagos.BinnedProfile1D(sphere,
                                            self.n_profile_bins,
                                            "RadiusMpc",
                                            r_min,
                                            halo['r_max'],
                                            log_space=True,
                                            lazy_reader=False)
            for hp in self.profile_fields:
                profile.add_fields(hp['field'],
                                   weight=hp['weight_field'],
                                   accumulation=hp['accumulation'])

        if virial_filter:
            self._add_actual_overdensity(profile)

        if newProfile:
            mylog.info("Writing halo %d" % halo['id'])
            profile.write_out(filename, format='%0.6e')
        elif force_write:
            mylog.info("Re-writing halo %d" % halo['id'])
            self._write_profile(profile, filename, format='%0.6e')

        if newProfile:
            # Temporary solution to memory leak.
            for g in self.pf.h.grids:
                g.clear_data()
            sphere.clear_data()
            del sphere

        return profile
Example #23
    def _save_light_cone_stack(self,
                               field=None,
                               weight_field=None,
                               filename=None,
                               over_write=True):
        "Save the light cone projection stack as a 3d array in and hdf5 file."

        # Make list of redshifts to include as a dataset attribute.
        redshiftList = na.array(
            [slice['redshift'] for slice in self.light_cone_solution])

        field_node = "%s_%s" % (field, weight_field)
        weight_field_node = "weight_field_%s" % weight_field

        import h5py
        if (filename is None):
            filename = "%s/%s_data" % (self.output_dir, self.output_prefix)
        if not (filename.endswith('.h5')):
            filename += ".h5"

        if (len(self.projection_stack) == 0):
            mylog.debug("save_light_cone_stack: no projection data loaded.")
            return

        mylog.info("Writing light cone data to %s." % filename)

        output = h5py.File(filename, "a")

        node_exists = field_node in output.listnames()

        if node_exists:
            if over_write:
                mylog.info("Dataset, %s, already exists, overwriting." %
                           field_node)
                write_data = True
                del output[field_node]
            else:
                mylog.info("Dataset, %s, already exists in %s, not saving." %
                           (field_node, filename))
                write_data = False
        else:
            write_data = True

        if write_data:
            mylog.info("Saving %s to %s." % (field_node, filename))
            self.projection_stack = na.array(self.projection_stack)
            field_dataset = output.create_dataset(field_node,
                                                  data=self.projection_stack)
            field_dataset.attrs['redshifts'] = redshiftList
            field_dataset.attrs['observer_redshift'] = na.float(
                self.observer_redshift)
            field_dataset.attrs['field_of_view_in_arcminutes'] = na.float(
                self.field_of_view_in_arcminutes)
            field_dataset.attrs['image_resolution_in_arcseconds'] = na.float(
                self.image_resolution_in_arcseconds)

        if (len(self.projection_weight_field_stack) > 0):
            if node_exists:
                if over_write:
                    mylog.info("Dataset, %s, already exists, overwriting." %
                               weight_field_node)
                    if weight_field_node in output.listnames():
                        del output[weight_field_node]
                else:
                    mylog.info(
                        "Dataset, %s, already exists in %s, not saving." %
                        (weight_field_node, filename))
                    write_data = False
            else:
                write_data = True

            if write_data:
                mylog.info("Saving %s to %s." % (weight_field_node, filename))
                self.projection_weight_field_stack = na.array(
                    self.projection_weight_field_stack)
                weight_field_dataset = output.create_dataset(
                    weight_field_node, data=self.projection_weight_field_stack)
                weight_field_dataset.attrs['redshifts'] = redshiftList
                weight_field_dataset.attrs['observer_redshift'] = na.float(
                    self.observer_redshift)
                weight_field_dataset.attrs[
                    'field_of_view_in_arcminutes'] = na.float(
                        self.field_of_view_in_arcminutes)
                weight_field_dataset.attrs[
                    'image_resolution_in_arcseconds'] = na.float(
                        self.image_resolution_in_arcseconds)

        output.close()
Example #24
    def project_light_cone(self,
                           field,
                           weight_field=None,
                           apply_halo_mask=False,
                           node=None,
                           save_stack=True,
                           save_slice_images=False,
                           flatten_stack=False,
                           photon_field=False,
                           **kwargs):
        """
        Create projections for light cone, then add them together.
        :param weight_field (str): the weight field of the projection.  This has the same meaning as in standard 
               projections.  Default: None.
        :param apply_halo_mask (bool): if True, a boolean mask is applied to the light cone projection.  See below for a 
               description of halo masks.  Default: False.
        :param node (str): a prefix to be prepended to the node name under which the projection data is serialized.  
               Default: None.
        :param save_stack (bool): if True, the unflattened light cone data including each individual slice is written to 
               an hdf5 file.  Default: True.
        :param save_slice_images (bool): save images for each individual projection slice.  Default: False.
        :param flatten_stack (bool): if True, the light cone stack is continually flattened each time a slice is added 
               in order to save memory.  This is generally not necessary.  Default: False.
        :param photon_field (bool): if True, the projection data for each slice is divided by 4 Pi R^2, where R 
               is the luminosity distance between the observer and the slice redshift.  Default: False.
        """

        # Clear projection stack.
        self.projection_stack = []
        self.projection_weight_field_stack = []
        if (self.light_cone_solution[-1].has_key('object')):
            del self.light_cone_solution[-1]['object']

        if not (self.output_dir.endswith("/")):
            self.output_dir += "/"

        for q, output in enumerate(self.light_cone_solution):
            if node is None:
                name = "%s%s_%04d_%04d" % (self.output_dir, self.output_prefix,
                                           q, len(self.light_cone_solution))
            else:
                name = "%s%s_%s_%04d_%04d" % (self.output_dir,
                                              self.output_prefix, node, q,
                                              len(self.light_cone_solution))
            output['object'] = lagos.EnzoStaticOutput(output['filename'])
            frb = LightConeProjection(output,
                                      field,
                                      self.pixels,
                                      weight_field=weight_field,
                                      save_image=save_slice_images,
                                      name=name,
                                      node=node,
                                      **kwargs)
            if ytcfg.getint("yt", "__parallel_rank") == 0:
                if photon_field:
                    # Decrement the flux by the luminosity distance. Assume field in frb is in erg/s/cm^2/Hz
                    co = lagos.Cosmology(
                        HubbleConstantNow=(
                            100.0 *
                            self.enzoParameters['CosmologyHubbleConstantNow']),
                        OmegaMatterNow=self.
                        enzoParameters['CosmologyOmegaMatterNow'],
                        OmegaLambdaNow=self.
                        enzoParameters['CosmologyOmegaLambdaNow'])
                    dL = self.cosmology.LuminosityDistance(
                        self.observer_redshift, output['redshift'])  #in Mpc
                    boxSizeProper = self.enzoParameters[
                        'CosmologyComovingBoxSize'] / (
                            self.enzoParameters['CosmologyHubbleConstantNow'] *
                            (1.0 + output['redshift']))
                    pixelarea = (boxSizeProper /
                                 self.pixels)**2  #in proper cm^2
                    factor = pixelarea / (4.0 * na.pi * dL**2)
                    mylog.info("Distance to slice = %e" % dL)
                    frb[field] *= factor  #in erg/s/cm^2/Hz on observer's image plane.

            if ytcfg.getint("yt", "__parallel_rank") == 0:
                if weight_field is not None:
                    # Data come back normalized by the weight field.
                    # Undo that so it can be added up for the light cone.
                    self.projection_stack.append(frb[field] *
                                                 frb['weight_field'])
                    self.projection_weight_field_stack.append(
                        frb['weight_field'])
                else:
                    self.projection_stack.append(frb[field])

                # Delete the frb.  This saves a decent amount of ram.
                if (q < len(self.light_cone_solution) - 1):
                    del frb

                # Flatten stack to save memory.
                if flatten_stack and (len(self.projection_stack) > 1):
                    self.projection_stack = [sum(self.projection_stack)]
                    if weight_field is not None:
                        self.projection_weight_field_stack = [
                            sum(self.projection_weight_field_stack)
                        ]

            # Delete the plot collection now that the frb is deleted.
            del output['pc']

            # Unless this is the last slice, delete the dataset object.
            # The last one will be saved to make the plot collection.
            if (q < len(self.light_cone_solution) - 1):
                del output['object']

        if ytcfg.getint("yt", "__parallel_rank") == 0:
            # Add up slices to make light cone projection.
            if (weight_field is None):
                lightConeProjection = sum(self.projection_stack)
            else:
                lightConeProjection = sum(self.projection_stack) / sum(
                    self.projection_weight_field_stack)

            if node is None:
                filename = "%s%s" % (self.output_dir, self.output_prefix)
            else:
                filename = "%s%s_%s" % (self.output_dir, self.output_prefix,
                                        node)

            # Save the last fixed resolution buffer for the plot collection,
            # but replace the data with the full light cone projection data.
            frb.data[field] = lightConeProjection

            # Write stack to hdf5 file.
            if save_stack:
                self._save_light_cone_stack(field=field,
                                            weight_field=weight_field,
                                            filename=filename)

            # Apply halo mask.
            if apply_halo_mask:
                if len(self.halo_mask) > 0:
                    mylog.info("Applying halo mask.")
                    frb.data[field] *= self.halo_mask
                else:
                    mylog.error("No halo mask loaded, call get_halo_mask.")

            # Make a plot collection for the light cone projection.
            center = [
                0.5 *
                (self.light_cone_solution[-1]['object'].
                 parameters['DomainLeftEdge'][w] + self.light_cone_solution[-1]
                 ['object'].parameters['DomainRightEdge'][w])
                for w in range(self.light_cone_solution[-1]
                               ['object'].parameters['TopGridRank'])
            ]
            pc = raven.PlotCollection(self.light_cone_solution[-1]['object'],
                                      center=center)
            pc.add_fixed_resolution_plot(frb, field, **kwargs)
            pc.save(filename)

            # Return the plot collection so the user can remake the plot if they want.
            return pc
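
# --- Usage sketch (illustrative addition, not part of the original listing) ---
# `lc` again stands for an already constructed light cone object; 'Density' is
# only an example field name.
pc = lc.project_light_cone('Density',
                           weight_field=None,
                           save_stack=True,
                           save_slice_images=False,
                           apply_halo_mask=False)
# pc is the returned plot collection and can be used to remake the final image.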
Example #25
    def __init__(self,
                 pf,
                 halo_file=None,
                 omega_matter0=None,
                 omega_lambda0=None,
                 omega_baryon0=0.05,
                 hubble0=None,
                 sigma8input=0.86,
                 primordial_index=1.0,
                 this_redshift=None,
                 log_mass_min=None,
                 log_mass_max=None,
                 num_sigma_bins=360,
                 fitting_function=4,
                 mass_column=5):
        """
        Initialize a HaloMassFcn object to analyze the distribution of haloes
        as a function of mass.
        :param halo_file (str): The filename of the output of the Halo Profiler.
        Default=None.
        :param omega_matter0 (float): The fraction of the universe made up of
        matter (dark and baryonic). Default=None.
        :param omega_lambda0 (float): The fraction of the universe made up of
        dark energy. Default=None.
        :param omega_baryon0 (float): The fraction of the universe made up of
        ordinary baryonic matter. This should match the value
        used to create the initial conditions, using 'inits'. This is 
        *not* stored in the enzo dataset so it must be checked by hand.
        Default=0.05.
        :param hubble0 (float): The expansion rate of the universe in units of
        100 km/s/Mpc. Default=None.
        :param sigma8input (float): The amplitude of the linear power
        spectrum at z=0 as specified by the rms amplitude of mass-fluctuations
        in a top-hat sphere of radius 8 Mpc/h. This should match the value
        used to create the initial conditions, using 'inits'. This is 
        *not* stored in the enzo dataset so it must be checked by hand.
        Default=0.86.
        :param primordial_index (float): This is the index of the mass power
        spectrum before modification by the transfer function. A value of 1
        corresponds to the scale-free primordial spectrum. This should match
        the value used to make the initial conditions using 'inits'. This is 
        *not* stored in the enzo dataset so it must be checked by hand.
        Default=1.0.
        :param this_redshift (float): The current redshift. Default=None.
        :param log_mass_min (float): The log10 of the minimum mass of the halo
        mass range. Default=None.
        :param log_mass_max (float): The log10 of the maximum mass of the halo
        mass range. Default=None.
        :param num_sigma_bins (int): The number of bins (points) to use for
        the calculations and generated fit. Default=360.
        :param fitting_function (int): Which fitting function to use.
        1 = Press-Schechter, 2 = Jenkins, 3 = Sheth-Tormen, 4 = Warren fit
        Default=4.
        :param mass_column (int): The column of halo_file that contains the
        masses of the haloes. Default=5.
        """
        self.pf = pf
        self.halo_file = halo_file
        self.omega_matter0 = omega_matter0
        self.omega_lambda0 = omega_lambda0
        self.omega_baryon0 = omega_baryon0
        self.hubble0 = hubble0
        self.sigma8input = sigma8input
        self.primordial_index = primordial_index
        self.this_redshift = this_redshift
        self.log_mass_min = log_mass_min
        self.log_mass_max = log_mass_max
        self.num_sigma_bins = num_sigma_bins
        self.fitting_function = fitting_function
        self.mass_column = mass_column

        # Determine the run mode.
        if halo_file is None:
            # We are hand-picking our various cosmological parameters
            self.mode = 'single'
        else:
            # Make the fit using the same cosmological parameters as the dataset.
            self.mode = 'haloes'
            self.omega_matter0 = self.pf['CosmologyOmegaMatterNow']
            self.omega_lambda0 = self.pf['CosmologyOmegaLambdaNow']
            self.hubble0 = self.pf['CosmologyHubbleConstantNow']
            self.this_redshift = self.pf['CosmologyCurrentRedshift']
            self.read_haloes()
            if self.log_mass_min is None:
                self.log_mass_min = math.log10(min(self.haloes))
            if self.log_mass_max is None:
                self.log_mass_max = math.log10(max(self.haloes))

        # Input error check.
        if self.mode == 'single':
            if omega_matter0 is None or omega_lambda0 is None or \
                    hubble0 is None or this_redshift is None or \
                    log_mass_min is None or log_mass_max is None:
                mylog.error("All of these parameters need to be set:")
                mylog.error("[omega_matter0, omega_lambda0, hubble0, "
                            "this_redshift, log_mass_min, log_mass_max]")
                mylog.error("[%s, %s, %s, %s, %s, %s]" %
                            (omega_matter0, omega_lambda0, hubble0,
                             this_redshift, log_mass_min, log_mass_max))
                return None

        # Poke the user to make sure they're doing it right.
        mylog.info("""
        Please make sure these are the correct values! They are
        not stored in enzo datasets, so must be entered by hand.
        sigma8input=%f primordial_index=%f omega_baryon0=%f
        """ % (self.sigma8input, self.primordial_index, self.omega_baryon0))
        time.sleep(1)

        # Do the calculations.
        self.sigmaM()
        self.dndm()

        if self.mode == 'haloes':
            self.bin_haloes()
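
A minimal usage sketch (not from the original source) for the 'single' mode, where no halo_file is given and every cosmological parameter must be supplied by hand; pf stands for an already-loaded Enzo dataset and the numerical values are illustrative placeholders:

hmf = HaloMassFcn(pf,
                  omega_matter0=0.27,
                  omega_lambda0=0.73,
                  omega_baryon0=0.05,
                  hubble0=0.70,
                  sigma8input=0.86,
                  this_redshift=0.0,
                  log_mass_min=9.0,
                  log_mass_max=15.0,
                  fitting_function=4)  # 4 = Warren fit
# sigmaM() and dndm() already ran inside __init__, so the fit is available immediately.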
Пример #26
0
    def calculate_light_cone_solution(self, seed=None, filename=None):
        """
        Create list of projections to be added together to make the light cone.
        :param seed (int): the seed for the random number generator.  Any light cone solution 
               can be reproduced by giving the same random seed.  Default: None (each solution 
               will be distinct).
        :param filename (str): if given, a text file detailing the solution will be written out.  Default: None.
        """

        # Box coherence does not apply when using the minimum number of datasets,
        # since each dataset is already projected through its maximum depth.
        if self.use_minimum_datasets and \
                self.minimum_coherent_box_fraction > 0:
            mylog.info(
                "Setting minimum_coherent_box_fraction to 0 with minimal light cone."
            )
            self.minimum_coherent_box_fraction = 0

        # Make sure recycling flag is off.
        self.recycleSolution = False

        # Get rid of old halo mask, if one was there.
        self.halo_mask = []

        if seed is not None:
            self.originalRandomSeed = int(seed)

        # Calculate projection sizes, and get random projection axes and centers.
        na.random.seed(self.originalRandomSeed)

        # For box coherence, keep track of effective depth travelled.
        boxFractionUsed = 0.0

        for q in range(len(self.light_cone_solution)):
            del self.light_cone_solution[q]['previous']
            del self.light_cone_solution[q]['next']
            if (q == len(self.light_cone_solution) - 1):
                z_next = self.final_redshift
            else:
                z_next = self.light_cone_solution[q + 1]['redshift']

            # Calculate fraction of box required for a depth of delta z
            self.light_cone_solution[q]['DepthBoxFraction'] = self.cosmology.ComovingRadialDistance(z_next, self.light_cone_solution[q]['redshift']) * \
                self.enzoParameters['CosmologyHubbleConstantNow'] / self.enzoParameters['CosmologyComovingBoxSize']
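            # A value of 1 means the slice would span the entire comoving box
            # depth, which is what the check below warns about.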

            # Warn if more than 100% of the box depth would be required to reach the next slice.
            if (self.light_cone_solution[q]['DepthBoxFraction'] > 1.0):
                mylog.debug(
                    "Warning: box fraction required to go from z = %f to %f is %f"
                    % (self.light_cone_solution[q]['redshift'], z_next,
                       self.light_cone_solution[q]['DepthBoxFraction']))
                mylog.debug(
                    "Full box delta z is %f, but it is %f to the next data dump."
                    % (self.light_cone_solution[q]['deltazMax'],
                       self.light_cone_solution[q]['redshift'] - z_next))

            # Calculate fraction of box required for width corresponding to requested image size.
            scale = self.cosmology.AngularScale_1arcsec_kpc(
                self.observer_redshift,
                self.light_cone_solution[q]['redshift'])
            size = self.field_of_view_in_arcminutes * 60.0 * scale / 1000.0
            boxSizeProper = self.enzoParameters['CosmologyComovingBoxSize'] / (
                self.enzoParameters['CosmologyHubbleConstantNow'] *
                (1.0 + self.light_cone_solution[q]['redshift']))
            self.light_cone_solution[q][
                'WidthBoxFraction'] = size / boxSizeProper
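            # The field of view converted to proper Mpc at this redshift, divided by
            # the proper box size; a value of 1 means the image spans the full box width.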

            # Get projection axis and center.
            # If using box coherence, only get random axis and center if enough of the box has been used,
            # or if boxFractionUsed will be greater than 1 after this slice.
            if (q == 0) or (self.minimum_coherent_box_fraction == 0) or \
                    (boxFractionUsed > self.minimum_coherent_box_fraction) or \
                    (boxFractionUsed + self.light_cone_solution[q]['DepthBoxFraction'] > 1.0):
                # Random axis and center.
                self.light_cone_solution[q][
                    'ProjectionAxis'] = na.random.randint(0, 3)
                self.light_cone_solution[q]['ProjectionCenter'] = [
                    na.random.random(),
                    na.random.random(),
                    na.random.random()
                ]
                boxFractionUsed = 0.0
            else:
                # Same axis and center as previous slice, but with depth center shifted.
                self.light_cone_solution[q][
                    'ProjectionAxis'] = self.light_cone_solution[
                        q - 1]['ProjectionAxis']
                self.light_cone_solution[q][
                    'ProjectionCenter'] = copy.deepcopy(
                        self.light_cone_solution[q - 1]['ProjectionCenter'])
                self.light_cone_solution[q]['ProjectionCenter'][self.light_cone_solution[q]['ProjectionAxis']] += \
                    0.5 * (self.light_cone_solution[q]['DepthBoxFraction'] + self.light_cone_solution[q-1]['DepthBoxFraction'])
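                # Wrap the shifted center back into the periodic box.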
                if self.light_cone_solution[q]['ProjectionCenter'][
                        self.light_cone_solution[q]['ProjectionAxis']] >= 1.0:
                    self.light_cone_solution[q]['ProjectionCenter'][
                        self.light_cone_solution[q]['ProjectionAxis']] -= 1.0

            boxFractionUsed += self.light_cone_solution[q]['DepthBoxFraction']

        # Store this as the master solution.
        self.master_solution = [
            copy.deepcopy(q) for q in self.light_cone_solution
        ]

        # Write solution to a file.
        if filename is not None:
            self._save_light_cone_solution(filename=filename)
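
A minimal usage sketch (assumed, not from the original source); lc stands for an already-constructed light cone object:

lc.calculate_light_cone_solution(seed=123456789, filename='light_cone.dat')

# The same seed always reproduces the same solution.  Each slice dictionary now
# carries the geometry chosen above:
first = lc.light_cone_solution[0]
first['ProjectionAxis']     # 0, 1, or 2
first['ProjectionCenter']   # [x, y, z] in box units
first['DepthBoxFraction']   # fraction of the box depth needed to reach the next slice
first['WidthBoxFraction']   # fraction of the box width covered by the field of view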
Пример #27
0
def find_unique_solutions(lightcone1,
                          solutions=100,
                          seed=None,
                          max_overlap=0.25,
                          failures=10,
                          recycle=True,
                          filename='unique.dat'):
    "Find a set of random seeds that will give light cones will minimal volume overlap."

    lightcone2 = copy.deepcopy(lightcone1)
    lightcone1.calculate_light_cone_solution(seed=0)
    lightcone2.calculate_light_cone_solution(seed=0)

    uniqueSeeds = []
    if recycle:
        master = None
    newRecycleSeed = None
    fails = 0
    recycleFails = 0

    maxCommon = 0.0

    # Need to continually save and reset the state of the random number generator
    # since it is being reset by the light cone generator.
    if seed is None:
        state = None
    else:
        rand.seed(seed)
        state = rand.getstate()

    failDigits = str(int(na.log10(failures)) + 1)

    while (len(uniqueSeeds) < solutions):
        # Create new random seed.
        if (recycle and master is not None):
            newSeed = master
            if state is not None: rand.setstate(state)
            newRecycleSeed = rand.randint(1, 1e9)
            state = rand.getstate()
        else:
            if state is not None: rand.setstate(state)
            newSeed = rand.randint(1, 1e9)
            state = rand.getstate()
            if recycle:
                master = newSeed
                recycleFails = 0
            newRecycleSeed = None

        sys.stderr.write(("Unique solutions: %d, consecutive failures: %"+failDigits+"d, %"+failDigits+"d.\r") % \
                             (len(uniqueSeeds), fails, recycleFails))

        lightcone1.rerandomize_light_cone_solution(newSeed, recycle=False)
        if newRecycleSeed is not None:
            lightcone1.rerandomize_light_cone_solution(newRecycleSeed,
                                                       recycle=True)

        # Compare with all other seeds.
        testPass = True
        for uniqueSeed in uniqueSeeds:
            lightcone2.rerandomize_light_cone_solution(uniqueSeed['master'],
                                                       recycle=False)
            if uniqueSeed['recycle'] is not None:
                lightcone2.rerandomize_light_cone_solution(
                    uniqueSeed['recycle'], recycle=True)

            common = _compare_solutions(lightcone1.light_cone_solution,
                                        lightcone2.light_cone_solution)

            if (common > max_overlap):
                testPass = False
                break
            else:
                maxCommon = max(maxCommon, common)

        if testPass:
            uniqueSeeds.append({'master': newSeed, 'recycle': newRecycleSeed})
            fails = 0
            recycleFails = 0

        else:
            if recycle:
                recycleFails += 1
            else:
                fails += 1

            if (recycleFails >= failures):
                sys.stderr.write(("Unique solutions: %d, consecutive failures: %"+failDigits+"d, %"+failDigits+"d.\n") % \
                                     (len(uniqueSeeds), fails, recycleFails))
                fails += 1
                mylog.info(
                    "Max recycled failures reached with master seed %d." %
                    newSeed)
                master = None
            if (fails >= failures):
                sys.stderr.write(("Unique solutions: %d, consecutive failures: %"+failDigits+"d, %"+failDigits+"d.\n") % \
                                     (len(uniqueSeeds), fails, recycleFails))
                mylog.error("Max consecutive failures reached.")
                break

    mylog.info("Created %d unique solutions." % len(uniqueSeeds))
    mylog.info("Maximum common volume is %.2e." % maxCommon)
    _write_seed_file(uniqueSeeds, filename)
    return uniqueSeeds
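
A minimal usage sketch (assumed, not from the original source); lc stands for an already-constructed light cone object and the parameter values are illustrative:

unique = find_unique_solutions(lc, solutions=20, seed=987654321,
                               max_overlap=0.10, failures=10,
                               recycle=True, filename='unique_seeds.dat')
# Each entry is a dict with a 'master' seed and, when recycling is enabled, a
# 'recycle' seed; the list is also written to 'unique_seeds.dat' via _write_seed_file.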