Example 1
def _find_los(points_dset_source, camera, nbSample):
    r"""
    Find the line of sight axis which is along the angular momentum of the gas inside the camera box

    Parameters
    ----------
    points_dset_source : :ref:`PointDataSource`
            fields "vel", "rho" and "size" needed

    camera : pymses camera box definition restriction

    nbSample : int (default=2000)
            not working yet : may speed up if random sampling ?
    """
    filtered_points_dset_source = RegionFilter(camera.get_bounding_box(),
                                               points_dset_source)
    filtered_points_dset = filtered_points_dset_source.flatten()  # multiprocessing data reading and filtering
    # Cell gas mass = density * cell volume
    d = filtered_points_dset.fields["rho"] * (
        filtered_points_dset.fields["size"]**3)
    v = filtered_points_dset["vel"]
    # Total angular momentum of the cells about the camera center
    p = d[:, np.newaxis] * v
    JJ = np.cross(filtered_points_dset.points - camera.center, p)
    J = np.sum(JJ, axis=0)
    # Return the unit vector along J
    result_vect = J / np.linalg.norm(J, 2)
    return result_vect
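A minimal usage sketch (hypothetical output path and camera setup; it assumes
the CellsToPoints source exposes the "vel", "rho" and "size" fields the
docstring requires):

from pymses import RamsesOutput
from pymses.analysis.visualization import Camera
from pymses.filters import CellsToPoints

ro = RamsesOutput("/data/simu", 42)  # hypothetical output
points_source = CellsToPoints(ro.amr_source(["rho", "vel"]))
cam = Camera(center=[0.5, 0.5, 0.5], line_of_sight_axis='z',
             region_size=[0.2, 0.2], distance=0.1, far_cut_depth=0.1)
los_axis = _find_los(points_source, cam, 2000)  # unit vector along the gas angular momentum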
Example 2
    def __init__(self,
                 source,
                 camera,
                 ext_size=0.0,
                 ext_3D=True,
                 keep_cache_dset=False,
                 use_camera_lvlmax=True):
        """
		Filter build to fit the camera bounding box

		Parameters
		----------
		source         : pymses data source
					
		camera   : pymses camera
						position (along w axis) of the fixed point in the right eye rotation
		ext_size	: float (default 0.0)
			extension to the camera box (in box unit between 0 and 1), used only if ext_3D==True
		ext_3D :	boolean (default True)
			if true, use an ExtendedCamera to extend the filtered region
		keep_cache_dset : boolean (default False)
			flag to keep the cache_dset dictonary of the source
			during the filtering process
			(used for PointsDataSet cache with the amrviewer GUI)
		use_camera_lvlmax : ``boolean`` (default True)
			Limit the transformation of the AMR grid to particles
			to AMR cells under the camera octree levelmax (so that visible cells
			are only the ones that have bigger size than the camera pixel size).
			Set this to False when using directly particle data from ".part"
			particles files (dark matter and stars particles), so as to get
			the cache_dset working without the levelmax specification
		"""
        self.cam = camera
        self.cam_box = camera.get_map_box()
        self.ext_3D = ext_3D
        if ext_3D:
            # Extended camera
            r = N.ones((2, 3)) * ext_size
            r[0, :] = -r[0, :]
            ecam = ExtendedCamera(camera, r)
        else:
            ecam = camera

        # Init Filter with extended camera bounding box region
        RegionFilter.__init__(self, ecam.get_bounding_box(), source)

        # Max. read level setup
        if use_camera_lvlmax:
            lreq = min(camera.get_required_resolution(), source.read_lmax)
            self.set_read_lmax(lreq)

        # Always record the flag; only share the source cache when requested
        self.keep_cache_dset = keep_cache_dset
        if keep_cache_dset:
            self.cache_dset = source.cache_dset
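A minimal usage sketch (the class name is not shown above; CameraFilter is a
hypothetical stand-in for the RegionFilter subclass this __init__ belongs to):

from pymses import RamsesOutput
from pymses.analysis.visualization import Camera

ro = RamsesOutput("/data/simu", 42)  # hypothetical output
amr = ro.amr_source(["rho"])
cam = Camera(center=[0.5, 0.5, 0.5], line_of_sight_axis='z',
             region_size=[0.3, 0.3], distance=0.15, far_cut_depth=0.15)
filt = CameraFilter(amr, cam, ext_size=0.05)  # keep only data inside the extended camera box
dset = filt.flatten()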
Example 3
def amr2cell(ro=None,
             list_var=None,
             log_sfera=False,
             camera_in=None,
             verbose=False):
    """
    Read the AMR cells contained in the region defined by camera_in.

    log_sfera: Boolean
        True for a sphere region, False for a box
    """
    assert ro is not None
    assert list_var is not None
    assert camera_in is not None

    from pymses.utils import regions
    from pymses.filters import RegionFilter, CellsToPoints

    amr = ro.amr_source(list_var)

    center = camera_in['center']
    radius = camera_in['region_size'][0]

    if log_sfera:
        regione_sp = regions.Sphere(center, radius)
    else:
        sinistra = np.copy(center) - radius
        destra = np.copy(center) + radius
        regione_sp = regions.Box((sinistra, destra))

    if verbose:
        print('Extracting cells')
        if log_sfera:
            print('  getting a sphere')
            print('  center:', center)
            print('  radius:', radius)
        else:
            print('  getting a box')
            print('  center:', center)
            print('  size  :', radius)
            print('  left  :', sinistra)
            print('  right :', destra)

    # cut the region
    amr = RegionFilter(regione_sp, amr)
    amr = CellsToPoints(amr)

    celle = amr.flatten()
    amr = None

    return celle
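A minimal usage sketch (hypothetical output path and region):

from pymses import RamsesOutput
import numpy as np

ro = RamsesOutput("/data/simu", 42)  # hypothetical output
cam = {'center': np.array([0.5, 0.5, 0.5]), 'region_size': [0.1]}
celle = amr2cell(ro=ro, list_var=["rho"], log_sfera=True,
                 camera_in=cam, verbose=True)
print(celle["rho"].max())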
Example 4
    def load(self, *fields):
        """Loads a list of quantitys in the self.qtyDataFrame"""
        print "-> Loading fields ...",
        ro = self.ramsesOutput
        df = self.qtyDataFrame
        # Find which of *fields are not yet loaded in the dataframe
        fields_to_load = []
        for field in fields:
            if field.name not in df.keys():
                fields_to_load.append(field)
        #
        if fields_to_load:
            amr_fields_to_read = Q.get_amrfields_to_read(*fields_to_load)
            amrSource = ro.amr_source(amr_fields_to_read,
                                      grav_compat=True,
                                      verbose=self._verbose)
            amrSource = RegionFilter(self.sphere, amrSource)
            dset = CellsToPoints(amrSource).flatten()

            for field in fields_to_load:
                df[field.name] = pd.Series(field.process(ro, dset))
            print "   Fields loaded :",
            for field in fields_to_load:
                print field.name,
            print ""
            return fields_to_load
        else:
            print "   No field to load"
            return []
Example 5
def particles2cell(ro=None,
                   star=True,
                   list_var=None,
                   log_sfera=False,
                   camera_in=None,
                   verbose=False):
    """
    Read the particles contained in the region defined by camera_in.

    star: Boolean
        True to select star particles, False for dark matter
    log_sfera: Boolean
        True for a sphere region, False for a box
    """

    assert ro is not None
    assert list_var is not None
    assert camera_in is not None

    from pymses.utils import regions
    from pymses.filters import RegionFilter

    part = ro.particle_source(list_var)

    # Select stars (epoch != 0) or dark matter (epoch == 0, i.e. particles
    # initially present in the simulation)
    from pymses.filters import PointFunctionFilter
    if star:
        star_filter = lambda dset: dset["epoch"] != 0.0
        part = PointFunctionFilter(star_filter, part)
    else:
        dm_filter = lambda dset: dset["epoch"] == 0.0
        part = PointFunctionFilter(dm_filter, part)

    center = camera_in['center']
    radius = camera_in['region_size'][0]

    if log_sfera:
        regione_sp = regions.Sphere(center, radius)
    else:
        sinistra = np.copy(center) - radius
        destra = np.copy(center) + radius
        regione_sp = regions.Box((sinistra, destra))

    if verbose:
        print('Extracting particles')
        if log_sfera:
            print('  getting a sphere')
            print('  center:', center)
            print('  radius:', radius)
        else:
            print('  getting a box')
            print('  center:', center)
            print('  size  :', radius)
            print('  left  :', sinistra)
            print('  right :', destra)

    # cut the region
    part = RegionFilter(regione_sp, part)
    celle = part.flatten()

    return celle, part
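A minimal usage sketch (hypothetical output path and region; "mass" and
"epoch" are requested because the function filters on the "epoch" field):

from pymses import RamsesOutput
import numpy as np

ro = RamsesOutput("/data/simu", 42)  # hypothetical output
cam = {'center': np.array([0.5, 0.5, 0.5]), 'region_size': [0.05]}
star_dset, star_source = particles2cell(ro=ro, star=True,
                                        list_var=["mass", "epoch"],
                                        camera_in=cam)
print(star_dset["mass"].sum())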
Example 6
def _find_galaxy_axis(points_dset_source, camera, nbSample):
    '''
    If a galaxy disk is centered in the camera box, this function should
    return the axis orthogonal to the galaxy disk.

    from seren3.utils.camera_utils import find_galaxy_axis

    Parameters
    ----------
    points_dset_source : :ref:`PointDataSource`
            fields "rho" and "size" needed

    camera : pymses camera; the galaxy's center has to coincide with the camera's center

    nbSample : int
            number of most massive cells to use to compute the axis via cross products
    '''
    filtered_points_dset_source = RegionFilter(camera.get_bounding_box(),
                                               points_dset_source)
    filtered_points_dset = filtered_points_dset_source.flatten()  # multiprocessing data reading and filtering
    # Cell gas mass = density * cell volume
    region_filtered_mesh_mass = filtered_points_dset.fields["rho"] * (
        filtered_points_dset.fields["size"]**3)
    argsort = np.argsort(region_filtered_mesh_mass)
    center = camera.center
    nbSample = min(nbSample, argsort.size // 2 - 1)  # integer division: cells are consumed in pairs
    result_vect = np.array([0., 0., 0.])
    for i in range(nbSample):
        # the two most massive cells not yet used (counted from the end of
        # argsort; -2*i alone would wrap to the least massive cell at i == 0)
        index1 = argsort[-2 * i - 1]
        index2 = argsort[-2 * i - 2]
        vect1 = filtered_points_dset.points[index1] - center
        vect2 = filtered_points_dset.points[index2] - center
        vect = np.cross(vect1, vect2)
        sign = np.dot(vect, [0., 0., 1.])
        if sign < 0:
            vect = -vect
        # weight the pair's cross product by its mass and separation
        result_vect = result_vect + vect * \
            (region_filtered_mesh_mass[index1] +
             region_filtered_mesh_mass[index2]) * \
            np.linalg.norm(vect1 - vect2, 2)
    result_vect = result_vect / np.linalg.norm(result_vect, 2)
    return result_vect
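A minimal usage sketch (reusing the hypothetical ro and cam from the sketch
after Example 1): point a new camera face-on along the recovered disk axis.

points_source = CellsToPoints(ro.amr_source(["rho"]))
axis = _find_galaxy_axis(points_source, cam, nbSample=2000)
cam_face_on = Camera(center=cam.center, line_of_sight_axis=axis,
                     region_size=cam.region_size)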
Example 7
def test_sph_profile():
    # Halo parameters
    halo_center = [0.567811, 0.586055, 0.559156]  # in box units
    halo_radius = 0.00075  # in box units

    # RamsesOutput
    ro = RamsesOutput("/data/Aquarius/output", 193)

    # Prepare to read the mass/epoch fields only
    source = ro.particle_source(["mass", "epoch"])

    # Sphere region
    sph = Sphere(halo_center, halo_radius)

    # Filtering particles
    point_dset = RegionFilter(sph, source)
    dm_filter = lambda dset: dset["epoch"] == 0.0
    dm_parts = PointFunctionFilter(dm_filter, point_dset)

    # Profile computation
    m_weight_func = lambda dset: dset["mass"]
    r_bins = numpy.linspace(0.0, halo_radius, 200)

    # Mass profile
    # This triggers the actual reading of the particle data files from disk.
    mass_profile = bin_spherical(dm_parts,
                                 halo_center,
                                 m_weight_func,
                                 r_bins,
                                 divide_by_counts=False)

    # Density profile
    sph_vol = 4.0 / 3.0 * numpy.pi * r_bins**3
    shell_vol = numpy.diff(sph_vol)
    rho_profile = mass_profile / shell_vol

    # Plot
    # Geometrical midpoint of the bins
    length = ro.info["unit_length"].express(C.kpc)
    bins_centers = (r_bins[1:] + r_bins[:-1]) / 2. * length
    dens = ro.info["unit_density"].express(C.Msun / C.kpc**3)
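    # A minimal plotting sketch (commented out; assumes matplotlib is available):
    #import matplotlib.pyplot as plt
    #plt.loglog(bins_centers, rho_profile * dens)
    #plt.xlabel("r [kpc]")
    #plt.ylabel("rho [Msun/kpc^3]")
    #plt.show()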

    #h5f = tables.openFile("./long_tests/sph_profile.h5", mode='w')
    #h5f.createArray("/", "sph_profile", rho_profile)
    #h5f.close()

    h5f = tables.openFile("./long_tests/sph_profile.h5", mode='r')
    rho_profileB = h5f.getNode("/sph_profile").read()
    h5f.close()

    #print rho_profile
    # Element-wise comparison against the reference profile, within a small tolerance
    assert numpy.max(numpy.abs(rho_profile - rho_profileB)) < 10e-6
Example 8
    def __init__(self,
                 camera,
                 esize,
                 ramses_amr_source,
                 radius=None,
                 ngrid_max=2000000,
                 include_split_cells=False):  #{{{
        t0 = time()
        self.ngrid_max = int(ngrid_max)
        # Extended camera
        r = numpy.ones((2, 3)) * esize
        r[0, :] = -r[0, :]
        from pymses.analysis.visualization.camera import ExtendedCamera
        ecam = ExtendedCamera(camera, r)
        bb = ecam.get_bounding_box()

        if radius is None:
            # Init Filter with extended camera bounding box region
            reg = bb
        else:
            # Defined sphere region, checks the region includes the camera area
            zmax = numpy.max([camera.far_cut_depth, camera.distance])
            xmax = camera.region_size[0] / 2.
            ymax = camera.region_size[1] / 2.
            assert (radius**2 >= (xmax**2 + ymax**2 + zmax**2))
            reg = Sphere(camera.center, radius + esize)

        RegionFilter.__init__(self, reg, ramses_amr_source)

        # Max. read level setup
        lreq = camera.get_required_resolution()
        self.set_read_lmax(lreq)

        # Init. self.dset to None
        self.dset = None
        self.build_dset(ecam, radius, include_split_cells=include_split_cells)
        print("CameraOctreeDatasource loaded up to level", lreq,\
         "with ngrids =", self.dset.amr_struct["ngrids"],\
         "(loading time = %.2fs"%(time() - t0), ")")
Example 9
def _find_center_of_mass(points_dset_source, camera, nbSample):
    r"""
    Find the center of mass in the camera box

    Parameters
    ----------
    points_dset_source : :ref:`PointDataSource`
            fields "rho" and "size" needed

    camera : pymses camera box definition restriction

    nbSample : int (default=2000)
            not working yet : may speed up if random sampling ?
    """
    filtered_points_dset_source = RegionFilter(camera.get_bounding_box(),
                                               points_dset_source)
    filtered_points_dset = filtered_points_dset_source.flatten()  # multiprocessing data reading and filtering
    # Cell gas mass = density * cell volume
    d = filtered_points_dset.fields["rho"] * (
        filtered_points_dset.fields["size"]**3)
    mass = np.sum(d)
    cm = np.sum(d[:, np.newaxis] * filtered_points_dset.points, axis=0) / mass
    return cm
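A minimal usage sketch (reusing the hypothetical ro and cam from earlier
sketches): recenter the camera on the gas center of mass.

points_source = CellsToPoints(ro.amr_source(["rho"]))
cm = _find_center_of_mass(points_source, cam, 2000)
cam_centered = Camera(center=cm, line_of_sight_axis=cam.los_axis,
                      region_size=cam.region_size)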
Example 10
    def filt_cube(self, cube, dset_type, fields):
        '''
        Return a flattened dset of the points contained in this cube
        '''
        if dset_type == Type.AMR:
            source = self.amr_source(fields)
        elif dset_type == Type.PART:
            source = self.particle_source(fields)
        else:
            raise Exception("No such type: %s" % dset_type)
        from pymses.filters import CellsToPoints
        from pymses.filters import RegionFilter

        filt_source = RegionFilter(cube, source)
        return CellsToPoints(filt_source).flatten()
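A minimal usage sketch (snap is a hypothetical object exposing filt_cube and
the Type enum; Cube(center, width) is assumed from pymses.utils.regions):

from pymses.utils.regions import Cube

cube = Cube([0.5, 0.5, 0.5], 0.1)  # width 0.1 in box units
dset = snap.filt_cube(cube, Type.AMR, ["rho"])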
Example 11
    def sphere(self, halo, source):
        '''
        Return a pymses sphere centred on this halo
        '''
        pos = halo['pos'].in_units('code_length').value
        r = halo['Rvir'].in_units('code_length').value
        region = Sphere(pos, r)
        filt_region = RegionFilter(region, source)

        if filt_region is None:
            raise Exception("Unable to create sphere: pos - %s r - %s" % (pos, r))

        #return filt_region
        return Region(filt_region)
Example 12
    def __getitem__(self, fields):
        """
        Data access via pymses for family specific tracked/derived fields
        """
        from serensource import SerenSource
        # strings are iterable in Python 3, so wrap a single field name explicitly
        if isinstance(fields, str) or not hasattr(fields, "__iter__"):
            fields = [fields]

        source, required_fields = self.get_source(fields, return_required_fields=True)

        if self.family in ['amr', 'rt']:
            from pymses.filters import CellsToPoints
            source = CellsToPoints(source)

        cpu_list = None
        if hasattr(self.base, "region"):
            from pymses.filters import RegionFilter
            source = RegionFilter(self.base.region, source)

        return SerenSource(self, source)
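A minimal usage sketch (hypothetical seren3-style snapshot whose gas family
object implements the __getitem__ above):

rho_source = snap.g["rho"]   # SerenSource wrapping the (possibly region-filtered) pymses source
dset = rho_source.flatten()  # assuming SerenSource exposes a flatten() method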
Example 13
def amr2cube(source, var, xmin, xmax, cubelevel, out=None):
    r"""
	amr2cube tool.

	"""
    # allow amr2cube to work with a vector
    op = var
    if isinstance(var, str):
        op = lambda dset: dset[var]

    # TODO :  add cache_dset reuse possibility
    # Region CPU-filtering
    b = Box([xmin, xmax])
    rsource = RegionFilter(b, source)
    rsource.set_read_lmax(cubelevel)
    try:
        from multiprocessing import Process, Queue, cpu_count
        if cpu_count() == 1:
            # don't use multiprocessing if there is only one cpu:
            # fall through to the serial loop in the except clause below
            raise Exception("mono-CPU")
        dsets = []
        from pymses.utils import misc
        NUMBER_OF_PROCESSES = min(len(rsource._data_list), cpu_count(),
                                  misc.NUMBER_OF_PROCESSES_LIMIT)

        # define long computing process method to do in parallel
        def read_dsets_to_cartesian(cpu_task_queue, dsets_queue, rsource):
            """Utility method for amr2cube multiprocessing method. 
			It reads the given list of data file and concatenate resulting dsets

			Parameters
			----------
			cpu_task_queue : ``list`` of ``int``
				queue of data file number corresponding to data files that have to be read by a process
			dsets_queue : multiprocessing queue
				to send the result to parent process
		
			"""
            len_dsets = 0
            out = None
            for icpu in iter(cpu_task_queue.get, 'STOP'):
                dset = rsource.get_domain_dset(icpu)
                active_mask = dset.get_active_mask()
                g_levels = dset.get_grid_levels()
                # We do the processing only if needed, i.e. only if some
                # active cells in the dset are at the amr level cubelevel
                if (g_levels[active_mask] == cubelevel).any():
                    if out is None:
                        out = dset.to_cartesian(var, xmin, xmax, cubelevel)
                    else:
                        dset.to_cartesian(var, xmin, xmax, cubelevel, dest=out)
                    len_dsets += 1
            if len_dsets == 0:
                dsets_queue.put("NoDset")
            else:
                dsets_queue.put(out)

        # Create queues
        cpu_task_queue = Queue()
        dsets_queue = Queue()
        # Submit tasks
        for task in rsource._data_list:
            cpu_task_queue.put(task)
        # Start worker processes
        for i in range(NUMBER_OF_PROCESSES):
            Process(target=read_dsets_to_cartesian,
                    args=(cpu_task_queue, dsets_queue, rsource)).start()
        # Tell child processes to stop when they have finished
        for i in range(NUMBER_OF_PROCESSES):
            cpu_task_queue.put('STOP')
        # Get results
        for i in range(NUMBER_OF_PROCESSES):
            outP = dsets_queue.get()
            if isinstance(outP, str):  # "NoDset" sentinel: this worker had nothing
                continue
            if out is None:
                out = outP
            else:
                out += outP
    except Exception:
        print('WARNING: multiprocessing unavailable')
        for dset in rsource.iter_dsets():
            active_mask = dset.get_active_mask()
            g_levels = dset.get_grid_levels()
            # We do the processing only if needed, i.e. only if some
            # active cells in the dset are at the amr level cubelevel
            if (g_levels[active_mask] == cubelevel).any():
                if out is None:
                    out = dset.to_cartesian(var, xmin, xmax, cubelevel)
                else:
                    dset.to_cartesian(var, xmin, xmax, cubelevel, dest=out)

    return out
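A minimal usage sketch (hypothetical output path; a level-8 cartesian cube
covering the central eighth of the box):

from pymses import RamsesOutput

ro = RamsesOutput("/data/simu", 42)  # hypothetical output
amr = ro.amr_source(["rho"])
cube = amr2cube(amr, "rho", [0.25, 0.25, 0.25], [0.75, 0.75, 0.75], 8)
print(cube.shape)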
Example 14
    def get_domain_dset(self, idomain):  #{{{
        # Serve the cached octree dataset when it exists, otherwise defer to RegionFilter
        if self.dset is None:
            return RegionFilter.get_domain_dset(self, idomain)
        else:
            return self.dset
Example 15
	def mstar_halomass_tree(self, tree, ds):

		snapshot = self._snapshot
		ro = snapshot.raw_snapshot()
		parts = ro.particle_source(["mass", "epoch"])
		amr = ro.amr_source(['rho'])

		#Figure out the smallest possible cell size in code units
		boxlen = ro.info['boxlen']
		lmax = ro.info['levelmax']

		min_dx = boxlen/float(2**(lmax))

		offset = tree.get_offset()
		snap_num = self._snapshot.output_number() - offset

		halos = tree.sub_catalogue('snap_num', snap_num)

		#idx = np.where(tree._tree[:]['snap_num'] == self._snapshot.output_number()-30)[0]
		#halos = tree[idx] # Is this right?

		print('Loaded halos have snap_num:', halos[1]['snap_num'])

		mstar = []
		mhalo = []

		i = 1
		for halo in halos:
			#Check if distinct halo
			if halo['pid'].value != -1:
				print('Halo %d skipped: PID = %d' % (halo['id'].value, halo['pid'].value))
				continue

			#Unit conversion
			cen = halo['pos']
			cen = ds.arr(cen.value, cen.unit)
			r = halo['Rvir']
			r = ds.arr(r.value, r.unit)

			#Create the sphere
			cen = cen.in_units('code_length').value
			r = r.in_units('code_length').value
			r_amr = float(r)

			#If the halo radius is smaller than the min cell size,
			#we need to try and approximate the masses enclosed.
			#Not sure how correct this is...
			factor = 1
			if r < min_dx:
				r_amr += min_dx
				vol_halo = (4./3.)*np.pi*r**3
				vol_cell = (4./3.)*np.pi*r_amr**3
				factor = vol_halo/vol_cell

			#Define the regions
			region_part = Sphere(cen, r)
			region_amr = Sphere(cen, r_amr)

			#Filter the particles
			filt_parts = RegionFilter(region_part, parts)			
			part_source = filt_parts.flatten()

			dm = np.where(part_source['epoch'] == 0)[0]
			stars = np.where(part_source['epoch'] != 0)[0]

			part_mass = part_source['mass']*ro.info['unit_mass'].express(C.Msun)

			if len(part_mass[dm]) < 50:
				print('Discarding halo with fewer than 50 DM particles. Mvir=%e' % halo['Mvir'].value)
				continue

			#Filter the AMR data
			filt_amr = RegionFilter(region_amr, amr)
			cell_source = CellsToPoints(filt_amr)
			cells = cell_source.flatten()
			rho = cells['rho']*ro.info['unit_density'].express(C.Msun/C.kpc**3)
			vol = (cells.get_sizes()*ro.info['unit_length'].express(C.kpc))**3
			cell_mass = rho*vol

			#Compute the total mass enclosed
			gas_mass = np.sum(cell_mass)*factor # Multiply by factor in case we had to inflate the sphere
			stellar_mass = np.sum(part_mass[stars])
			particle_mass = np.sum(part_mass)
			total_mass = gas_mass + particle_mass

			mstar.append(stellar_mass)
			mhalo.append(total_mass)

			i += 1
			if config.verbose and (i % 100) == 0: print('Processed %d halos...' % i)
		return np.array(mstar), np.array(mhalo)
Example 16
	def fgas_halomass(self, halos=None):

		snapshot = self._snapshot
		ro = snapshot.raw_snapshot()
		amr = ro.amr_source(['rho'])
		parts = ro.particle_source(["mass"])

		#Figure out the smallest possible cell size in code units
		boxlen = ro.info['boxlen']
		lmax = ro.info['levelmax']

		min_dx = boxlen/float(2**(lmax))

		if config.verbose: print('min_dx=', min_dx)

		if halos is None:
			halos = snapshot.halos()

		if config.verbose: print('Processing %d halos' % len(halos))

		fgas = []
		mhalo = []

		#z = self._snapshot.current_redshift()
		#a = 1./(1.+z)

		i = 0
		for halo in halos:
			# Assumes halos are sorted by particle count, so stop at the first small one
			if halo['num_p'] < 150:
				break

			#if tree:
				#Find this halo and count number of progenitors.
				#If too high, skip (frequent mergers -> tidal stripping)

				#idx_tree_halo = (tree._tree[:]['orig_halo_id'] == halo['id'].value) &\
				#		 (tree._tree[:]['snap_num'] == snapshot.rockstar_output_number())
				#print idx_tree_halo
				#tree_halo = tree._tree[idx_tree_halo]
				#assert(len(tree_halo)==1)
				#all_halos = []
				#tree.find_progs(np.array([tree_halo]), all_halos)
				#num_progs = len(all_halos)
				#print 'Halo %d has %d progenitors'%(halo['id'], num_progs)
				#if num_progs > 150: continue

			#Create the sphere
			cen = halo['pos'].in_units('code_length').value
			r = halo['Rvir'].in_units('code_length').value
			r_amr = float(r)

			#If the halo radius is smaller than the min cell size,
			#we need to try and approximate the masses enclosed.
			#Not sure how correct this is...
			factor = 1
			if r < min_dx:
				r_amr += min_dx
				vol_halo = (4./3.)*np.pi*r**3
				vol_cell = (4./3.)*np.pi*r_amr**3
				factor = vol_halo/vol_cell

			#Define the regions
			region_part = Sphere(cen, r)
			region_amr = Sphere(cen, r_amr)

			#Filter the particles
			filt_parts = RegionFilter(region_part, parts)
			part_source = filt_parts.flatten()
			part_mass = part_source['mass']*ro.info['unit_mass'].express(C.Msun)

			#Filter the AMR data
			filt_amr = RegionFilter(region_amr, amr)
			cell_source = CellsToPoints(filt_amr)
			cells = cell_source.flatten()
			rho = cells['rho']*ro.info['unit_density'].express(C.Msun/C.kpc**3)
			vol = (cells.get_sizes()*ro.info['unit_length'].express(C.kpc))**3
			cell_mass = rho*vol

			#Compute the total mass enclosed
			gas_mass = np.sum(cell_mass)*factor # Multiply by factor in case we had to inflate the sphere
			particle_mass = np.sum(part_mass)
			total_mass = gas_mass + particle_mass

			fgas.append(gas_mass/total_mass)
			mhalo.append(total_mass)

			i += 1
			if config.verbose and (i % 100) == 0: print('Processed %d halos...' % i)
		return np.array(fgas), np.array(mhalo)