def generate_object(self): r"""Generates an off axis ray through a preselected YtObject based on the parameters specified by the user in the widget.""" source = self.parent.active_data_object.data unit = self.coordinate_units_w.get_unit() sp = self.start_coord_w.get_coordinates() ep = self.end_coord_w.get_coordinates() name = self.object_name.get_name() if unit is not None: sp = YTArray(sp, unit) ep = YTArray(ep, unit) sp = source.arr(sp).in_units('code_length') ep = source.arr(ep).in_units('code_length') fp = self.field_parameters_w.get_field_parameters() ds = self.data_source_w.get_data_source() if fp == 'None': if ds is None: ray = source.ray(sp, ep) else: ray = source.ray(sp, ep, data_source=ds) new_object = YtDataObject(ray, name) self.parent.add_data_object(new_object)
def load_sightline_scatter_data(sim, ray_id, output=3195):
    fn = '../../data/unanalyzed_spectra/ray_%s_%i_%i.h5' % (sim, output, ray_id)
    plot_data = h5.File(fn, 'r')['grid']

    l = YTArray(plot_data['l'], 'cm')
    l = np.array(l.in_units('kpc'))
    temperature = np.array(plot_data['temperature'])
    density = np.array(plot_data['density'])
    metallicity = np.array(plot_data['metallicity']) * 77.22007722007721  # converting from code units to zsun
    vx = np.array(plot_data['relative_velocity_x']) / 1e5  # converting to km/s
    vy = np.array(plot_data['relative_velocity_y']) / 1e5  # converting to km/s
    vz = np.array(plot_data['relative_velocity_z']) / 1e5  # converting to km/s
    vlos = np.array(plot_data['velocity_los']) / 1e5
    dl = np.array(plot_data['dl'])

    # O VI and H I column densities
    oden = np.array(plot_data['O_p5_number_density'])
    ocol = dl * np.array(oden)
    sicol = dl * np.array(plot_data['Si_p2_number_density'])
    hcol = dl * np.array(plot_data['H_p0_number_density'])

    return l, temperature, density, metallicity, vlos, ocol, sicol
def get_romulusC_bulk_velocity(output):
    if output == 960:
        bv = YTArray([12.90373573, -44.42293777, -103.72552814], 'km/s')
    elif output == 768:
        bv = YTArray([8.64556658, -44.93365813, -97.94962149], 'km/s')
    elif output == 636:
        bv = YTArray([7.56707786, -41.7900487, -90.4190418], 'km/s')
    return bv
def load_simulation_properties(model, output=3195,
                               ion_list=['H I', 'O VI', 'Si II', 'Si III',
                                         'Si IV', 'Mg II', 'N V', 'C IV']):
    # note AHF finds this value for P0: 78.13 -239.42 -65.99
    if model == 'P0':
        ds = yt.load('/Users/irynabutsky/simulations/patient0/pioneer50h243.1536gst1bwK1BH.%06d' % output)
        gcenter = YTArray([-16933.77317667, -12009.28144633, 5305.25448309], 'kpc')  # generated with shrink sphere
        bulk_velocity = YTArray([73.05701672, -239.32976334, -68.07892736], 'km/s')
        ds.add_field(('gas', 'pressure'), function=_Pressure,
                     sampling_type='particle', units=ds.unit_system["pressure"])

    elif model == 'P0_agncr':
        print('adding cosmic ray')
        ds = yt.load('/Users/irynabutsky/simulations/patient0_agncr/pioneer.%06d' % output)
        gcenter = YTArray([-16933.48544591, -12006.24067239, 5307.33807425], 'kpc')  # generated with shrink sphere
        bulk_velocity = YTArray([74.98331176, -240.71723683, -67.77556155], 'km/s')
        ds.add_field(('gas', 'cr_pressure'), function=_CRPressure,
                     sampling_type='particle', units=ds.unit_system["pressure"])
        ds.add_field(('gas', 'pressure'), function=_Pressure,
                     sampling_type='particle', units=ds.unit_system["pressure"])
        ds.add_field(('gas', 'cr_eta'), display_name=('$P_{\\rm c} / P_{\\rm g}$'),
                     function=_CRBeta, sampling_type='particle', units='')

    ds.add_field(('gas', 'O_mass'), function=_omass,
                 sampling_type='particle', units=ds.unit_system['mass'])
    ds.add_field(('gas', 'Fe_mass'), function=_femass,
                 sampling_type='particle', units=ds.unit_system['mass'])

    trident.add_ion_fields(ds, ions=ion_list)
    return ds, gcenter, bulk_velocity
def make_projection(ds, axis, ion_fields, center, width):
    half_width = np.array([width, width, width]) / 2.
    left_edge = center - YTArray(half_width, 'kpc')
    right_edge = center + YTArray(half_width, 'kpc')
    box = ds.region(center, left_edge, right_edge)
    proj = yt.ProjectionPlot(ds, axis, ion_fields, weight_field=None,
                             width=width, center=center, data_source=box)
    return proj.data_source.to_frb((width, 'kpc'), 800)
    # p = ds.proj(ion_fields, axis, weight_field=None, data_source=box,
    #             center=center, method='integrate')
    # print('made_projection')  # unreachable after the return above
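# A minimal, hedged usage sketch for make_projection() above.  It uses yt's
# fake_random_ds test dataset so the call is self-contained; the field choice,
# center, and width below are illustrative placeholders, not values taken from
# the surrounding scripts.
from yt.testing import fake_random_ds

test_ds = fake_random_ds(16, length_unit='kpc')  # small fake dataset, 1 kpc domain
test_center = test_ds.arr([0.5, 0.5, 0.5], 'kpc')
frb = make_projection(test_ds, 'x', [('gas', 'density')], test_center, 0.5)
print(np.array(frb[('gas', 'density')]).shape)  # 800x800 fixed-resolution buffer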
def from_h5_file(cls, filename):
    r"""
    Generate an equilibrium model from an HDF5 file.

    Parameters
    ----------
    filename : string
        The name of the file to read the model from.

    Examples
    --------
    >>> from cluster_generator import ClusterModel
    >>> hse_model = ClusterModel.from_h5_file("hse_model.h5")
    """
    f = h5py.File(filename)
    model_type = f["model_type"].value
    geometry = f["geometry"].value
    num_elements = f["num_elements"].value
    fnames = list(f['fields'].keys())
    f.close()

    fields = OrderedDict()
    for field in fnames:
        fields[field] = YTArray.from_hdf5(filename, dataset_name=field,
                                          group_name="fields").in_base("galactic")

    return equilibrium_model_registry[model_type](num_elements, fields, geometry)
def generate_object(self): r"""This function generates an axis aligned ray from a YtObject with parameters specified by the user in the widget.""" axis = self.axis_dict[self.axisw.currentText()] units = self.coord_units_w.get_unit() coord1 = self.coord1_w.get_coordinate() coord2 = self.coord1_w.get_coordinate() source = self.parent.active_data_object.data name = self.object_name.get_name() if units is not None: coord = YTArray([coord1, coord2], units) coord = source.arr(coord).in_units('code_length') else: coord = [coord1, coord2] if self.field_parameters.get_field_parameters() == 'None': if self.data_source.get_data_source() is None: ray = source.ortho_ray(axis, coord) else: dsource = self.data_source.get_data_source() ray = source.ortho_ray(axis, coord, data_source=dsource) new_object = YtDataObject(ray, name) self.parent.add_data_object(new_object)
def z_to_time(z, cosmology=None):
    """
    Gives the time, in years, since the big bang for a given redshift and an
    astropy cosmology.  If no cosmology is passed in, assumes Planck13.
    """
    if cosmology is None:
        from astropy.cosmology import Planck13 as cosmology
    from yt import YTArray
    t = cosmology.age(z)
    return YTArray(t * t.unit.in_units('yr'), 'yr')
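# A quick, hedged usage sketch for z_to_time() above; it assumes astropy is
# installed so the Planck13 tables are available.  The redshifts are arbitrary
# illustrations, not values from the original analysis.
t0 = z_to_time(0.0)   # roughly 1.38e10 yr, the Planck13 age of the universe today
t2 = z_to_time(2.0)   # age of the universe at z = 2
print(t0.in_units('Gyr'), t2.in_units('Gyr'))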
def create_data_arrays(fc, data):
    """
    Turn lists of values into arrays with proper cgs units.
    """
    for field in data:
        if field in fc.density_fields:
            data[field] = YTArray(data[field], "g/cm**3")
        elif field == "energy":
            data[field] = YTArray(data[field], "erg/g")
        elif field == "time":
            data[field] = YTArray(data[field], "s")
        elif field == "temperature":
            data[field] = YTArray(data[field], "K")
        elif field == "pressure":
            data[field] = YTArray(data[field], "dyne/cm**2")
        else:
            data[field] = np.array(data[field])
    return data
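# Hedged usage sketch for create_data_arrays() above.  In the original workflow
# `fc` is a grackle FluidContainer; here a tiny stand-in class exposing only a
# `density_fields` attribute is used so the example is self-contained, and all
# field values are placeholders.
class _FakeFluidContainer:
    density_fields = ("density", "HI_density")

example = {"density": [1e-26, 2e-26],
           "temperature": [1e4, 1e6],
           "time": [0.0, 1.0e13],
           "de": [1e-30, 2e-30]}  # unrecognized field: falls through to a plain numpy array
example = create_data_arrays(_FakeFluidContainer(), example)
print(example["density"].units, example["temperature"].units)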
def load_romulusC(output, ions=[]):
    ds = yt.load('/nobackup/ibutsky/simulations/romulusC/romulusC.%06d' % output)
    ad = ds.all_data()

    cen = YTArray(rom.get_romulus_center('romulusC', output), 'kpc')
    bv = rom.get_romulusC_bulk_velocity(output)
    ad.set_field_parameter('center', cen)
    ad.set_field_parameter('bulk_velocity', bv)

    add_thermal_fields(ds)
    if len(ions) > 0:
        trident.add_ion_fields(ds, ions=ions)
    return ds
def generate_object(self): r"""Generates a YTSphere based on the parameters provided by the user in the widget.""" source = self.parent.active_data_object.data center = self.center.get_coordinates() center_units = self.center_units.get_unit() radius = self.radius.get_coordinate() radius_units = self.radius_units.get_unit() field_params = self.field_parameters.get_field_parameters() dsource = self.data_source.get_data_source() name = self.name.get_name() if center_units is not None: center = YTArray(center, center_units) if radius_units is not None: radius = YTArray(radius, radius_units) if field_params == 'None': sphere = source.sphere(center, radius, data_source=dsource) new_object = YtDataObject(sphere, name) self.parent.add_data_object(new_object)
def calc_angmom_0(data, bulk_vel0, rad_in, rad_out):
    print('rad_out = ', rad_out)
    angmom_x_avg = 0
    angmom_y_avg = 0
    angmom_z_avg = 1
    vel_x_avg = 0
    vel_y_avg = 0
    vel_z_avg = 0

    #inds = np.where(na.logical_and(data['density'] > den_in, data['density'] < den_out))
    inds2 = np.where(data['Radius'] < rad_out)
    inds = np.where(na.logical_and(data['Radius'] > rad_in, data['Radius'] < rad_out))

    mass_tot = na.sum(data['cell_mass'][inds])
    marr = data['cell_mass'][inds]
    mass_tot2 = na.sum(data['cell_mass'][inds2])
    marr2 = data['cell_mass'][inds2]

    vel_x_avg = na.sum(data['x-velocity'][inds2].value * marr2.value) / mass_tot2.value
    vel_y_avg = na.sum(data['y-velocity'][inds2].value * marr2.value) / mass_tot2.value
    vel_z_avg = na.sum(data['z-velocity'][inds2].value * marr2.value) / mass_tot2.value
    print('vel_x = ', vel_x_avg, 'vel_y = ', vel_y_avg, 'vel_z = ', vel_z_avg)
    print('marr2 = ', marr2)
    print('mass_tot2 = ', mass_tot2)

    bulk_vel = YTArray([vel_x_avg, vel_y_avg, vel_z_avg], 'cm/s')

    angmom_x_avg = na.sum(data['angular_momentum_x'][inds2].value * marr2) / mass_tot2 / 1.e50
    angmom_y_avg = na.sum(data['angular_momentum_y'][inds2].value * marr2) / mass_tot2 / 1.e50
    angmom_z_avg = na.sum(data['angular_momentum_z'][inds2].value * marr2) / mass_tot2 / 1.e50

    data.set_field_parameter("bulk_velocity", bulk_vel)
    #data.set_field_parameter('normal', [angmom_x_avg, angmom_y_avg, angmom_z_avg])

    bulk_vel = data.get_field_parameter("bulk_velocity")
    print('bulk_vel = ', bulk_vel)
    normal = data.get_field_parameter("normal")
    print('normal = ', normal)
def load_simulation_properties(model, output, laptop=False):
    if model == 'tempest':
        ds = yt.load('/mnt/c/scratch/sciteam/chummels/Tempest10/DD%04d/DD%04d' % (output, output))
        if output == 524:
            rockstar_fn = '/projects/eot/bafa/tempest/tree_27.dat'
            halo_id = 27
            rockstar_data = get_rockstar_data(rockstar_fn, halo_id)
            gcenter, bulk_velocity, rvir = read_rockstar_info(rockstar_data, ds)
        else:
            print('WARNING: No rockstar file for output %i' % (output))
        ad = ds.all_data()
        # TODO

    elif model == 'P0':
        # calculate center of mass and bulk velocity using pynbody
        if laptop:
            fn = '~/Work/galaxy/P0/P0.003195'
            ds = yt.load(fn)
            gcenter = YTArray([-1.693207e+04, -1.201068e+04, 5.303337e+03], 'kpc')
            bulk_velocity = YTArray([72.78, -248.83, -63.49], 'km/s')
        else:
            import pynbody
            fn = '/nobackup/ibutsky/tmp/pioneer.%06d' % (output)
            ds = yt.load(fn)
            pynbody_file = '/nobackupp2/nnsanche/pioneer50h243.1536g1bwK1BH/pioneer50h243.1536gst1bwK1BH.%06d' % (output)
            s = pynbody.load(pynbody_file)
            s.physical_units()
            gcenter = YTArray(pynbody.analysis.halo.center_of_mass(s.s), 'kpc')
            gcenter = YTArray([-1.693207e+04, -1.201068e4, 5.303337e3], 'kpc')
            print(gcenter)
            bulk_velocity = YTArray(pynbody.analysis.halo.center_of_mass_velocity(s.g), 'km/s')

    return ds, gcenter, bulk_velocity
def generate_object(self): r"""Generates an axis aligned object based on the user input from the widget.""" axis = self.axis_dict[self.axisw.currentText()] coord_unit = self.slice_point_unit_w.get_unit() coord = self.coordinate_w.get_coordinate() source = self.parent.active_data_object.get_data() name = self.name_w.get_name() field_params = self.field_parameters_w.get_field_parameters() dsource = self.data_source_w.get_data_source() if coord_unit is not None: coord = YTArray([coord], coord_unit) coord = source.arr(coord).in_units('code_length').item(0) if self.center_toggle_w.currentText() == 'Custom': c_coord = [self.center_coord1_w.get_coordinate(), self.center_coord2_w.get_coordinate()] c_coord_unit = self.sec_unit_w.get_unit() if c_coord_unit is not None: c_coord = YTArray(c_coord, c_coord_unit) else: c_coord = None if field_params == 'None': new_slice = source.slice(axis, coord, center=c_coord, field_parameters=None, data_source=dsource) new_object = YtDataObject(new_slice, name) self.parent.add_data_object(new_object)
def time_to_z(age, cosmology=None, v=False):
    """
    Returns the redshift of a given age using an astropy cosmology.

    age is taken to be in Gyr if all values are less than 15, years otherwise
    (unless it's passed in as a YTQuantity/YTArray, in which case the units
    are read from the object).
    """
    from yt import YTArray, YTQuantity
    if cosmology is None:
        from astropy.cosmology import Planck13 as cosmology
    from astropy.cosmology import z_at_value
    import astropy.units as u
    import numpy as np

    gyrconv = False
    # numpy array?
    if type(age) == type(np.array([1, 2, 3])):
        if (age < 15).all():
            gyrconv = True
            age = u.Quantity(age * 1e9, u.yr)
        else:
            age = u.Quantity(age, u.yr)
    # single number?
    elif type(age) == type(1.2) or type(age) == type(1):
        if age < 15:
            gyrconv = True
            age = u.Quantity(age * 1e9, u.yr)
        else:
            age = u.Quantity(age, u.yr)
    # yt quantity?  convert it
    elif type(age) == type(YTQuantity(12e9, 'yr')) or type(age) == type(YTArray([1., 2.])):
        age = u.Quantity(age.in_units('yr'), u.yr)
    # otherwise, it has to be an astropy quantity already
    else:
        assert type(age) == type(u.Quantity(13.6, u.yr))

    if v and gyrconv:
        print("Converted to Gyr")

    try:
        it = iter(age)
        z = []
        for ii in it:
            z.append(z_at_value(cosmology.age, ii))
        z = np.array(z)
    except TypeError:  # age is not iterable
        z = z_at_value(cosmology.age, age)
    return z
def load_charlotte_sim(sim):
    if sim == 'cosmo':
        fn = '/nobackupp8/trquinn/h986.cosmo50cmb.3072g/h986.cosmo50cmb.3072g1bwK/steps/h986.cosmo50cmb.3072g1bwK.00384.dir/h986.cosmo50cmb.3072g1bwK.00384'
    elif sim == 'H2':
        fn = '/nobackupp8/trquinn/h986.cosmo50cmb.3072g/h986.cosmo50cmb.3072g14HBWK/h986.cosmo50cmb.3072g14HBWK.00384'
    elif sim == 'metal':
        fn = '/nobackupp8/trquinn/h986.cosmo50cmb.3072g/h986.cosmo50cmb.3072gs1MbwK/h986.cosmo50cmb.3072gs1MbwK.00382/h986.cosmo50cmb.3072gs1MbwK.00382'

    # find the center:
    s = pynbody.load(fn)
    s.physical_units()
    #gcenter = YTArray(pynbody.analysis.halo.center_of_mass(s.s), 'kpc')
    gcenter = YTArray(pynbody.analysis.halo.shrink_sphere_center(s.s), 'kpc')

    ds = yt.load(fn)
    return ds, gcenter
def time_to_z(t, cosmo=None, verbose=False):
    # H0=YTQuantity(70.2,'km/s/Mpc')):
    from yt import YTQuantity, YTArray
    if cosmo is None:
        # use Planck 2015 from the last column (TT, TE, EE+lowP+lensing+ext)
        # of Table 4 from http://arxiv.org/pdf/1502.01589v2.pdf
        from yt.utilities.cosmology import Cosmology
        h = 0.6774
        om = 0.3089
        ol = 0.6911
        # behroozi parameters
        # h = .7
        # om = .27
        # ol = 1 - om
        if verbose:
            print("Assuming a Planck 2015 cosmology (H0 = {0}, Om0 = {1}, OL = {2})".format(h * 100, om, ol))
        cosmo = Cosmology(hubble_constant=h, omega_matter=om, omega_lambda=ol)

    if type(t) != type(YTQuantity(1, 'Gyr')) and type(t) != type(YTArray([1, 2, 3], 'Gyr')):
        # then I need to figure out units and wrap in a yt object
        if type(t) == type(1.23):
            # single float
            if t < 15:  # assume Gyr
                t = YTArray(t, 'Gyr')
                if verbose:
                    print("Assuming time in Gyr")
            elif t < 1e11:  # assume yr
                t = YTArray(t, 'yr')
                if verbose:
                    print("Assuming time in yr")
            else:  # then it's probably in seconds
                t = YTArray(t, 's')
                if verbose:
                    print("Assuming time in seconds")
        else:
            from numpy import array
            t = array(t)
            if (t < 15).all():
                t = YTArray(t, 'Gyr')
                if verbose:
                    print("Assuming time in Gyr")
            elif (t < 1e11).all():  # assume yr
                t = YTArray(t, 'yr')
                if verbose:
                    print("Assuming time in yr")
            else:  # then it's probably in seconds
                t = YTArray(t, 's')
                if verbose:
                    print("Assuming time in seconds")

    return cosmo.z_from_t(t)
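# Illustrative calls to the time_to_z() variant above; the input times are
# arbitrary examples, not values from the original analysis.
from yt import YTQuantity
print(time_to_z(13.0, verbose=True))        # bare float < 15 is interpreted as Gyr
print(time_to_z(YTQuantity(13.0e9, 'yr')))  # explicit yt units are passed through unchanged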
def _radial_velocity(field, data):
    bv = data.get_field_parameter('bulk_velocity')
    cen = data.get_field_parameter('center')
    print(bv, cen)

    x = (data[('gas', 'x')].in_units('kpc') - cen[0].in_units('kpc')).d
    y = (data[('gas', 'y')].in_units('kpc') - cen[1].in_units('kpc')).d
    z = (data[('gas', 'z')].in_units('kpc') - cen[2].in_units('kpc')).d
    pos = np.column_stack((x, y, z))
    pos_mag = np.linalg.norm(pos, axis=1)

    vx = (data[('gas', 'velocity_x')].in_units('km/s') - bv[0]).in_units('km/s').d
    vy = (data[('gas', 'velocity_y')].in_units('km/s') - bv[1]).in_units('km/s').d
    vz = (data[('gas', 'velocity_z')].in_units('km/s') - bv[2]).in_units('km/s').d
    vel = np.column_stack((vx, vy, vz))
    vel_mag = np.linalg.norm(vel, axis=1)

    cos_theta = np.sum(pos * vel, axis=1) / (pos_mag * vel_mag)
    return YTArray(vel_mag * cos_theta, 'km/s')
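# The derived field above is the usual radial velocity relative to the halo,
#   v_r = (v - v_bulk) . (r - r_center) / |r - r_center|,
# written here as |v - v_bulk| * cos(theta).  A sketch of registering it follows;
# the field name and sampling_type are assumptions chosen to mirror the other
# add_field calls in these scripts, and the data object used afterwards must
# have its 'center' and 'bulk_velocity' field parameters set.
yt.add_field(('gas', 'radial_velocity_custom'), function=_radial_velocity,
             units='km/s', sampling_type='cell')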
def generate_object(self): r"""Generates a YTPoint from the selected yt object, with parameters specified by the user in the widget.""" unit = self.coordinate_unit_w.get_unit() coord = self.coord_combo_w.get_coordinates() if unit is not None: coord = YTArray(coord, unit) name = self.object_name.get_name() source = self.parent.active_data_object.data if self.field_parametersw.get_field_parameters() == 'None': if self.data_sourcew.get_data_source() is None: point = source.point(coord) new_object = YtDataObject(point, name) else: dsource = self.data_source.get_data_source() point = source.point(coord, data_source=dsource) new_object = YtDataObject(point, name) self.parent.add_data_object(new_object)
def generate_ray_image_data(field_list, weight_list,
                            model='P0', output=3195,
                            ray_data_file='', data_loc='.',
                            ion_list='all', redshift=None):
    # load data set with yt, galaxy center, and the bulk velocity of the halo
    ds, gcenter, bulk_velocity = spg.load_simulation_properties(model)

    # for annoying reasons... need to convert ray positions to "code units"
    code_unit_conversion = ds.domain_right_edge.d / ds.domain_right_edge.in_units('kpc').d

    print(ray_data_file)
    ray_id_list, impact, bvx, bvy, bvz, xi, yi, zi, xf, yf, zf, cx, cy, cz = \
        np.loadtxt(ray_data_file, skiprows=1, unpack=True)

    bulk_velocity = YTArray([bvx[0], bvy[0], bvz[0]], 'km/s')

    # set field parameters so that trident knows to subtract off bulk velocity
    ad = ds.all_data()
    ad.set_field_parameter('bulk_velocity', bulk_velocity)
    ad.set_field_parameter('center', gcenter)

    width = ds.arr([300., 20., 10], 'kpc')

    for i in [5]:
        # generate the coordinates of the random sightline
        # write ray id, impact parameter, bulk velocity, and start/end coordinates out to file
        h5file = h5.File('%s/ray_image_data_%s_%i_%i.h5' % (data_loc, model, output, ray_id_list[i]), 'w')

        ray_start = ds.arr([xi[i], yi[i], zi[i]], 'kpc')
        ray_end = ds.arr([xf[i], yf[i], zf[i]], 'kpc')
        ray_direction = ray_end - ray_start
        ray_center = ray_start + 0.5 * ray_direction

        normal_vector = ds.arr(gcenter, 'kpc') - ray_center
        normal_vector = np.cross(normal_vector, ray_direction)

        for field, weight in zip(field_list, weight_list):
            print(field)
            sys.stdout.flush()
            if weight is not None:
                weight = ('gas', weight)
            if field not in h5file.keys():
                #left_edge =
                #box = ds.region(ray_center, )
                image = yt.off_axis_projection(ds, center=ray_center,
                                               normal_vector=normal_vector,
                                               width=width, resolution=[1200, 80],
                                               item=('gas', field), weight=weight)
                h5file.create_dataset(field, data=image)
                h5file.flush()

        h5file.close()
        print("saved sightline data %i\n" % (i))
remove_first_N_kpc = 1.0

# Load dataset
fn = "/mnt/scratch/dsilvia/simulations/reu_sims/MW_1638kpcBox_800pcCGM_200pcDisk_lowres/DD1500/DD1500"
ds = yt.load(fn)

# Add H I & O VI ion fields using Trident
trident.add_ion_fields(ds, ions=['O VI', 'H I'], ftype="gas")

# Specify position where the ray starts (position in the volume)
c = ds.arr([0.5, 0.5, 0.5], 'unitary')
c = c.in_units('kpc')

# location in the disk where we're setting our observations
X = YTArray([8., 0., 0.], 'kpc')
ray_start = c - X

# Length of Ray
R = 200.0

# do you want projections of the spheres?  True/False
MakeProjections = True

# do you want debug information while the calculation goes on?  True/False
Debug = True

# do you want to write out ray files?  True/False
WriteRays = True

# fraction of rays that make plots?  (determined randomly)
def make_onezone_ray(density=1e-26, temperature=1000, metallicity=0.3,
                     length=10, redshift=0, filename='ray.h5',
                     column_densities=None):
    """
    Create a one-zone ray object for use as test data.  The ray consists of
    a single absorber of hydrodynamic characteristics specified in the
    function kwargs.  It makes an excellent test dataset to test Trident's
    capabilities for making absorption spectra.

    You can specify the column densities of different ions explicitly using
    the column_densities keyword, or you can let Trident calculate the
    different ion columns internally from the density, temperature, and
    metallicity fields.

    Using the defaults will produce a ray that should result in a spectrum
    with a good number of absorption features.

    **Parameters**

    :density: float, optional

        The gas density value of the ray in g/cm**3
        Default: 1e-26

    :temperature: float, optional

        The gas temperature value of the ray in K
        Default: 10**3

    :metallicity: float, optional

        The gas metallicity value of the ray in Zsun
        Default: 0.3

    :length: float, optional

        The length of the ray in kpc
        Default: 10.

    :redshift: float, optional

        The redshift of the ray
        Default: 0

    :filename: string, optional

        The filename to which the ray is saved on disk.  Due to the
        mechanism for passing rays, the ray data must be saved to disk.
        Default: 'ray.h5'

    :column_densities: dict, optional

        The user can create a dictionary which adds more number density ion
        fields to the ray.  Each key in the dictionary should be the desired
        ion field name according to the field name format:
        i.e. "<ELEMENT>_p<IONSTATE>_number_density"
        e.g. neutral hydrogen = "H_p0_number_density".

        The corresponding value for each key should be the desired column
        density of that ion in cm**-2.  See example below.
        Default: None

    **Returns**

        A YT LightRay object

    **Example**

    Create a one-zone ray, and generate a COS spectrum from that ray.

    >>> import trident
    >>> ray = trident.make_onezone_ray()
    >>> sg = trident.SpectrumGenerator('COS')
    >>> sg.make_spectrum(ray)
    >>> sg.plot_spectrum('spec_raw.png')

    Create a one-zone ray with an HI column density of 1e21 (DLA) and
    generate a COS spectrum from that ray for just the Lyman alpha line.

    >>> import trident
    >>> ray = trident.make_onezone_ray(column_densities={'H_p0_number_density': 1e21})
    >>> sg = trident.SpectrumGenerator('COS')
    >>> sg.make_spectrum(ray, lines=['Ly a'])
    >>> sg.plot_spectrum('spec_raw.png')
    """
    from yt import save_as_dataset
    length = YTArray([length], "kpc")
    data = {"density": YTArray([density], "g/cm**3"),
            "metallicity": YTArray([metallicity], "Zsun"),
            "dl": length,
            "temperature": YTArray([temperature], "K"),
            "redshift": np.array([redshift]),
            "redshift_eff": np.array([redshift]),
            "velocity_los": YTArray([0.], "cm/s"),
            "x": length / 2, "dx": length,
            "y": length / 2, "dy": length,
            "z": length / 2, "dz": length}

    extra_attrs = {"data_type": "yt_light_ray", "dimensionality": 3}
    field_types = dict([(field, "grid") for field in data.keys()])

    # Add additional number_density fields to dataset
    if column_densities:
        for k, v in six.iteritems(column_densities):
            # Assure we add X_number_density for neutral ions
            # instead of X_p0_number_density
            key_string_list = k.split('_')
            if key_string_list[1] == 'p0':
                k = '_'.join([key_string_list[0],
                              key_string_list[2],
                              key_string_list[3]])
            v = YTArray([v], 'cm**-2')
            data[k] = v / length
            field_types[k] = 'grid'

    ds = {"current_time": 0.,
          "current_redshift": 0.,
          "cosmological_simulation": 0.,
          "domain_left_edge": np.zeros(3) * length,
          "domain_right_edge": np.ones(3) * length,
          "periodicity": [True] * 3}

    save_as_dataset(ds, filename, data, field_types=field_types,
                    extra_attrs=extra_attrs)

    # load dataset and make spectrum
    ray = load(filename)
    return ray
def plot_vel(filament, ds, dataset, fil=-1, maskarray=False):
    # Routine to plot velocity of particles along a filament
    # Set gravitational constant
    G = YTQuantity(6.67408E-11, 'm**3/(kg * s**2)')

    # Gather velocity and density values from disk, done in parallel to speed computation.
    # We first gather a list of the profiles on the disk, then reshape this into a list of
    # [density, other] profiles.  This is then iterated over in parallel to load the correct data.
    filelist = sorted(os.listdir(''.join(['/shome/mackie/data/', dataset, '/profiles'])))
    profnumbers = len(filelist) // 2
    files = [[filelist[i], filelist[i + profnumbers]] for i in range(profnumbers)]
    del filelist, profnumbers

    storage = {}
    for stor, file_in_dir in ytpar.parallel_objects(files, storage=storage):
        # Determines correct index to give to segment profiles
        filnum = int(file_in_dir[0][7:10])
        segnum = int(file_in_dir[0][13:16])

        # Calc total density for each segment
        densprof = yt.load(''.join(['/shome/mackie/data/', dataset, '/profiles/', file_in_dir[0]]))
        dm = densprof.data['dark_matter_density'].in_units('g/cm**3')
        dens = densprof.data['density'].in_units('g/cm**3')
        totaldens = dm + dens
        del densprof, dm, dens

        # Get velocity profiles
        velprof = yt.load(''.join(['/shome/mackie/data/', dataset, '/profiles/', file_in_dir[1]]))
        vel = velprof.data['cylindrical_radial_velocity'].in_units('km/s')

        stor.result = (vel, totaldens)
        stor.result_id = (filnum, segnum)

    # Restructure dict into np arrays of the correct structure.
    vel_profs = [[] for i in range(len(filament))]
    densprofs = [[] for i in range(len(filament))]
    x = yt.load(''.join(['/shome/mackie/data/', dataset, '/profiles/', file_in_dir[0]])).data['x']
    xarr = [[] for i in range(len(filament))]
    for key, values in sorted(storage.items()):
        filnum, segnum = key
        vel, dens = values
        xarr[filnum].append(x.in_units('Mpc'))
        vel_profs[filnum].append(vel.in_units('km/s'))
        densprofs[filnum].append(dens)

    for i in range(len(xarr)):
        xarr[i] = YTArray(np.array(xarr[i]), 'Mpc')
        vel_profs[i] = YTArray(np.array(vel_profs[i]), 'km/s')
    vel_profs = YTArray(np.array(vel_profs), 'km/s')
    xarr = YTArray(np.array(xarr), 'Mpc')
    del storage

    # Turn into np arrays for QoL
    densprofs = np.array(densprofs)

    # Gather x bins from disk
    # Determine masses and thus escape velocities
    mass = [get_masses(densprofs[i], x, filament[i], ds, accumulate=True) for i in range(len(filament))]
    mass = np.array(mass)
    mass = YTArray(mass, 'g')
    print(mass[1][1])
    del densprofs
    print(xarr[1][1])

    vel_ratio = ((2 * G * mass) / xarr) ** (1.0 / 2.0)
    vel_ratio = vel_ratio.in_units('km/s')
    if yt.is_root():
        print(mass[1][1])
        print(xarr[1][1])
        print(vel_ratio[1][1])
    # vel_ratio is **approx** escape vel, used to ratio later

    # Generate ratio of velocity to escape velocity
    vel_profs = (vel_profs.in_units('km/s') / vel_ratio.in_units('km/s'))
    del vel_ratio

    if fil > -1:
        length_plot = plot_vel_fil(vel_profs[fil].v, gen_dists(filament[fil], ds), x)
    else:
        length_plot = None

    if maskarray:
        print("Masking Data")
        vel_profs = np.ma.masked_array(vel_profs, mask=~maskarray, fill_value=np.nan)

    # Flatten vel profs; ought to be a more elegant solution
    vel_prof_flatten = []
    for fil in vel_profs:
        for seg in fil:
            vel_prof_flatten.append(seg)
    vel_profs = np.array(vel_prof_flatten)
    del vel_prof_flatten

    plot = probmap.prob_heat_map(vel_profs, 'radial_velocity', x=x)
    return plot, length_plot
def _CRPressure(field, data):
    crgamma = 4. / 3.
    crpressure = (crgamma - 1.) * data[('Gas', 'CREnergy')].d * \
        data[('Gas', 'density')].in_units('g/cm**3').d
    return YTArray(crpressure, 'dyn/cm**2')
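# Note on the field above: it assumes ('Gas', 'CREnergy') stores *specific*
# cosmic-ray energy (erg/g), so the pressure follows
#   P_cr = (gamma_cr - 1) * e_cr * rho,   with gamma_cr = 4/3.
# It is registered like the other derived fields here, e.g. in
# load_simulation_properties() via
#   ds.add_field(('gas', 'cr_pressure'), function=_CRPressure,
#                sampling_type='particle', units=ds.unit_system['pressure'])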
# Need to think about where I integrate from.  In principle, that lower limit
# should be set by the requirement that the typical surface density of a
# proto-stellar disk, times pi*r^2, is greater than the expected mass.
# from: Dullemond thesis:
def sigma(r, runit, outunit="Msun/AU**2"):
    from yt import YTQuantity  # ,YTArray
    rinau = YTQuantity(r, runit).in_units('AU').item()
    sig_gcm2 = YTQuantity(1700 * (rinau**(-3. / 2.)), 'g/cm**2')
    return sig_gcm2.in_units(outunit).item()

def sigma_integrand(r, runit):
    from numpy import pi
    return 2 * pi * r * sigma(r, runit)

constantsigma = True

# Rmin is set by the requirement that integral_0^Rmin (2*pi*R*dr*sigma) >= M
rbins = YTArray(logspace(-2, 3, 1000), 'AU')
if not constantsigma:
    print("Assuming surface density goes as 1700 g/cm^2 (R/AU)^(-3/2)")
    Mltr = [quad(sigma_integrand, 0, r.in_units('AU').item(), args='AU')[0] for r in rbins]  # in Msun
else:
    print("Assuming a constant surface density of 1500 g/cm^2")
    Mltr = YTArray(np.pi * YTQuantity(1500, 'g/cm**2') * (rbins**2), 'Msun')

Mcut = 30.  # Msun

# http://physics.ucsd.edu/students/courses/winter2008/managed/physics223/documents/Lecture7%13Part3.pdf
Rstar1 = (M1 / YTQuantity(1, 'Msun'))**(15. / 19.) * YTQuantity(1, 'Rsun').in_units('AU')
Rstar2 = (M2 / YTQuantity(1, 'Msun'))**(15. / 19.) * YTQuantity(1, 'Rsun').in_units('AU')

minR = max([rbins[Mltr > Mcut].min().in_units('AU'), (Rstar1 + Rstar2).in_units('AU')])
if minR == Rstar1 + Rstar2:
    print("Minimum separation is coming from the constraint that the two stars not touch")
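# Worked check of the Rmin criterion above, for the constant-surface-density
# branch (a sketch using the same 1500 g/cm^2 and Mcut = 30 Msun adopted here):
#   M(<R) = pi * Sigma * R**2 >= Mcut   implies   R_min = sqrt(Mcut / (pi * Sigma)).
Rmin_analytic = np.sqrt(YTQuantity(Mcut, 'Msun') /
                        (np.pi * YTQuantity(1500, 'g/cm**2'))).in_units('AU')
print("Analytic Rmin for the constant-Sigma case:", Rmin_analytic)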
def _accel_z(field, data):
    accel_unit = ds.length_unit.value / ds.time_unit.value**2
    accel = data[('enzo', 'External_Acceleration_z')] * accel_unit
    return YTArray(accel, 'cm/s**2')
def generate_ray_image_data(field_list, weight_list,
                            model='P0', output=3195,
                            ray_data_file='../../data/P0_z0.25_ray_data.dat',
                            data_loc='.', ion_list='all', redshift=None):
    # load data set with yt, galaxy center, and the bulk velocity of the halo
    if model == 'P0':
        ds = yt.load('~/Work/galaxy/P0/P0.003195')
        ds.add_field(('gas', 'mass'), function=_mass2, units='g', sampling_type='particle')
        trident.add_ion_fields(ds, ions=ion_list)

    # for annoying reasons... need to convert ray positions to "code units"
    code_unit_conversion = ds.domain_right_edge.d / ds.domain_right_edge.in_units('kpc').d

    ray_id_list, impact, bvx, bvy, bvz, xi, yi, zi, xf, yf, zf, cx, cy, cz = \
        np.loadtxt(ray_data_file, skiprows=1, unpack=True)

    gcenter_kpc = [cx[0], cy[0], cz[0]]  # assuming galaxy center is the same for all sightlines
    gcenter = gcenter_kpc * code_unit_conversion
    bulk_velocity = YTArray([bvx[0], bvy[0], bvz[0]], 'km/s')

    # set field parameters so that trident knows to subtract off bulk velocity
    ad = ds.all_data()
    ad.set_field_parameter('bulk_velocity', bulk_velocity)
    ad.set_field_parameter('center', gcenter)

    width = np.array([300., 20., 20.])  # kpc
    width *= code_unit_conversion

    ray_start_list = np.ndarray(shape=(0, 3))
    ray_end_list = np.ndarray(shape=(0, 3))
    for i in range(len(xi)):
        ray_start_list = np.vstack((ray_start_list, [xi[i], yi[i], zi[i]] * code_unit_conversion))
        ray_end_list = np.vstack((ray_end_list, [xf[i], yf[i], zf[i]] * code_unit_conversion))

    for i in [0, 10]:
        # generate the coordinates of the random sightline
        # write ray id, impact parameter, bulk velocity, and start/end coordinates out to file
        h5file = h5.File('%s/ray_image_data_%s_%i_%i.h5' % (data_loc, model, output, ray_id_list[i]), 'a')

        ray_center = ray_start_list[i] + 0.5 * (ray_end_list[i] - ray_start_list[i])
        ray_direction = ray_end_list[i] - ray_start_list[i]
        print(ray_center, ray_direction, width)
        # ray_center = [-0.42299158, -0.30013312, 0.13297239]
        # ray_direction = [0.6779612, -0.68934122, -0.25529845]
        # image = yt.off_axis_projection(ds, ray_center, ray_direction, width,
        #                                [1200, 80], ('gas', 'temperature'), weight=('gas', 'density'))

        for field, weight in zip(field_list, weight_list):
            print(field)
            sys.stdout.flush()
            if weight is not None:
                weight = ('gas', weight)
            if field not in h5file.keys():
                image = yt.off_axis_projection(ds, ray_center, ray_direction, width,
                                               [1200, 80], ('gas', field), weight=weight)
                h5file.create_dataset(field, data=image)
                h5file.flush()

        h5file.close()
        print("saved sightline data %i\n" % (i))
r_plot = np.arange(0.5 * DR, (NR + 0.5) * DR, DR)  # bin centers
if len(r_plot) > NR:
    r_plot = r_plot[1:]

# do this all for h1
pos = gas_pos[glist]
vel = gas_v[glist]
pos -= center_of_quantity(pos, h1 * mass)
vel -= center_of_quantity(vel, h1 * mass)
mask = np.linalg.norm(pos, axis=1) <= r_max
mass_within_r = YTQuantity(np.sum(mass[mask]), 'Msun')
sigv_h1[i] = sigma_vel(vel[mask])
# get mass within r_max or within position of maximum velocity?
vrot_grav_h1[i] = vrot_gravity(mass_within_r,
                               YTArray(r_max, 'kpc').in_units('km'),
                               YTQuantity(sigv_h1[i], 'km/s'), G)
plot_name = results_dir + 'profiles/vrot_h1_profile_gal_' + str(i) + '.png'
vrot_h1[i] = vrot_los(pos[mask], vel[mask], mass[mask], edge_vec, NR, DR,
                      (h1 * mass)[mask], r_plot, plot_name)

# do this all for h2
pos = gas_pos[glist]
vel = gas_v[glist]
pos -= center_of_quantity(pos, h2 * mass)
vel -= center_of_quantity(vel, h2 * mass)
mask = np.linalg.norm(pos, axis=1) <= r_max
mass_within_r = YTQuantity(np.sum(mass[mask]), 'Msun')
def generate_ray_data(model, output, ray_data_file, data_loc='.',
                      ion_list='all', redshift=None):
    # load data set with yt, galaxy center, and the bulk velocity of the halo
    if model == 'P0':
        ds = yt.load('/nobackup/ibutsky/tmp/pioneer.%06d' % (output))
        trident.add_ion_fields(ds, ions=ion_list)

    # for annoying reasons... need to convert ray positions to "code units"
    code_unit_conversion = ds.domain_right_edge.d / ds.domain_right_edge.in_units('kpc').d

    ray_id_list, impact, bvx, bvy, bvz, xi, yi, zi, xf, yf, zf, cx, cy, cz = \
        np.loadtxt('../../data/P0_z0.25_ray_data.dat', skiprows=1, unpack=True)

    gcenter_kpc = [cx[0], cy[0], cz[0]]  # assuming galaxy center is the same for all sightlines
    gcenter = gcenter_kpc * code_unit_conversion
    bulk_velocity = YTArray([bvx[0], bvy[0], bvz[0]], 'km/s')

    # set field parameters so that trident knows to subtract off bulk velocity
    ad = ds.all_data()
    ad.set_field_parameter('bulk_velocity', bulk_velocity)
    ad.set_field_parameter('center', gcenter)

    ray_start_list = np.ndarray(shape=(0, 3))
    ray_end_list = np.ndarray(shape=(0, 3))
    for i in range(len(xi)):
        ray_start_list = np.vstack((ray_start_list, [xi[i], yi[i], zi[i]] * code_unit_conversion))
        ray_end_list = np.vstack((ray_end_list, [xf[i], yf[i], zf[i]] * code_unit_conversion))

    # either specify redshift manually, or determine redshift from the redshift of the simulation
    if redshift is None:
        redshift = round(ds.current_redshift, 2)

    print(gcenter, gcenter[0], bulk_velocity)
    for i in range(1, 150):
        # generate the coordinates of the random sightline
        # write ray id, impact parameter, bulk velocity, and start/end coordinates out to file
        h5file = h5.File('%s/ray_%s_%i_%i.h5' % (data_loc, model, output, ray_id_list[i]), 'a')

        # generate sightline using Trident
        ray = trident.make_simple_ray(ds,
                                      start_position=ray_start_list[i],
                                      end_position=ray_end_list[i],
                                      lines=ion_list,
                                      ftype='gas',
                                      field_parameters=ad.field_parameters,
                                      # the current redshift of the simulation, calculated
                                      # above, rounded to two decimal places
                                      redshift=redshift)
        ad_ray = ray.all_data()

        # generate the list of all of the fields to save, with their sources and units
        field_list = ['y', 'temperature', 'density', 'metallicity', 'dl']
        source_list = [ad, ad, ad, ad, ad_ray]
        unit_list = ['kpc', 'K', 'g/cm**3', 'Zsun', 'cm']
        yt_ion_list = ipd.generate_ion_field_list(ion_list, 'number_density', full_name=False)
        yt_ion_list[0] = 'H_number_density'

        field_list = np.append(field_list, yt_ion_list)
        for j in range(len(yt_ion_list)):
            unit_list.append('cm**-3')
            source_list.append(ad_ray)

        for field, source, unit in zip(field_list, source_list, unit_list):
            if field not in h5file.keys():
                h5file.create_dataset(field, data=source[('gas', field)].in_units(unit))
                h5file.flush()

        h5file.create_dataset('y_lr', data=ad_ray['y'].in_units('kpc'))
        h5file.flush()
        h5file.close()
        print("saved sightline data %i\n" % (i))
north_vector = [0, 1, 0]
W_kpc_initial = 100
W_kpc_final = 40

if i > N / 2.:
    cn = 1. * (i - N / 2.)
    x_w = max(W_kpc_initial - cn, W_kpc_final)
    y_w = max(W_kpc_initial - cn, W_kpc_final)
    z_w = max(W_kpc_initial - cn, W_kpc_final)
else:
    x_w = W_kpc_initial
    y_w = W_kpc_initial
    z_w = W_kpc_initial

W = YTArray([x_w, y_w, z_w], 'kpc')
print(W, L)

N = 512
image1 = yt.off_axis_projection(ds_1, cen_1, L, W, N, ('gas', 'density'),
                                north_vector=north_vector)
image2 = yt.off_axis_projection(ds_2, cen_2, L, W, N, ('gas', 'density'),
                                north_vector=north_vector)

fig, axes = subplots(1, 2, figsize=(10.8, 5))

image1 = image1.in_units('Msun * kpc**-2')
image2 = image2.in_units('Msun * kpc**-2')
# load in simulation data and add ion fields
plot_data = h5.File('../../data/simulation_data/multipanel_data_%s_%06d_%i_kpc' % (model, output, width), 'a')

ds, cen, bv = spg.load_simulation_properties(model)
ds.add_field(('Gas', 'metallicity2'), function=_metallicity2,
             units='Zsun', sampling_type='particle')

sp = ds.sphere(cen, (500, 'kpc'))
sp.set_field_parameter('bulk_velocity', bv)

left_edge = cen - YTArray([250, 250, 250], 'kpc')
right_edge = cen + YTArray([250, 250, 250], 'kpc')
box = ds.region(cen, left_edge, right_edge)
box.set_field_parameter('bulk_velocity', bv)

# set up projection plots for fields that are weighted and unweighted
#del plot_data['radial_velocity']
for i in range(len(field_list)):
    print(field_list[0])
    field = field_list[i][1]
    dset = '%s_%s' % (field, view)
    if dset not in plot_data.keys():
        proj = yt.ProjectionPlot(ds, view, field,
# Bolshoi-P Parameters
box_size = 1024
voxel = 0.3603515625  # Mpc
hubble_param = 0.6775067751
sim_dim = 369  # Mpc

# Constants
lambda_0 = 0.693
matter_0 = 0.307
density_conv = 10e-29

# Setup parameters
density_ratios = [0.25, 0.5, 1, 2, 4]
smoothing = 0.5  # Mpc

scale_factor = YTArray([(1 / (1 + redshift))])
lambda_a = (lambda_0) / ((matter_0 / (scale_factor**3)) + lambda_0)
# print("----lambda_a------")
# print(lambda_a)
# print("----------")

if (smoothing == 0.5):
    if (redshift == 0):
        filepath = filepath_z_0_s_0
    elif (redshift == 0.5):
        filepath = filepath_z_5_s_0
    elif (redshift == 1):
        filepath = filepath_z_1_s_0
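# The dark-energy fraction computed above follows
#   lambda(a) = Omega_Lambda / (Omega_m * a**-3 + Omega_Lambda),
# evaluated at the scale factor a = 1 / (1 + z); at z = 0 this reduces to
# lambda_0 / (matter_0 + lambda_0) = 0.693 for the flat cosmology used here.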
#     return 2*pi*r*sigma(r,runit)
constantsigma = True

# Rmin is set by the requirement that integral_0^Rmin (2*pi*R*dr*sigma) >= M
rbins = YTArray(logspace(-3, 3, 1000), 'AU')
if not constantsigma:
    print("Assuming surface density goes as 1700 g/cm^2 (R/AU)^(-3/2)")
    Mltr = [quad(sigma_integrand, 0, r.in_units('AU').item(), args='AU')[0] for r in rbins]  # in Msun
else:
    print("Assuming a constant surface density of 1500 g/cm^2")
    Mltr = YTArray(np.pi * YTQuantity(1500, 'g/cm**2') * (rbins**2), 'Msun')

rscaling = 0.555  # http://faculty.buffalostate.edu/sabatojs/courses/GES639/S10/reading/mass_luminosity.pdf
M1 = YTArray(linspace(5, 100, 100), 'Msun')
# TODO need a better Rstar(Mstar) scaling that covers all masses
Rstar1 = (M1 / YTQuantity(1, 'Msun'))**(rscaling) * YTQuantity(1, 'Rsun').in_units('AU')

ratios = [1, 0.75, 0.5, 0.25, 0.1, 0.01]
Msecondary = [M1 * rat for rat in ratios]
minP = []
minCoal = []
minR = []
# minP = [empty(Mprimary.shape[0]) for ii in range(len(ratios))]
# minCoal = [empty(Mprimary.shape[0]) for ii in range(len(ratios))]

for ii in range(len(ratios)):
    M2 = Msecondary[ii]
    # http://physics.ucsd.edu/students/courses/winter2008/managed/physics223/documents/Lecture7%13Part3.pdf
    Rstar2 = (M2 / YTQuantity(1, 'Msun'))**(rscaling) * YTQuantity(1, 'Rsun').in_units('AU')