def get_halo_particles(my_halo, par_file):
    pf = yt.load(par_file)

    if yt.is_root():
        print ("Reading in particles for halo %d." % my_halo['id'])

    halo_files = glob.glob(os.path.join(pf.fullpath, 'MergerHalos_*.h5'))

    particle_indices = np.array([])
    particle_masses = np.array([])
    pos_x = np.array([])
    pos_y = np.array([])
    pos_z = np.array([])
    halo_name = 'Halo%08d' % my_halo['id']
    for halo_file in halo_files:
        halo_h5 = h5.File(halo_file, 'r')
        if halo_name in halo_h5.keys():
            particle_indices = np.concatenate([particle_indices, halo_h5[halo_name]['particle_index'].value])
            particle_masses = np.concatenate([particle_masses, halo_h5[halo_name]['ParticleMassMsun'].value])
            pos_x = np.concatenate([pos_x, halo_h5[halo_name]['particle_position_x'].value])
            pos_y = np.concatenate([pos_y, halo_h5[halo_name]['particle_position_y'].value])
            pos_z = np.concatenate([pos_z, halo_h5[halo_name]['particle_position_z'].value])
        halo_h5.close()
    particle_positions = np.array([pos_x, pos_y, pos_z])
    if particle_indices.size == 0:
        if yt.is_root():
            print ("Error: could not locate halo %d." % my_halo['id'])
        return None
    return (particle_indices, particle_masses, particle_positions)
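Every example on this page hinges on the same root guard; a minimal sketch of the pattern, assuming the script is launched under MPI (e.g. with mpirun) and mpi4py is installed:

import yt
yt.enable_parallelism()  # a no-op without MPI; returns True under mpirun

# ... per-rank work (piter, parallel_objects, ...) goes here ...

if yt.is_root():
    # only rank 0 reaches this block, so one-time I/O happens exactly once
    print("finished")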
Example No. 2
def initialize():
    try:
        array = pickle.load(open('minarray.p', 'rb'))  # pickles must be opened in binary mode
        if yt.is_root():
            print('minarray.p loaded')
    except IOError:
        if yt.is_root():
            print('minarray.p not found. Run yt_get_mins.py first')
        return

    flag = array['Eintmin'] < Eint_threshold
    global toplot
    toplot = array['basename'][flag]
Example No. 3
def phase(sq, phfname, bin_fields, aux={}):
    pdfs = my_pdf(sq)

    for bf in bin_fields:
        nbin1, nbin2 = (128, 128)
        if bf[0] in aux: nbin1 = aux[bf[0]]['n_bins']
        if bf[1] in aux: nbin2 = aux[bf[1]]['n_bins']
        n_bins = (nbin1, nbin2)
        logs = {}
        unit = {}
        extrema = {}
        for b in bf:
            logs[b] = False
            if b in aux:
                if 'log' in aux[b]: logs[b] = aux[b]['log']
                if 'unit' in aux[b]: unit[b] = aux[b]['unit']
                if 'limits' in aux[b]: extrema[b] = aux[b]['limits']
        pdf = yt.create_profile(sq,
                                bf,
                                fields=fields,
                                n_bins=n_bins,
                                logs=logs,
                                extrema=extrema,
                                units=unit,
                                weight_field=None,
                                fractional=True)
        pdfs.add_pdf(pdf, bf)
    if yt.is_root():
        pickle.dump(pdfs, open(phfname, 'wb'), pickle.HIGHEST_PROTOCOL)
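For reference, a hypothetical aux layout for the signature above; the field names, bin counts, units, and limits are illustrative, not from the source:

aux = {
    'density':     {'n_bins': 256, 'log': True, 'unit': 'g/cm**3',
                    'limits': (1e-27, 1e-21)},
    'temperature': {'n_bins': 128, 'log': True, 'limits': (1e1, 1e7)},
}
phase(sq, 'phase_0000.p', [['density', 'temperature']], aux=aux)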
Example No. 4
def calculate_mass_in_sphere(dd):
	data_sub_dir = dd.name
	a = dd.a	
	r_plus = M*(1 + math.sqrt(1 - a**2))
	min_radius = r_plus/4

	start_time = time.time()
	
	# load dataset time series
	dataset_path = data_root_path + "/" + data_sub_dir + "/KerrSFp_*.3d.hdf5"
	ds = yt.load(dataset_path) # this loads a dataset time series
	print("loaded data from ", dataset_path)
	print("time = ", time.time() - start_time)
	N = len(ds)
	
	# set centre
	center = [512.0, 512.0, 0]
	L = 512.0	

	data_storage = {}
	# iterate through datasets (forcing each to go to a different processor)
	for sto, dsi in ds.piter(storage=data_storage):
		time_0 = time.time()
		# store time
		current_time = dsi.current_time 
		output = [current_time]
		
		# make shell
		shell = dsi.sphere(center, max_R+0.5*dR) - dsi.sphere(center, max_R-0.5*dR)
			
		# calculate the flux through the shell
		meanJr = shell.mean("J_r", weight="cell_volume")
		if half_box:
			area = 2*np.pi*max_R**2
		else:
			area = 4*np.pi*max_R**2
		flux = meanJr*area
		output.append(flux)
		
		# store output
		sto.result = output
		sto.result_id = str(dsi)
		dt = 2.5
		i = int(current_time/dt)
		print("done {:d} of {:d} in {:.1f} s".format(i+1, N, time.time()-time_0), flush=True)
	
	if yt.is_root():	
		# make data directory if it does not already exist
		makedirs(home_path + output_dir, exist_ok=True)
		# output to file
		dd.filename = "{:s}_mass_flux_at_R={:d}.csv".format(dd.name, max_R)
		output_path = home_path + output_dir + "/" + dd.filename 
		# output header to file
		f = open(output_path, "w+")
		f.write("# t	mass flux at R=" + str(max_R) + " #\n")
		# output data
		for key in sorted(data_storage.keys()):
			data = data_storage[key]
			f.write("{:.3f}	".format(data[0]))
			f.write("{:.2f}\n".format(data[1]))
		f.close()
		print("saved data to file " + str(output_path))
Example No. 5
def plot_vel_fil(vel_prof,dists,y):
    #Generates plot of velocities along a filament
    #Generate plot
    if yt.is_root():
        plot = plt.figure()
        #Set up x and y values, and correctly fill and ensure shape is correct
        x = np.empty((dists.shape[0],y.shape[0]))
        yy = np.empty((vel_prof.shape[0],y.shape[0]))
        x = x.T
        for i in range(vel_prof.shape[0]):
            yy[i] = y
        for i in range(y.shape[0]):
            x[i] = dists
        x = x.T
        #Ensure our colorbar is in correct form
        colormax = np.nanmax(vel_prof)
        colormin = abs(np.nanmin(vel_prof))
        colormax = max([colormax,colormin])
        colormin = 0 - colormax

        print( x.shape, yy.shape, vel_prof.shape)
                
        #Create a 'probability' map plot
        vel_map = plt.pcolormesh(x,yy,vel_prof,cmap='seismic', vmin =colormin, vmax=colormax)
        #Label graph
        plt.xlabel("Distance along filament (Mpc)")
        plt.ylabel("Radius from Filament (Mpc)")
        cbar = plt.colorbar(vel_map)
        cbar.ax.set_ylabel("Velocity (km/s)")
  
    
    
        return plot
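The x/yy construction above is equivalent to a mesh grid; a small numpy sketch with illustrative arrays:

import numpy as np

dists = np.linspace(0.0, 5.0, 4)              # illustrative values
y = np.linspace(0.0, 1.0, 3)
x, yy = np.meshgrid(dists, y, indexing='ij')  # x[i, j] = dists[i], yy[i, j] = y[j]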
Example No. 6
def apply_negative_radial_fit(name,Ncores):

    KK=np.load(name+'_KK.npy')
    KT=KK[0]
    KD=KK[1]

    Nbins=len(KT)
    my_storage = {}
    maps=[]
    print('computing optimization')
    indices=range(Nbins)
    for sto, kt in yt.parallel_objects(set(KT), Ncores, storage = my_storage):

        output = temporal_turbulence(name,kt)
        sto.result_id = kt
        sto.result = output

    print('applying optimization')
    my_storage2 = {}
    for sto, ij in yt.parallel_objects(indices, Ncores, storage = my_storage2):
        mapa=apply_temp(name,KT,KD,ij,Nbins)
        sto.result_id = ij
        sto.result = mapa
    if yt.is_root():
        for fn, vals in sorted(my_storage2.items()):
            if fn == 0:
                mapa=vals
            else:
                mapa+=vals
        np.save(name+'_negative_fit_%02d' %Nbins,mapa)
Example No. 7
def Circulation_negative_turbulence(name,Nbins,start,kmin,kmax,Nk,Ncores):
    ktarray=np.exp(np.linspace(np.log(kmin),np.log(kmax),Nk))
    my_storage = {}
    kdmatrix=[]
    errmatrix=[]
    for sto, kt in yt.parallel_objects(ktarray, Ncores, storage = my_storage):

        output = Circulation_negative_fits(name,kt,Nbins,start)
        sto.result_id = kt
        sto.result = output


    if yt.is_root():
        for fn, vals in sorted(my_storage.items()):
            kdmatrix.append(vals[0])
            errmatrix.append(vals[1])
            rcen = vals[2]
        kdmatrix=np.array(kdmatrix)
        errmatrix=np.array(errmatrix)

        KT=np.zeros(Nbins)
        KD=np.zeros(Nbins)
        for j in range(Nbins):
            optimo = np.argmin(errmatrix[:, j])
            KD[j] = kdmatrix[optimo, j]
            KT[j] = ktarray[optimo]

        KK=np.array([KT,KD])
        np.save(name+'_KK',KK)
        return KT,KD
Example No. 8
def slices(ds, slcfname, slc_fields):
    global aux
    ds.coordinates.x_axis[1] = 0
    ds.coordinates.x_axis['y'] = 0
    ds.coordinates.y_axis[1] = 2
    ds.coordinates.y_axis['y'] = 2

    time = ds.current_time.in_units('Myr').v
    c = ds.domain_center
    dx = ds.domain_width / ds.domain_dimensions

    slc_data = {}
    slc_data['time'] = time
    ya.check_aux(slc_fields)

    for i, axis in enumerate(['x', 'y', 'z']):
        slc = yt.SlicePlot(ds, axis, slc_fields)
        ix = ds.coordinates.x_axis[axis]
        iy = ds.coordinates.y_axis[axis]
        res = (slc.width[1] / dx[ix], slc.width[0] / dx[iy])

        slc_frb = slc.data_source.to_frb(slc.width[0], res, c, slc.width[1])
        extent = np.array(slc_frb.bounds) / 1.e3
        slc_data[axis] = {}
        slc_data[axis + 'extent'] = extent
        for f in slc_fields:
            if 'unit' in aux[f]:
                slc_data[axis][f] = np.array(slc_frb[f].in_units(
                    aux[f]['unit']))
            else:
                slc_data[axis][f] = np.array(slc_frb[f])
            if 'factor' in aux[f]: slc_data[axis][f] *= aux[f]['factor']

    if yt.is_root():
        pickle.dump(slc_data, open(slcfname, 'wb'), pickle.HIGHEST_PROTOCOL)
Example No. 9
def phase(sq, phfname, bin_fields):
    global aux
    pdfs = my_pdf(sq)

    for bf in bin_fields:
        n_bins = (aux[bf[0]]['n_bins'], aux[bf[1]]['n_bins'])
        logs = {}
        unit = {}
        extrema = {}
        for b in bf:
            logs[b] = aux[b]['log']
            if 'unit' in aux[b]: unit[b] = aux[b]['unit']
            if 'limits' in aux[b]: extrema[b] = aux[b]['limits']
        pdf = yt.create_profile(sq,
                                bf,
                                fields=fields,
                                n_bins=n_bins,
                                logs=logs,
                                extrema=extrema,
                                units=unit,
                                weight_field=None,
                                fractional=True)
        pdfs.add_pdf(pdf, bf)
    if yt.is_root():
        pickle.dump(pdfs, open(phfname, 'wb'), pickle.HIGHEST_PROTOCOL)
Example No. 10
def slices(ds, slcfname, slc_fields, aux={}):
    ds.coordinates.x_axis[1] = 0
    ds.coordinates.x_axis['y'] = 0
    ds.coordinates.y_axis[1] = 2
    ds.coordinates.y_axis['y'] = 2

    time = ds.current_time.in_units('Myr').v
    c = ds.domain_center
    dx = ds.domain_width / ds.domain_dimensions

    slc_data = {}
    slc_data['time'] = time

    for i, axis in enumerate(['x', 'y', 'z']):
        slc = yt.SlicePlot(ds, axis, slc_fields)
        ix = ds.coordinates.x_axis[axis]
        iy = ds.coordinates.y_axis[axis]
        #        res=(int(slc.width[1]/dx[ix]),int(slc.width[0]/dx[iy]))
        res = (ds.domain_dimensions[ix], ds.domain_dimensions[iy])
        slc_frb = slc.data_source.to_frb(slc.width[0], res, c, slc.width[1])
        extent = np.array(slc_frb.bounds) / 1.e3
        slc_data[axis] = {}
        slc_data[axis + 'extent'] = extent
        for f in slc_fields:
            slc_data[axis][f] = np.array(slc_frb[f])
            if f in aux:
                if 'unit' in aux[f]:
                    slc_data[axis][f] = np.array(slc_frb[f].in_units(
                        aux[f]['unit']))
                    #print(f,aux[f]['unit'])
                if 'factor' in aux[f]: slc_data[axis][f] *= aux[f]['factor']

    if yt.is_root():
        pickle.dump(slc_data, open(slcfname, 'wb'), pickle.HIGHEST_PROTOCOL)
Example No. 11
def main(**kwargs):

    dir = kwargs['base_directory'] + kwargs['directory']
    fname = glob.glob(dir + 'id0/' + kwargs['id'] + '.????.vtk')
    fname.sort()

    ngrids = len(glob.glob(dir + 'id*/' + kwargs['id'] + fname[0][-9:]))
    comm = yt.communication_system.communicators[-1]
    nprocs = comm.size
    print(ngrids, nprocs)

    if yt.is_root():
        if not os.path.isdir(dir + 'phase/'): os.mkdir(dir + 'phase/')

    for f in fname:
        phfname = dir + 'phase/' + kwargs['id'] + f[-9:-4] + '.phase.p'
        if os.path.isfile(phfname):
            print('%s is already there' % phfname)
        else:
            if ngrids > nprocs: ds = yt.load(f, units_override=unit_base)
            else: ds = yt.load(f, units_override=unit_base, nprocs=nprocs * 8)
            le = np.array(ds.domain_left_edge)
            re = np.array(ds.domain_right_edge)
            sq = ds.box(le, re)
            pdfs = my_pdf(sq)

            for bf in bin_fields:
                n_bins = (aux[bf[0]]['n_bins'], aux[bf[1]]['n_bins'])
                logs = {}
                unit = {}
                extrema = {}
                for b in bf:
                    logs[b] = aux[b]['log']
                    if 'unit' in aux[b]: unit[b] = aux[b]['unit']
                    if 'limits' in aux[b]: extrema[b] = aux[b]['limits']
                pdf = yt.create_profile(sq,
                                        bf,
                                        fields=fields,
                                        n_bins=n_bins,
                                        logs=logs,
                                        extrema=extrema,
                                        units=unit,
                                        weight_field=None,
                                        fractional=True)
                pdfs.add_pdf(pdf, bf)
            if yt.is_root():
                pickle.dump(pdfs, open(phfname, 'wb'), pickle.HIGHEST_PROTOCOL)
Example No. 12
def yt_derived_field_demo():
    ds = yt.frontends.libyt.libytDataset()
    slc1 = yt.SlicePlot(ds, "z", ("gamer", "level_derived_func"))
    slc2 = yt.SlicePlot(ds, "z", ("gamer", "level_derived_func_with_name"))

    if yt.is_root():
        slc1.save()
        slc2.save()
Example No. 13
def yt_inline_inputArg( fields ):
    # Get data
    ds = yt.frontends.libyt.libytDataset()

    # Do ProjectionPlot to fields input by gamer
    sz = yt.ProjectionPlot(ds, 'z', fields, center='c')

    if yt.is_root():
        sz.save()
Example No. 14
def yt_inline_ProjectionPlot( fields ):
    # Load the data, just like using yt.load()
    ds = yt.frontends.libyt.libytDataset()

    # Do yt operation
    prjz = yt.ProjectionPlot(ds, 'z', fields)

    # Include this line, otherwise yt will save one copy in each rank.
    if yt.is_root():
        prjz.save()
Example No. 15
def do_projection(ds, surfname):
    proj = ds.proj('density', axis='z')
    w = ds.domain_width[0]
    res = (ds.domain_dimensions[0], ds.domain_dimensions[1])
    frb = proj.to_frb(width=w, resolution=res)
    surf = np.array(frb['density'].in_units('Msun/pc**2'))
    if yt.is_root():
        pickle.dump({
            'data': surf,
            'bounds': frb.bounds
        }, open(surfname, 'wb'), pickle.HIGHEST_PROTOCOL)
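The pickled dictionary can be read back and plotted anywhere, without MPI; a small sketch (file name illustrative, matplotlib assumed):

import pickle
import matplotlib.pyplot as plt

with open('surf.p', 'rb') as fp:
    surf = pickle.load(fp)
plt.imshow(surf['data'], origin='lower',
           extent=[float(b) for b in surf['bounds']])
plt.colorbar(label='surface density (Msun/pc**2)')
plt.savefig('surf.png')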
Example No. 16
def projection(ds, surfname):
    time = ds.current_time.in_units('Myr').v
    ds.coordinates.x_axis[1] = 0
    ds.coordinates.x_axis['y'] = 0
    ds.coordinates.y_axis[1] = 2
    ds.coordinates.y_axis['y'] = 2

    c = ds.domain_center
    dx = ds.domain_width / ds.domain_dimensions

    surf_data = {}
    surf_data['time'] = time
    for i, axis in enumerate(['x', 'y', 'z']):
        proj = ds.proj('density', axis=axis)
        ix = ds.coordinates.x_axis[axis]
        iy = ds.coordinates.y_axis[axis]
        res = (ds.domain_dimensions[ix], ds.domain_dimensions[iy])
        frb = proj.to_frb(ds.domain_width[ix], res, c, ds.domain_width[iy])
        surf = np.array(frb['density'].in_units('Msun/pc**2'))
        bounds = np.array(frb.bounds)
        surf_data[axis] = {'data': surf, 'bounds': bounds}
    if yt.is_root():
        pickle.dump(surf_data, open(surfname, 'wb'), pickle.HIGHEST_PROTOCOL)

    scal_fields = ya.get_scalars(ds)
    for nscal, sf in enumerate(scal_fields):
        scal_data = {}
        scal_data['time'] = time
        for i, axis in enumerate(['x', 'y', 'z']):
            proj = ds.proj(sf, axis=axis, weight_field='density')
            ix = ds.coordinates.x_axis[axis]
            iy = ds.coordinates.y_axis[axis]
            res = (ds.domain_dimensions[ix], ds.domain_dimensions[iy])
            frb = proj.to_frb(ds.domain_width[ix], res, c, ds.domain_width[iy])
            scal = np.array(frb[sf])
            bounds = np.array(frb.bounds)
            scal_data[axis] = {'data': scal, 'bounds': bounds}
        if yt.is_root():
            scalfname = surfname.replace('surf.p', 'scal{}.p'.format(nscal))
            pickle.dump(scal_data, open(scalfname, 'wb'),
                        pickle.HIGHEST_PROTOCOL)
Example No. 17
def yt_inline_ParticlePlot():
    ds = yt.frontends.libyt.libytDataset()

    ## ParticleProjectionPlot
    #==========================
    # par = yt.ParticleProjectionPlot(ds, "z")

    ## ParticlePlot
    #==========================
    par = yt.ParticlePlot(ds, "particle_position_x", "particle_position_y", "Level", center = 'c')
    if yt.is_root():
        par.save()
Example No. 18
def yt_inline():
    # Get data
    ds = yt.frontends.libyt.libytDataset()

    # Do ProjectionPlot to field Cloud0.
    sz = yt.ProjectionPlot(ds, 'z', ('gamer', 'Cloud0'), center='c')

    # Do ParticlePlot
    par = yt.ParticlePlot(ds, 'particle_position_x', 'particle_position_y', 'particle_mass', center='c')

    if yt.is_root():
        sz.save()
        par.save()
Example No. 19
def yt_plot_mins():
    initialize()
    tseries_toplot = yt.DatasetSeries(toplot, parallel=True)
    logging.getLogger().setLevel(logging.INFO)
    for ds in tseries_toplot.piter():
        min_val, loc_min = ds.h.find_min('eint')
        print('Plotting %s' % ds.basename)
        plotSlices(ds, zoom_fac=4, center=loc_min, drawnozzle=False,
                   markcenter=True, proj_axes=['x', 'y', 'z'],
                   fields=['eint', 'pressure'])
    if yt.is_root():
        t2 = time.time()
        print('Total time: %.2f s\n--\ninitialization: %.2f s\nplotting: %.2f s'
              % (t2-t0, t1-t0, t2-t1))
Example No. 20
def Circulation_negative_optimum_mpi(name,kmin,kmax,Nk,Ncores):
    ktarray=np.exp(np.linspace(np.log(kmin),np.log(kmax),Nk))
    my_storage = {}
    kdarray=[]
    erarray=[]
    for sto, kt in yt.parallel_objects(ktarray, Ncores, storage = my_storage):

        output = Circulation_negative_optimum_thread(name,kt)
        sto.result_id = kt
        sto.result = output

    if yt.is_root():
        for fn, vals in sorted(my_storage.items()):
            kdarray.append(vals[0])
            erarray.append(vals[1])
        kdarray=np.array(kdarray)
        erarray=np.array(erarray)

        optimo=np.where(erarray==erarray.min())
        return ktarray[optimo],kdarray[optimo]
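Both functions above use the same yt.parallel_objects idiom; a minimal sketch of it in isolation, assuming an MPI launch:

import yt
yt.enable_parallelism()

my_storage = {}
for sto, k in yt.parallel_objects(range(8), 0, storage=my_storage):
    sto.result_id = k   # key under which this result is gathered
    sto.result = k**2   # any picklable payload
if yt.is_root():
    print(sorted(my_storage.items()))  # [(0, 0), (1, 1), ..., (7, 49)]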
Example No. 21
def main(**kwargs):

    dir = kwargs['base_directory'] + kwargs['directory']
    fname = glob.glob(dir + 'id0/' + kwargs['id'] + '.????.vtk')
    fname.sort()

    ngrids = len(glob.glob(dir + 'id*/' + kwargs['id'] + fname[0][-9:]))
    comm = yt.communication_system.communicators[-1]
    nprocs = comm.size
    print(ngrids, nprocs)

    if yt.is_root():
        if not os.path.isdir(dir + 'surf/'): os.mkdir(dir + 'surf/')
    for f in fname:
        surfname = dir + 'surf/' + kwargs['id'] + f[-9:-4] + '.surf.p'
        if os.path.isfile(surfname):
            print('%s is already there' % surfname)
        else:
            if ngrids > nprocs: ds = yt.load(f, units_override=unit_base)
            else: ds = yt.load(f, units_override=unit_base, nprocs=nprocs * 8)
            do_projection(ds, surfname)
Example No. 22
def get_halo_sphere_particles(my_halo, par_file, radius_factor=5):
    pf = yt.load(par_file)
    halo_catalog = os.path.join(pf.fullpath, 'MergerHalos.out')
    if 'center' in my_halo:
        my_halo_data = my_halo
        my_halo_data["center"] = pf.arr(my_halo['center'][0], my_halo['center'][1])
    else:
        my_halo_data = read_from_catalog(my_halo, halo_catalog)
    rho_crit = pf.quan(rho_crit_now, "g/cm**3") * pf.hubble_constant**2 * \
        (1 + pf.current_redshift)**3
    if 'mass' in my_halo:
        if isinstance(my_halo_data['mass'], tuple):
            units = my_halo_data['mass'][1]
        else:
            units = "Msun"
        hmass = pf.quan(my_halo_data['mass'][0], units)
    else:
        hmass = None

    if 'radius' in my_halo:
        r_200 = pf.quan(my_halo['radius'][0], my_halo['radius'][1])
    else:
        r_200 = (((3. * hmass) /
                  (4. * np.pi * rho_crit * 200.))**(1./3.)).to("Mpc")

    if yt.is_root():
        print ("Reading particles for a sphere surrounding halo %d." % my_halo['id'])
        print ("Halo %d, pos: %f, %f, %f, mass: %s, r_200: %s." % \
            (my_halo_data['id'], my_halo_data['center'][0], my_halo_data['center'][1],
             my_halo_data['center'][2], hmass, r_200))

    my_sphere = pf.h.sphere(my_halo_data['center'], radius_factor * r_200)
    return (my_halo_data['center'],
            my_sphere['particle_index'], 
            my_sphere['particle_mass'].in_units('Msun'),
            np.array([my_sphere['particle_position_x'],
                      my_sphere['particle_position_y'],
                      my_sphere['particle_position_z']]))
Example No. 23
def run():
    input_fn, output_fn, selection, group = sys.argv[1:5]
    njobs = int(sys.argv[5])
    dynamic = bool(int(sys.argv[6]))

    a = ytree.load(input_fn)
    if "test_field" not in a.field_list:
        a.add_analysis_field("test_field", default=-1, units="Msun")

    trees = list(a[:8])

    for tree in trees:
        for node in ytree.parallel_tree_nodes(tree,
                                              group=group,
                                              njobs=njobs,
                                              dynamic=dynamic):
            root = node.root
            yt.mylog.info(
                f"Doing {node.tree_id}/{root.tree_size} of {root._arbor_index}"
            )
            node["test_field"] = 2 * node["mass"]

    if yt.is_root():
        a.save_arbor(filename=output_fn, trees=trees)
Example No. 24
def get_halo_indices(my_halo, dataset, method='sphere', radius_factor=5.0):
    shifted = np.zeros(3, dtype=bool)

    if method == 'halo':
        halo_indices, particle_masses, particle_positions = \
            get_halo_particles(my_halo, dataset)
    elif method == 'sphere':
        halo_com, halo_indices, particle_masses, particle_positions = \
            get_halo_sphere_particles(my_halo, dataset, radius_factor=radius_factor)

    if method == 'sphere':
        unitary_1 = halo_com.to("unitary").uq
    for i, axis in enumerate(axes):
        if particle_positions[i].max() - particle_positions[i].min() > 0.5:
            if yt.is_root():
                print("Halo periodic in %s." % axis)
            particle_positions[i] -= 0.5
            particle_positions[i][particle_positions[i] < 0.0] += 1.0
            if method == 'sphere':
                # the center of mass only exists at this point for the sphere method
                halo_com[i] -= 0.5 * unitary_1
                if halo_com[i] < 0.0:
                    halo_com[i] += 1.0 * unitary_1
            shifted[i] = True
    if method == 'halo':
        halo_com = (particle_positions * particle_masses).sum(axis=1) / particle_masses.sum()
    return (halo_indices, halo_com, shifted)
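The periodic unwrap above assumes positions in box (unitary) units on [0, 1); a toy numpy check of the shift:

import numpy as np

pos = np.array([0.02, 0.98, 0.95])  # a halo straddling the boundary
if pos.max() - pos.min() > 0.5:     # spread over half the box implies wrapping
    pos -= 0.5
    pos[pos < 0.0] += 1.0
print(pos)                          # [0.52 0.48 0.45] -- now contiguous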
Example No. 25
def plot_a_vr(path,
              header,
              cycle,
              use_ghost_zones=True,
              annotate=True,
              rotate=False,
              zoom=False):
    """
    volume rendering plot

    path [string]  : the path of data files
    header [string]: the header of the data file
    cycle [int]    : the data cycle

    ex. for file: /data/ccsn3d_hdf5_plt_cnt_0100

        path   = "/data"
        header = "ccsn3d"
        cycle  = 100
    """

    fn = get_fn(path, header, cycle)
    ds = yt.load(fn)

    # register new units for entropy
    ds.unit_registry.add('kB',
                         1.3806488e-16,
                         dimensions=energy / temperature,
                         tex_repr='k_{B}')
    ds.unit_registry.add('by', 1.674e-24, dimensions=mass, tex_repr='baryon')

    # create a new derived field: Entropy
    def _entr(field, data):
        """
	fixed the entropy units in flash
        """
        entr = data["entr"]
        kb_by = yt.YTQuantity(1, 'erg') / yt.YTQuantity(
            1, 'K') / yt.YTQuantity(1, 'g') * (1.3806488e-16 / 1.674e-24)
        return entr * kb_by

    ds.add_field("Entr",
                 function=_entr,
                 units="kB/by",
                 display_name="Entropy",
                 dimensions=energy / temperature / mass)

    # create a new derived field: Entropy
    def _entrdens(field, data):
        """
        create a new derived field to show both entropy and density

        if density > PNS_DENSITY:
            entropy = PNS_ENTR
        else:
            entropy = entropy

        """
        dens = data["dens"]
        entr = data["entr"]
        entrdens = entr * (
            np.exp(-(dens.in_cgs() / PNS_DENSITY)**5)) + PNS_ENTR
        kb_by = yt.YTQuantity(1, 'erg') / yt.YTQuantity(
            1, 'K') / yt.YTQuantity(1, 'g') * (1.3806488e-16 / 1.674e-24)
        return entrdens * kb_by

    ds.add_field("Entropy",
                 function=_entrdens,
                 units="kB/by",
                 display_name="Entropy",
                 dimensions=energy / temperature / mass)

    #debug: also plot a entropy slice
    if yt.is_root():
        pc = yt.SlicePlot(ds, 'z', 'Entr')
        pc.zoom(40)
        pc.set_log('Entr', False)
        pc.save('fig_slice_z_' + header + '_' + str(cycle).zfill(4) +
                '.png')

    # get the entropy range from time
    time = ds.current_time.in_cgs().v - TSHIFT

    if CUSTOM_EMAX:
        emin, emax = get_emin_emax(time)
    else:
        entropy_max, pe = ds.find_max('Entropy')
        emax = entropy_max.v - 1.0
        emin = emax - 3.0

    if yt.is_root():
        print "emin/emax:", emin, emax
        # check if emax > emin
        if emax <= emin:
            print "Error: emax <= emin"
            quit()

    # only render the region with r < 1.e8 cm
    # this is necessary if we want to include ghost zones
    sphere = ds.sphere([0, 0, 0], (1.e8, 'cm'))
    sc = yt.create_scene(sphere, field='Entropy')

    # set the camera resolution and width
    sc.camera.north_vector = [0, 0, 1]
    sc.camera.resolution = (VR_RESOLUTION, VR_RESOLUTION)
    sc.camera.set_width(ds.quan(VR_WIDTH, 'cm'))

    #sc.camera.set_width(ds.quan(2.5e7,'cm'))

    # define the tranfer function for high entropy region
    def linearFunc(values, minv, maxv):
        try:
            return ((values) - values.min()) / (values.max() - values.min())
            #return (na.sqrt(values) - values.min())/(values.max()-values.min())
        except:
            return 1.0

    source = sc.get_source(0)
    source.set_use_ghost_zones(use_ghost_zones)  # *** SLOW ***
    source.set_log(False)
    source.grey_opacity = True

    # create a transfer function helper
    tfh = TransferFunctionHelper(ds)
    tfh.set_field('Entropy')
    tfh.set_bounds()
    tfh.set_log(False)
    tfh.build_transfer_function()

    # add a thin layer to show the shock front
    esh, ew = get_shock_entropy(time, emin, emax)
    tfh.tf.add_layers(1,
                      w=(ew),
                      mi=(esh),
                      ma=(esh + 0.5),
                      col_bounds=[4.0, (esh + 1.0)],
                      alpha=10.0 * esh * np.ones(1),
                      colormap='cool_r')

    # plot the PNS at entr = PNS_ENTR
    tfh.tf.add_layers(1,
                      w=0.5,
                      mi=0.49,
                      ma=0.55,
                      col_bounds=[0.05, 0.55],
                      alpha=100.0 * np.ones(1),
                      colormap='Purples')

    # map the high entropy region: version 3
    tfh.tf.map_to_colormap(emin,
                           emax,
                           scale=100.0,
                           scale_func=linearFunc,
                           colormap='autumn')

    # version 1: use many layers
    #tfh.tf.add_layers(12,w=0.05,mi=emin,ma=emax,col_bounds=[emin,emax],
    #        alpha=70*np.linspace(0.0,1.5,10),colormap='hot')

    tfh.tf.grey_opacity = True
    source.set_transfer_function(tfh.tf)
    source.grey_opacity = True

    # plot the transfer function
    #source.tfh.plot('fig_transfer_function_entr.png', profile_field='cell_mass')

    # plot volume rendering plot without annotation
    if not annotate:
        sc.save('fig_vr_' + header + '_' + str(cycle).zfill(4) + '.png',
                sigma_clip=4)

    else:
        # with annotation
        sc.annotate_axes(alpha=0.8)
        #sc.annotate_scale()  #

        # line annotation
        #annotate_width = 1.0e7  # [cm]
        #annotate_axis  = 1.5e7  # [cm]
        #annotate_at    = (annotate_axis/2.0 - annotate_width/(2.0*np.sqrt(2.0)))

        #colors   = np.array([[0.45,0.5,0.5,0.7]]) # white
        #vertices = np.array([[[annotate_at,0.,0.],[0.,annotate_at,0.]]])*annotate_width*cm
        #lines    = LineSource(vertices,colors)
        #sc.add_source(lines)

        #sc.annotate_domain(ds,color=[1,1,1,0.01])
        #text_string= "Time = %.1f (ms)" % (float(ds.current_time.to('s')*1.e3))
        text_string = "Time = %.1f (ms)" % (float(time * 1.e3))
        sc.save_annotated("fig_vr_" + header + "_annotated_" +
                          string.zfill(cycle, 4) + '.png',
                          sigma_clip=4.0,
                          label_fmt="%.1f",
                          text_annotate=[[(0.05, 0.95), text_string,
                                          dict(color="w",
                                               fontsize="20",
                                               horizontalalignment="left")]])

    # rotate the camera
    if DO_ROT:
        rotate = True

    if rotate:
        fstart = 1  # modify here for restart
        frames = ROT_FRAMES  # total number of frames for rotation
        cam = sc.camera
        i = 0
        for _ in cam.iter_rotate(2.0 * np.pi, frames):
            i += 1
            if i >= fstart:
                sc.render()
                if not annotate:
                    sc.save("fig_vr_" + header + "_" + string.zfill(cycle, 4) +
                            '_rot_' + string.zfill(i, 4) + '.png',
                            sigma_clip=2)
                else:
                    sc.save_annotated("fig_vr_" + header + "_annotated_" +
                                      string.zfill(cycle, 4) + '_rot_' +
                                      string.zfill(i, 4) + '.png',
                                      sigma_clip=4.0,
                                      text_annotate=[[
                                          (0.05, 0.95), text_string,
                                          dict(color="w",
                                               fontsize="20",
                                               horizontalalignment="left")
                                      ]])

    # TODO: zoom in or zoom out
    if DO_ZOOM:
        zoom = True

    if zoom:
        fstart = 0  # modify here for restart
        frames = ZOOM_FRAMES
        cam = sc.camera
        i = 0
        for _ in cam.iter_zoom(ZOOM_FACT, ZOOM_FRAMES):
            i += 1
            if i >= fstart:
                sc.render()
                if not annotate:
                    sc.save("fig_vr_" + header + "_" + string.zfill(cycle, 4) +
                            '_zoom_' + string.zfill(i, 4) + '.png',
                            sigma_clip=2)
                else:
                    sc.save_annotated("fig_vr_" + header + "_annotated_" +
                                      string.zfill(cycle, 4) + '_zoom_' +
                                      string.zfill(i, 4) + '.png',
                                      sigma_clip=4.0,
                                      text_annotate=[[
                                          (0.05, 0.95), text_string,
                                          dict(color="w",
                                               fontsize="20",
                                               horizontalalignment="left")
                                      ]])
        quit()

    return
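The scale_func hook passed to map_to_colormap receives the sampled values plus the bounds, like linearFunc above; a hypothetical alternative that emphasizes low values:

import numpy as np

def sqrt_scale(values, minv, maxv):
    # clip guards against samples outside [minv, maxv]
    v = np.clip((values - minv) / (maxv - minv), 0.0, 1.0)
    return np.sqrt(v)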
Example No. 26
def gen_profs(ds,fils,dsname,keep=True,totalratio=None,mask=False):
    storage = {}
    #Check to see if totalratio has been set, else determine it.
    if totalratio is None:
    #Determine simulation-wide ratio of baryon to Total matter, done in parallel to speed up
        for stor,i in yt.parallel_objects(range(1),storage = storage):
                        
            denstotal = ds.all_data().quantities.weighted_average_quantity("density","cell_volume")
            dmtotal = ds.all_data().quantities.weighted_average_quantity("dark_matter_density","cell_volume")
            
            totalratio = denstotal / (dmtotal + denstotal)
            del dmtotal

            stor.result = totalratio,denstotal.in_units('g/cm**3')
            stor.result_id = 1

        totalratio,denstotal = storage[1]
    print(totalratio) # print to stdout, allows us to check ratio makes sense/debug
    del storage
    #Print to a file, allows checking of progress when in job queue
    if yt.is_root():
        with open("Testinfo.dat",'w') as f:
            f.write('Ratio determined')
    print("All Ratio determined")
    x = []

    storage = {}
    #Gather list of all profiles on disk
    filelist = sorted(os.listdir(''.join(['/shome/mackie/data',dsname,'/profiles'])))
    #Include only density profiles
    filelist = filelist[:len(filelist)//2]
    #Load a numpy mask array if required
    if keep == True:
        keeplist = np.load(''.join(['/shome/mackie/data',dsname,'/filkeep.npy']))
    
    
    for stor,file_in_dir in yt.parallel_objects(filelist,storage=storage):
        
        filnum = int(file_in_dir[7:10])
        segnum = int(file_in_dir[13:16])

        prof = yt.load(''.join(['/shome/mackie/data',dsname,'/profiles/',file_in_dir]))
        dm = prof.data['dark_matter_density']
        dens = prof.data['density'].in_units('g/cm**3')
        
        result = (dens /( dm + dens)) - totalratio
        dens = dens/denstotal
        result = result.v
        if keep == True:
            if keeplist[filnum][segnum] == True:    
                stor.result = result,dens
                stor.result_id = file_in_dir
        else:
            stor.result = result,dens
            stor.result_id = file_in_dir
        del prof,dm,dens,filnum,segnum
    
    results = []
    denresult = []
    for keys,values in storage.items():
        results.append(values[0])
        denresult.append(values[1])
    results = np.array(results)
    denresult = np.array(denresult)

    if mask is not False:
        results = np.ma.masked_array(results,mask=~mask,fill_value=np.nan)
#Get x bins
    x = yt.load(''.join(['/shome/mackie/data',dsname,'/profiles/densfil000seg000.h5'])).data['x']

    if yt.is_root():
        with open("Testinfo.dat",'a') as f:
            f.write("returning results to plot")

        print(results)
    
    return results,denresult,x
Example No. 27
def yt_inline_ProfilePlot():
    ds = yt.frontends.libyt.libytDataset()
    profile = yt.ProfilePlot(ds, "x", ["density"])
    if yt.is_root():
        profile.save()
Example No. 28
            if scale == scale_in_file:
                halo_file = this_file
                break
    return halo_file


if __name__ == "__main__":

    args = parse()

    import yt
    from yt.analysis_modules.sunrise_export import sunrise_octree_exporter
    from yt.analysis_modules.halo_finding.halo_objects import RockstarHaloList  
    yt.enable_parallelism()
    
    if yt.is_root():
        print('\nStarting ' + sys.argv[0])
        print('Parsed arguments: ')
        print(args)
        print()

    # Get parsed values
    sim_dirs, snap_base = args['sim_dirs'], args['snap_base']
    print('Analyzing ', sim_dirs)

    out_dir = args['out_dir']
    modify_outdir = 0
    if  'sim_dir' in out_dir: 
        out_dir = out_dir.replace('sim_dir','')
        modify_outdir = 1 
Example No. 29
def plot_vel(filament,ds,dataset,fil=-1,maskarray=False):
    #Routine to plot velocity of particles along a filament
    #Set gravitational constant
    G = YTQuantity(6.67408E-11,'m**3/(kg * s**2)')
    #Gather velocity and density values from disk, done in parallel to speed computation
    #We first gather a list of the profiles on the disk, then reshape this into a list of [density,other] profiles
    #This is then iterated over in parallel to load the correct data
    filelist = sorted(os.listdir(''.join(['/shome/mackie/data/',dataset,'/profiles'])))
    profnumbers = len(filelist)//2
    files = [ [filelist[i],filelist[i+profnumbers]] for i in range(profnumbers)] 
    del filelist,profnumbers
    



    storage = {}
    for stor,file_in_dir in ytpar.parallel_objects(files,storage=storage):
        
        #Determines correct index to give to segment profiles
        filnum = int(file_in_dir[0][7:10])
        segnum = int(file_in_dir[0][13:16])
        #Calc total density for each segment 
        densprof = yt.load(''.join(['/shome/mackie/data/',dataset,'/profiles/',file_in_dir[0]]))
        dm = densprof.data['dark_matter_density'].in_units('g/cm**3')
        dens = densprof.data['density'].in_units('g/cm**3')
        totaldens = dm + dens
        del densprof,dm,dens
        #Get velocity profiles
        velprof = yt.load(''.join(['/shome/mackie/data/',dataset,'/profiles/',file_in_dir[1]]))
        vel = velprof.data['cylindrical_radial_velocity'].in_units('km/s')

        stor.result = (vel,totaldens)
        stor.result_id = (filnum,segnum)



        
    #Restruct dict into np array of correct structure.
    vel_profs = [ [] for i in range(len(filament))]
    densprofs = [ [] for i in range(len(filament))]
    x = yt.load(''.join(['/shome/mackie/data/',dataset,'/profiles/',file_in_dir[0]])).data['x']
    xarr = [[] for i in range(len(filament))]
    for key,values in sorted(storage.items()):
        filnum,segnum = key
        vel,dens = values
        xarr[filnum].append(x.in_units('Mpc'))
        
        vel_profs[filnum].append(vel.in_units('km/s'))
        densprofs[filnum].append(dens)
    for i in range(len(xarr)):
        xarr[i] = YTArray(np.array(xarr[i]),'Mpc')
        vel_profs[i] = YTArray(np.array(vel_profs[i]),'km/s')
    vel_profs = YTArray(np.array(vel_profs),'km/s')
    xarr = YTArray(np.array(xarr),'Mpc')
    del storage
    #Turn into np arrays for QoL
    densprofs = np.array(densprofs)
    #Gather x bins from disk
    
    #Determine masses and thus escape velocities
    
      
    mass = [get_masses(densprofs[i],x,filament[i],ds,accumulate=True) for i in range(len(filament))]

    mass = np.array(mass)
    mass = YTArray(mass,'g')
    print(mass[1][1])
    
    del densprofs
    
    print(xarr[1][1])

    vel_ratio = ( ( (2*G* mass) / xarr) ** (1.0/2.0))
    vel_ratio = vel_ratio.in_units('km/s')

    if yt.is_root():                        
        print(mass[1][1])
        print(xarr[1][1])
        print(vel_ratio[1][1])
        
#vel_ratio is **approx** escape vel, used to ratio later
    #Generate ratio of velocity to escape velocity
    vel_profs = (vel_profs.in_units('km/s')/vel_ratio.in_units('km/s'))
    del vel_ratio
    



    if fil > -1:
        
        length_plot =  plot_vel_fil(vel_profs[fil].v,gen_dists(filament[fil],ds),x)
    else:
        length_plot = None

    if maskarray is not False:
        print("Masking Data")
        vel_profs = np.ma.masked_array(vel_profs, mask=~maskarray, fill_value=np.nan)
    #Flatten vel profs; there ought to be a more elegant solution
    vel_prof_flatten = []    
    for fil in vel_profs:
        for seg in fil:
            vel_prof_flatten.append(seg)
    vel_profs = np.array(vel_prof_flatten)
    del vel_prof_flatten

    plot = probmap.prob_heat_map(vel_profs,'radial_velocity',x=x)
    
    return plot,length_plot
Example No. 30
imin, imax = None, None
if len(sys.argv) > 1:
    imin = int(sys.argv[1])
    imax = int(sys.argv[2])
if len(sys.argv) > 3:
    if str(sys.argv[3]) != "&":
        weight_field = str(sys.argv[3])

if imin is None:
    dsfiles = [np.sort(glob.glob("DD????/DD????"))[-1]]
else:
    dsfiles = [
        "DD%04i/DD%04i" % (i, i) for i in np.arange(imin, imax + 1, 1)
    ]

if yt.is_root():
    print(dsfiles)

fields = [('gas', 'number_density'), 'Temperature', ('gas', 'photo_gamma'),
          ('gas', 'metallicity'), ('gas', 'C_over_N'), ('gas', 'C_over_H'),
          ('gas', 'O_over_H'), ('gas', 'Fe_over_H')]

fields = [('gas', 'number_density'), 'Temperature', ('gas', 'O_over_H'),
          ('gas', 'photo_gamma')]

#
# fields = [('gas','number_density')]
#


def format_plot(ds, sp, fields):
Example No. 31
def prob_heat_map(profiles,var,x=np.empty(10)):
    #routine to generate a probability heat map for a variable(e.g. density/temperature) in a filament. Will also plot mean and median to this map.
    
    '''
    if 'densdiff' not in var:
    #set up x values, and create empty arrays
        x = profiles[0].x
        data = np.empty([len(profiles),x.shape[0]])
        mask = np.ones([len(profiles),x.shape[0]],dtype=bool)
    
    
    #populate data, and generate a mask for 0 values
        for index in xrange(len(profiles)):
            data[index,] = profiles[index][(var)].v
    
    else:
    '''
    data = np.array(profiles)
    if not("velocity" in var or "densdiff" in var or "densrat" in var):
        data = np.ma.masked_values(data,0.0,atol = 1e-40)
     
    
    data=np.ma.masked_invalid(data)

    
#determine min, max and the binsize for variable
    if yt.is_root(): print(np.ravel(data))
    mind = np.nanmin(data.reshape(-1))
    maxd = np.nanmax(data.reshape(-1))
    print('max')
    print(maxd)
    #if mind < 0:
    #   mind = 0 - np.amax([abs(mind),maxd])
    #    maxd = 0 - mind
    bins = 50
    length = maxd - mind
#generate bins for the variable
    if var == "cylindrical_radial_velocity" or var == "densdiff" or var == "densrat":
        y = np.linspace(mind,maxd,bins)      
    else:
        print(mind)
        if mind == 0:
            mind = data.min()
        print(mind)
        y = np.logspace(np.log10(mind),np.log10(maxd),bins)
   #set up prob array
    prob = np.zeros([10,y.size])
#loop over all values of variable and add to relevant bin in prob array

    for i in range(len(data)):
        for j in range(10):
            z = data[i][j]
            if math.isnan(z):
                break
            yindex = 0
    #calculate correct index to increment
            for index in range(y.size):
                if z <= y[index]:
                    yindex = index
                    prob[j,yindex] += 1
                    break
                
    
    mean = np.empty(x.size)
    median = np.empty(x.size)
    #normalize probability array, calculate mean and median
    for index in range(x.size):
        yvals = np.array(prob[index,:])
        summed = np.sum(yvals)
        if summed > 0:
            yvals = yvals / summed
            prob[index,:] = yvals
        del yvals,summed

    
    for index in range(x.size):
        maskeddata = data[:,index]
        mean[index] = np.nanmean(maskeddata)
        median[index] = np.nanmedian(maskeddata)
    del maskeddata
    #now plot
    plot = plt.figure()
    plt.plot(x,mean,'g',label='Mean')
    plt.plot(x,median,'r',label='Median')
   
    CS = plt.pcolormesh(x,y,prob.T,cmap='seismic')
    
    plt.xlabel("Radius (MPC)")
    plt.xscale('log')
    #plt.yscale('log')

    if var.lower() == 'density':
        plt.ylabel("Density (g/cm^3)")
        plt.yscale('log')
        plt.xlim(-0.1,2.5)
        

    elif var.lower() == 'temperature':
        plt.ylabel("Temperature (K)")
        plt.fill_between(x, 10E4, 10E6, color='grey', alpha=0.2)
        plt.yscale('log')
        plt.xlim(-0.1,2.5)
            
    elif var.lower() == 'cylindrical_radial_velocity':
        plt.ylabel("Velocity/Escape Velocity")
        plt.xscale('linear')
        plt.yscale('linear')
        
    elif var.lower() == "densrat":
        plt.ylabel("Local Density/Average Density")
        plt.yscale('linear')
        plt.xlim(-0.1,2.5)

    else:
        plt.ylabel("Baryon Density/ Total Density - Avg. Baryon Fraction")
        plt.yscale('linear')
        plt.xlim(-0.1,2.5)

    
    cbar = plt.colorbar(CS)
    cbar.ax.set_ylabel("Probability")
    plt.legend()
    return plot
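The manual binning loop in prob_heat_map is close to a per-column histogram; a sketch of the same idea with np.histogram (data illustrative):

import numpy as np

data = np.random.lognormal(size=(100, 10))  # profiles x radial bins
y = np.logspace(np.log10(data.min()), np.log10(data.max()), 50)
prob = np.zeros((10, y.size))
for j in range(10):
    counts, _ = np.histogram(data[:, j], bins=y)  # len(y)-1 counts
    prob[j, :-1] = counts / counts.sum()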
Example No. 32
def main(**kwargs):
    global aux
    #yt.funcs.mylog.setLevel(50)
    dir = kwargs['base_directory'] + kwargs['directory']
    fname = glob.glob(dir + 'id0/' + kwargs['id'] + '.????.vtk')
    fname.sort()

    if yt.is_root(): print(fname[0])

    if kwargs['range'] != '':
        sp = kwargs['range'].split(',')
        start = int(sp[0])
        end = int(sp[1])
        fskip = int(sp[2])
    else:
        start = 0
        end = len(fname)
        fskip = 1
    fname = fname[start:end:fskip]

    ngrids = len(glob.glob(dir + 'id*/' + kwargs['id'] + '*' + fname[0][-8:]))
    if kwargs['parallel']:
        from mpi4py import MPI
        comm = MPI.COMM_WORLD
        nprocs = comm.size
        rank = comm.rank
    else:
        nprocs = 1
        rank = 0
    print(ngrids, rank, nprocs, yt.is_root())

    do_phase = kwargs['phase']

    if ngrids > nprocs: ds = yt.load(fname[0], units_override=ya.unit_base)
    else: ds = yt.load(fname[0], units_override=ya.unit_base, nprocs=nprocs)

    mhd = ('athena', 'cell_centered_B_x') in ds.field_list
    cooling = ('athena', 'pressure') in ds.field_list
    rotation = kwargs['rotation'] != 0.
    if rotation:
        ya.Omega = ya.YTQuantity(kwargs['rotation'], 'km/s/kpc')
        if kwargs['rotation'] == 280:
            aux = ya.set_aux('starburst')
    if rank == 0:
        print "phase plot:", do_phase
        print "MHD:", mhd
        print "cooling:", cooling
        print "rotation:", rotation, ya.Omega

    bin_fields = []
    bin_fields.append(['nH', 'pok'])
    bin_fields.append(['nH', 'temperature'])
    bin_fields.append(['velocity_z', 'temperature'])
    if rotation:
        bin_fields.append(['dvelocity_magnitude', 'temperature'])
    else:
        bin_fields.append(['velocity_magnitude', 'temperature'])
    if mhd:
        bin_fields.append(['nH', 'mag_pok'])
        bin_fields.append(['nH', 'ram_pok_z'])
        bin_fields.append(['nH', 'plasma_beta'])

    slc_fields = ['nH', 'pok', 'temperature', 'velocity_z', 'ram_pok_z']
    fields_to_draw = ['nH', 'temperature', 'pok', 'velocity_z']
    if mhd:
        slc_fields.append('magnetic_field_strength')
        slc_fields.append('mag_pok')
        fields_to_draw.append('magnetic_field_strength')

    if rank == 0:
        if not os.path.isdir(dir + 'slice/'): os.mkdir(dir + 'slice/')
        if not os.path.isdir(dir + 'surf/'): os.mkdir(dir + 'surf/')
        if not os.path.isdir(dir + 'phase/'): os.mkdir(dir + 'phase/')
    for i, f in enumerate(fname):
        slcfname = dir + 'slice/' + kwargs['id'] + f[-9:-4] + '.slice.p'
        surfname = dir + 'surf/' + kwargs['id'] + f[-9:-4] + '.surf.p'
        if do_phase:
            phfname = dir + 'phase/' + kwargs['id'] + f[-9:-4] + '.phase.p'
        else:
            phfname = f
        if rank == 0:
            print(i, compare_files(f, slcfname),
                  compare_files(f, surfname), compare_files(f, phfname))
        if compare_files(f,surfname) and \
           compare_files(f,phfname) and \
           compare_files(f,slcfname):
            if rank == 0: print('all data is already there')
        else:
            if ngrids > nprocs: ds = yt.load(f, units_override=ya.unit_base)
            else: ds = yt.load(f, units_override=ya.unit_base, nprocs=nprocs)
            ya.add_yt_fields(ds, mhd=mhd, rotation=rotation, cooling=cooling)
            if not compare_files(f, surfname):
                if rank == 0: print('projecting...')
                projection(ds, surfname)
            if not compare_files(f, slcfname):
                if rank == 0: print('slicing...')
                slices(ds, slcfname, slc_fields)
            if not compare_files(f, phfname) and do_phase:
                if rank == 0: print('binning...')
                le = np.array(ds.domain_left_edge)
                re = np.array(ds.domain_right_edge)
                sq = ds.box(le, re)

                phase(sq, phfname, bin_fields)

    for i, f in enumerate(fname):
        slcfname = dir + 'slice/' + kwargs['id'] + f[-9:-4] + '.slice.p'
        surfname = dir + 'surf/' + kwargs['id'] + f[-9:-4] + '.surf.p'
        if do_phase:
            phfname = dir + 'phase/' + kwargs['id'] + f[-9:-4] + '.phase.p'
        else:
            phfname = f

        if i % nprocs == rank:
            if not compare_files(surfname, surfname + 'ng'):
                print('drawing for %s on %d' % (surfname, rank))
                plot_projection(surfname, f)
            if not compare_files(slcfname, slcfname + 'ng'):
                print('drawing for %s on %d' % (slcfname, rank))
                plot_slice(slcfname, f, fields_to_draw)
            if not compare_files(phfname, phfname + 'ng') and do_phase:
                print('drawing for %s on %d' % (phfname, rank))
                plot_phase(phfname, bin_fields)
Example No. 33
def calculate_mass_in_sphere(dd):
	data_sub_dir = dd.name
	a = dd.a	
	r_plus = M*(1 + math.sqrt(1 - a**2))
	min_radius = 0.0*r_plus/4

	start_time = time.time()
	
	# load dataset time series
	
	if (data_Eulerian_rho and not use_Eulerian_rho):
		# derived fields
		@derived_field(name = "rho_E_eff", units = "")
		def _rho_E_eff(field, data, force_override=True):
			r_BL = (data["spherical_radius"]/cm)*(1 + r_plus*cm/(4*data["spherical_radius"]))**2
			Sigma2 = r_BL**2 + (data["z"]*a*M/(r_BL*cm))**2
			Delta = r_BL**2 + (a*M)**2 - 2*M*r_BL
			A = (r_BL**2 + (a*M)**2)**2 - ((a*M)**2)*Delta*(data["x"]**2 + data["y"]**2)/(cm*r_BL)**2
			alpha = pow(Delta*Sigma2/A, 0.5)
			beta = -2*a*(M**2)*r_BL/A 
			return (data["rho"]*alpha - beta*data["S_azimuth"])*pow(data["chi"],-3/2)

	elif ((data_Eulerian_rho and use_Eulerian_rho) or ((not data_Eulerian_rho) and (not use_Eulerian_rho))):
		# derived fields
		@derived_field(name = "rho_E_eff", units = "")
		def _rho_E_eff(field, data):
			return data["rho"]
		
	dataset_path = data_root_path + "/" + data_sub_dir + "/KerrSFp_*.3d.hdf5"
	ds = yt.load(dataset_path) # this loads a dataset time series
	print("loaded data from ", dataset_path)
	print("time = ", time.time() - start_time)
	N = len(ds)
	
	ds0 = ds[0] # get the first dataset 
	
	# set centre
	center = [512.0, 512.0, 0]
	L = 512.0	
		
	data_storage = {}
	# iterate through datasets (forcing each to go to a different processor)
	for sto, dsi in ds.piter(storage=data_storage):
		time_0 = time.time()
		# store time
		current_time = dsi.current_time 
		output = [current_time]
		
		# make sphere
		sphere = dsi.sphere(center, max_radius) #- dsi.sphere(center, min_radius)
		volume = sphere.sum("cell_volume")
		if half_box:
			volume = 2*volume
			
		# calculate energy inside sphere
		meanE = sphere.mean("rho_E_eff", weight="cell_volume")
		E = volume*meanE
		output.append(E)
		
		# store output
		sto.result = output
		sto.result_id = str(dsi)
		dt = 1.25
		i = int(current_time/dt)
		print("done {:d} of {:d} in {:.1f} s".format(i+1, N, time.time()-time_0), flush=True)
	
	if yt.is_root():	
		# make data directory if it does not already exist
		makedirs(home_path + output_dir, exist_ok=True)
		# output to file
		if use_Eulerian_rho:
			#dd.filename = "l={:d}_m={:d}_a={:s}_mu={:s}_Al={:s}_mass_in_r={:d}_Eulerian_rho.csv".format(dd.l, dd.m, str(dd.a), dd.mu, dd.Al, max_radius)
			dd.filename = "{:s}_mass_in_r={:d}_Eulerian_rho.csv".format(dd.name, max_radius)
		else:
			#dd.filename = "l={:d}_m={:d}_a={:s}_mu={:s}_Al={:s}_mass_in_r={:d}_conserved_rho.csv".format(dd.l, dd.m, str(dd.a), dd.mu, dd.Al, max_radius)
			dd.filename = "{:s}_mass_in_r={:d}_conserved_rho.csv".format(dd.name, max_radius)
		output_path = home_path + output_dir + "/" + dd.filename 
		# output header to file
		f = open(output_path, "w+")
		f.write("# t	mass in r<=" + str(max_radius) + " #\n")
		# output data
		for key in sorted(data_storage.keys()):
			data = data_storage[key]
			f.write("{:.3f}	".format(data[0]))
			f.write("{:.2f}\n".format(data[1]))
		f.close()
		print("saved data to file " + str(output_path))
Example No. 34
    fp = open("%s/parameter_file.txt" % (ic_dir), "a")
    fp.write("\n"
             "#\n"
             "# must-refine particle parameters\n"
             "# *** must also include method 8 in CellFlaggingMethod ***\n"
             "# *** do NOT include the RefineRegion parameters above ***\n"
             "#\n"
             "MustRefineParticlesCreateParticles = 3\n"
             "MustRefineParticlesRefineToLevel   = %d\n"
             "CosmologySimulationParticleTypeName          = RefinementMask\n" \
             % (params["level"]))
    fp.close()

    # Copy initial conditions directory to the simulation run directory
    print("Moving initial conditions to %s" % (params["sim_dir"]))
    os.rename(ic_dir, params["sim_dir"])

    return


if __name__ == "__main__":
    params = {}
    if yt.is_root():
        params = startup()
    if parallel:
        params = comm.bcast(params)
    params = get_previous_run_params(params)
    params = find_lagrangian_region(params)
    if yt.is_root():
        run_music(params)
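The broadcast above keeps every rank's params in sync after root-only setup; a minimal sketch, assuming mpi4py and an MPI launch:

import yt
from mpi4py import MPI

yt.enable_parallelism()
comm = MPI.COMM_WORLD

params = {}
if yt.is_root():
    params = {"level": 8}    # computed on rank 0 only; value illustrative
params = comm.bcast(params)  # every rank now holds the same dict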
Example No. 35
def calculate_flux(dd, old_Jr):
    data_sub_dir = dd.name
    a = dd.a
    r_plus = M * (1 + math.sqrt(1 - a**2))
    r_minus = M * (1 - math.sqrt(1 - a**2))
    min_R = r_plus / 4

    start_time = time.time()

    # load dataset time series

    dataset_path = data_root_path + "/" + data_sub_dir + "/KerrSFp_*.3d.hdf5"
    ds = yt.load(dataset_path)  # this loads a dataset time series
    print("loaded data from ", dataset_path)
    print("time = ", time.time() - start_time)
    N = len(ds)

    # set centre
    center = [512.0, 512.0, 0]
    L = 512.0

    if old_Jr:
        # derived fields
        @derived_field(name="rho_Jr_eff", units="")
        def _rho_Jr_eff(field, data):
            r_BL = (data["spherical_radius"] /
                    cm) * (1 + r_plus * cm / (4 * data["spherical_radius"]))**2
            Sigma2 = r_BL**2 + (data["z"] * a * M / (r_BL * cm))
            #Delta = r_BL**2 + (a*M)**2 - 2*M*r_BL
            return ((data["spherical_radius"]**2 / cm**2) * (r_BL - r_minus) /
                    (Sigma2 * r_BL)) * data["S_r"] * pow(data["chi"], -3)

    elif not old_Jr:
        # derived fields
        @derived_field(name="rho_Jr_eff", units="")
        def _rho_Jr_eff(field, data):
            return data["J_r"]

    data_storage = {}
    # iterate through datasets (forcing each to go to a different processor)
    for sto, dsi in ds.piter(storage=data_storage):
        time_0 = time.time()
        # store time
        current_time = dsi.current_time
        output = [current_time]

        # make inner and outer shells
        outer_shell = dsi.sphere(center,
                                 max_R + 0.5 * outer_thickness) - dsi.sphere(
                                     center, max_R - 0.5 * outer_thickness)
        inner_shell = dsi.sphere(center, min_R + inner_thickness) - dsi.sphere(
            center, min_R)

        # calculate inner and outer flux
        Jr_outer = outer_shell.mean(
            "rho_Jr_eff", weight="cell_volume") * 4 * math.pi * (max_R**2)
        Jr_inner = inner_shell.mean("rho_Jr_eff",
                                    weight="cell_volume") * 4 * math.pi * (
                                        (r_plus / 4)**2)
        output.append(Jr_outer)
        output.append(Jr_inner)

        # store output
        sto.result = output
        sto.result_id = str(dsi)
        dt = 1.25
        i = int(current_time / dt)
        print("done {:d} of {:d} in {:.1f} s".format(i + 1, N,
                                                     time.time() - time_0),
              flush=True)

    if yt.is_root():
        # make data directory if it does not already exist
        makedirs(home_path + output_dir, exist_ok=True)
        # output to file (the name is the same for either flux definition)
        dd.filename = "l={:d}_m={:d}_a={:s}_phase={:s}_flux.csv".format(
            dd.l, dd.m, str(dd.a), dd.phase)
        output_path = home_path + output_dir + "/" + dd.filename
        # output header to file
        f = open(output_path, "w+")
        f.write("# t	flux r={:.0f}	flux r={:.2f} \n".format(max_R, min_R))
        # output data
        for key in sorted(data_storage.keys()):
            data = data_storage[key]
            f.write("{:.3f}	".format(data[0]))
            f.write("{:.3f}	".format(data[1]))
            f.write("{:.3f}\n".format(data[2]))
        f.close()
        print("saved data to file " + str(output_path))