def plotAccDens_v_z(rhoBH, scale, data=True, style='b-', ylog=True, xlog=True, overplot=False, lw=2, label=False):
    shankar09L = 3.2e5
    shankar09H = 5.4e5
    Salvaterra12 = 0.66e4
    Salvaterra12zH = 9
    Salvaterra12zL = 5
    Treister13 = np.array([851., 666., 674.])
    Treister13z = np.array([6.5, 7.5, 8.5])
    Treister13zErr = np.array([.5, .5, .5])
    Hopkins07zp1, Hopkins07 = readcol.readcol("/nobackupp8/mtremmel/DATA/QSOdata/RhoAccZ.csv", twod=False)
    Hopkins07zp1H, Hopkins07H = readcol.readcol("/nobackupp8/mtremmel/DATA/QSOdata/RhoAccZPLUS.csv", twod=False)
    Hopkins07zp1L, Hopkins07L = readcol.readcol("/nobackupp8/mtremmel/DATA/QSOdata/RhoAccZMINUS.csv", twod=False)
    Hopkins07perr = 10**Hopkins07H - 10**Hopkins07
    Hopkins07merr = 10**Hopkins07 - 10**Hopkins07L
    plt.plot(scale**-1, rhoBH, style, linewidth=lw, label=label)
    if data:
        shankar09 = (shankar09H + shankar09L) / 2.
        err = shankar09H - shankar09
        plt.errorbar([1.03], [shankar09], yerr=[err], color='black', fmt='D', label="Shankar+ 09")
        Salvaterra12z = (Salvaterra12zH + Salvaterra12zL) / 2.
        plt.errorbar([Salvaterra12z + 1], [Salvaterra12], color='black', fmt='x', xerr=[Salvaterra12zH - Salvaterra12z],
                     yerr=0.5 * Salvaterra12, uplims=[True], label='Salvaterra+ 12')
        plt.errorbar(Treister13z, Treister13, color='black', fmt='o', xerr=Treister13zErr,
                     yerr=0.5 * Treister13, uplims=[True, True, True], label='Treister+ 13')
        plt.errorbar(Hopkins07zp1, 10**Hopkins07, color='grey', fmt='o',
                     yerr=(Hopkins07merr, Hopkins07perr), label='Hopkins+ 07')
    if not overplot:
        if ylog:
            plt.yscale('log', base=10)
        if xlog:
            plt.xscale('log', base=10)
            plt.xticks([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'])
        plt.ylabel(r'log($\rho_{acc}$ [M$_{\odot}$ Mpc$^{-3}$])', fontsize=30)
        plt.xlabel('Redshift', fontsize=30)
    return
def pltLumFun(data, lumbins, color='blue', linestyle='-', redshift=1, overplot=False, plotdata=True, label=None, linewidth=2):
    zz, = np.where(z == redshift)
    plt.step(lumbins, np.log10(np.append(data[zz, :], data[zz, -1])), color=color, linestyle=linestyle, label=label, lw=linewidth)
    if plotdata == True:
        obs = readcol.readcol('/nobackupp8/mtremmel/DATA/QSOdata/bol_lf_point_dump.dat', twod=False, asdict=True, skipline=38)
        obs2 = readcol.readcol('/nobackupp8/mtremmel/DATA/QSOdata/M1450z5_McGreer13.dat', twod=False, asdict=True, skipline=1)
        tt, = np.where(obs['redshift'] == redshift)
        plt.errorbar(obs['lbol'][tt] + loglbol_sun, obs['dphi'][tt], yerr=obs['sig'][tt], fmt='o',
                     color='grey', ecolor='grey', label='Hopkins+ 2007 (Compilation)')
        if z[zz] == 6:
            plt.errorbar([logLbol6B], [logphi6B], xerr=errlogLbol6B, yerr=errlogphi6B, fmt='^', color='k', label='Barger+2003')
            plt.errorbar([logLbol6F], [logphi6F], xerr=[[logLbol6Fm], [logLbol6Fp]],
                         yerr=[[errlogphi6Fm], [errlogphi6Fp]], fmt='s', color='k', label='Fiore+ 2012')
        if z[zz] == 5:
            l1450 = np.log10(4.4) + mcABconv(obs2['M1450'], c / (0.145e-4))
            dphi = 10**obs2['logphi']
            dphip = (2. / 5.) * (dphi + obs2['sig'])
            dphim = (2. / 5.) * (dphi - obs2['sig'])
            dphi = np.log10((2. / 5.) * dphi)
            dphierr = [dphi - np.log10(dphim), np.log10(dphip) - dphi]
            plt.errorbar(l1450, dphi, yerr=dphierr, fmt='D', color='k', label='McGreer+ 2013')
    if overplot == False:
        plt.title(str(zbinsl[zz[0]]) + ' < z < ' + str(zbinsh[zz[0]]))
        plt.xlabel(r'log$_{10}$($L_{bol}$ [ergs/s])', fontsize=30)
        plt.ylabel(r'log$_{10}$($\phi$ [Mpc$^{-3}$ dex$^{-1}$])', fontsize=30)
        plt.legend(loc='lower left', fontsize=20)
    return
def get_pixel_phase(lfc_file):
    # read observed LFC peak positions from DAOPHOT output files
    try:
        _, y, x, _, _, _, _, _, _, _, _ = readcol(lfc_file, twod=False, skipline=2)
    except:
        _, y, x, _, _, _, _, _, _ = readcol(lfc_file, twod=False, skipline=2)
    del _
    x_pixel_phase = x - np.round(x, 0)
    y_pixel_phase = y - np.round(y, 0)
    return x_pixel_phase, y_pixel_phase
def test_case():
    """
    A specific test case using the Klein 2005 luminosity computations
    """
    dtab = readcol.readcol('/Users/adam/agpy/tests/klein2005sourcelist.txt', skipline=33, fsep='|')
    kleinlum = dtab[:, 5].astype('float')
    irasfluxes = dtab[:, numpy.array([22, 25, 28, 31])].astype('float')
    dist = dtab[:, 3].astype('float') * 1000
    mylum = kleinlum * 0
    mylum_interp = kleinlum * 0
    nu = c * 1e4 / numpy.array([12.0, 25.0, 60.0, 100.0])
    dnu = numpy.array([1.53e13, 5.789e12, 3.75e12, 1.114e12])
    mlarr = []
    for ind in xrange(len(mylum)):
        ml = luminosity(nu, irasfluxes[ind], dnu, dist_pc=dist[ind])
        mylum[ind] = ml.lbol_meas()
        mylum_interp[ind] = ml.lbol_interp(addpoint=False, extrap=True)
        mlarr.append(ml)
    return kleinlum, mylum, mylum_interp, mlarr
def summarize_bench(in_file=None, out_file=None):
    """
    Read and summarize a benchmarking file.
    """
    if in_file == None:
        return
    task, tag, delta, start, stop = readcol(in_file, twod=False)
    dummy = os.popen("date")
    date_stamp = dummy.readlines()
    dummy = os.popen("uname -a")
    uname_stamp = dummy.readlines()
    dummy = os.popen("pwd")
    pwd_stamp = dummy.readline()
    lines = []
    lines.append("Summary of file " + in_file + "\n")
    lines.append(date_stamp[0] + "\n")
    lines.append(uname_stamp[0] + "\n")
    lines.append(pwd_stamp + "\n")
    lines.append("\n")
    total_time = np.max(stop) - np.min(start)
    total_time_hr = total_time / 3600.0
    lines.append("Total time: " + str(total_time) + " (" + str(total_time_hr) + " hr)\n")
    time_logged = np.sum(delta)
    lines.append("Time inside logged tasks: " + str(time_logged) + "\n")
    lines.append("Time outside logged tasks: " + str(total_time - time_logged) + "\n")
    lines.append("Total logged calls: " + str(len(task)) + "\n")
    lines.append("Average time per call: " + str(np.mean(delta)) + "\n")
    lines.append("\n")
    tasks_called = np.unique(task)
    n_calls = {}
    t_per_call = {}
    tot_t = {}
    for this_task in tasks_called:
        t_per_call[this_task] = np.mean(delta[task == this_task])
        tot_t[this_task] = np.sum(delta[task == this_task])
        n_calls[this_task] = np.sum(task == this_task)
    tot_t_vec = np.zeros_like(tasks_called)
    for i in range(len(tasks_called)):
        tot_t_vec[i] = tot_t[tasks_called[i]]
    order = np.argsort(tot_t_vec)
    tasks_called = tasks_called[order]
    for this_task in tasks_called:
        lines.append(this_task + " " + str(n_calls[this_task]) +
                     " " + str(t_per_call[this_task]) +
                     " " + str(tot_t[this_task]) + "\n")
    if out_file == None:
        for line in lines:
            print line
    else:
        f = open(out_file, "w")
        f.writelines(lines)
        f.close()
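# Minimal usage sketch for summarize_bench (hypothetical file names; assumes the
# benchmark log holds the five whitespace-separated columns readcol expects:
# task, tag, delta, start, stop).
if __name__ == "__main__":
    summarize_bench(in_file="bench_run01.log")                                   # print summary to stdout
    summarize_bench(in_file="bench_run01.log", out_file="bench_run01.summary")   # write summary to disk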
def get_splat_table(webquery=False, savename=None, **kwargs):
    if webquery and webOK:
        splat = query_splatalogue.query_splatalogue(**kwargs)
        splat.describe()
        splat.rename_column('frequency', 'FreqGHz')
        splat.FreqGHz /= 1e3
        splat.columns['FreqGHz'].unit = 'GHz'
        splat.rename_column('molecular formula', 'Species')
        splat.rename_column('quantum numbers', 'ResolvedQNs')
        try:
            splat.rename_column('chemicalname', 'ChemicalName')
        except:
            print "Failed to rename chemicalname."
    elif atpyOK:
        splat = atpy.Table(selfpath + "/splatalogue.csv", type='ascii', delimiter=':')
    else:
        splat = readcol.readcol(selfpath + "/splatalogue.csv", fsep=":", asStruct=True)
    for cn in splat.columns:
        if cn != R.sub('', cn):
            splat.rename_column(cn, R.sub('', cn))
    if '' in splat.FreqGHz:
        splat.FreqGHz[splat.FreqGHz == ''] = '-999'
        splat.MeasFreqGHz[splat.MeasFreqGHz == ''] = '-999'
        splat.rename_column('FreqGHz', 'FreqGHzTxt')
        splat.add_column('FreqGHz', splat.FreqGHzTxt.astype('float'))
        splat.remove_columns('FreqGHzTxt')
        if 'MeasFreqGHz' in splat.columns:
            splat.rename_column('MeasFreqGHz', 'MeasFreqGHzTxt')
            splat.add_column('MeasFreqGHz', splat.MeasFreqGHzTxt.astype('float'))
            splat.remove_columns('MeasFreqGHzTxt')
    latex_names = [LatexName(species, qn)
                   for species, qn in zip(splat.Species, splat.ResolvedQNs)]
    line_names = [LineName(species, qn)
                  for species, qn in zip(splat.Species, splat.ResolvedQNs)]
    if hasattr(splat, 'add_column'):
        splat.add_column("LineName", line_names)
        splat.add_column("LatexName", latex_names)
    splat.add_column('frequency', splat.FreqGHz)
    if savename is not None:
        splat.write(savename)
    return splat
def read_baraffe(mass, model='BCAH98_models.1'):
    path, fname = os.path.split(__file__)
    keys = ["m", "age", "Teff", "g", "log L", "Mv", "Mr", "Mi", "Mj", "Mh", "Mk", "Ml'", "Mm"]
    dat = readcol(os.path.join(path, model), colNames=keys)
    inds = dat["m"] == mass
    if inds.sum() == 0:
        print "mass must be one of: ", np.unique(dat["m"])
    goodvals = map(lambda k: dat[k][inds], keys)
    dat.update(zip(keys, goodvals))
    dat['r'] = R_from_T_and_L(dat["Teff"], 10**dat["log L"])
    return dat
def get_Ferguson_opacity(T, density=10**-16.1, OpacityFname='g7.02.tron'):
    path, fname = os.path.split(__file__)
    d = readcol(os.path.join(path, OpacityFname), colNames=(['T'] + list(np.linspace(-8, 1, 19))))
    log_rhos = {}
    xy = []
    z = []
    for k in np.linspace(-8, 1, 19):
        log_rhos[k] = np.log10(((10**(d['T'] - 6))**3) * (10**k))
        xy.extend(zip(d['T'], log_rhos[k]))
        z.extend(10**d[k])
    func = nn(np.array(xy), np.array(z))
    return func(np.log10(T), np.log10(density))
def spectra_fromcambfile(file,type = None,lmax = None): """ Returns a bunch of spectra from a CAMB output file 'type' is either a tranfer Pk file, a unlensed Cls or lensed Cls file following CAMB conv. for output files """ if type is None : # Let's try to guess from the file name for s in typedict : if s in file : type = s assert(type in typedict),"File type not understood, specify type keyword" # reads a list of columns. Should find the data type by itself. cols = rc.readcol(file,twod = False) if type == 'matterpower' : # Outputs a jc.cosmo Pk instance. assert(len(cols) >= 2),"File not understood" return cosmo.Pk(cols[0],cols[1]) elif type == 'scalCls' : assert(len(cols) >= 4),"File not understood" ell = cols[0] w = ell*(ell + 1)/(2.*np.pi) # weights in output file idc = np.where(ell <= lmax) if lmax is not None else np.arange(len(ell)) Cltt = cosmo.Cl_lminlmax(ell[idc],cols[1][idc]/w[idc]) Clee = cosmo.Cl_lminlmax(ell[idc],cols[2][idc]/w[idc]) Clte = cosmo.Cl_lminlmax(ell[idc],cols[3][idc]/w[idc]) return {'tt' : Cltt,'ee' : Clee, 'te' : Clte} elif type == 'lenspotentialCls' : # seven jc.cosmo Cl instances assert(len(cols) >= 8),"File not understood" ell = cols[0] w = ell*(ell + 1)/(2.*np.pi) # weights in output file idc = np.where(ell <= lmax) if lmax is not None else np.arange(len(ell)) Cltt = cosmo.Cl_lminlmax(ell[idc],cols[1][idc]/w[idc]) Clee = cosmo.Cl_lminlmax(ell[idc],cols[2][idc]/w[idc]) Clbb = cosmo.Cl_lminlmax(ell[idc],cols[3][idc]/w[idc]) Clte = cosmo.Cl_lminlmax(ell[idc],cols[4][idc]/w[idc]) w = ell**2*(ell + 1)**2/(2.*np.pi) Clpp = cosmo.Cl_lminlmax(ell[idc],cols[5][idc]/w[idc]) w = np.sqrt(ell**2*(ell + 1)**2)/(2.*np.pi) Clpt = cosmo.Cl_lminlmax(ell[idc],cols[6][idc]/w[idc]) Clpe = cosmo.Cl_lminlmax(ell[idc],cols[7][idc]/w[idc]) return {'tt' : Cltt,'ee' : Clee, 'te' : Clte, 'bb':Clbb,'pp':Clpp, 'pt':Clpt, 'pe':Clpe} elif type == 'lensedCls' : # 4 jc_cosmo Cl instances assert(len(cols) >= 5),"File not understood" ell = cols[0] w = ell*(ell + 1)/(2.*np.pi) # weights in output file idc = np.where(ell <= lmax) if lmax is not None else np.arange(len(ell)) Cltt = cosmo.Cl_lminlmax(ell[idc],cols[1][idc]/w[idc]) Clee = cosmo.Cl_lminlmax(ell[idc],cols[2][idc]/w[idc]) Clbb = cosmo.Cl_lminlmax(ell[idc],cols[3][idc]/w[idc]) Clte = cosmo.Cl_lminlmax(ell[idc],cols[4][idc]/w[idc]) return {'tt' : Cltt,'ee' : Clee,'bb' : Clbb, 'te' : Clte} assert(0),"How did you get there ??" return 0
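# Minimal usage sketch for spectra_fromcambfile (hypothetical CAMB output name; the
# file type is guessed from the filename via the module-level typedict, so the name
# must contain one of its keys, e.g. 'lensedCls').
def _example_lensed_cls():
    cls = spectra_fromcambfile('planck_lensedCls.dat', lmax=2000)
    print cls.keys()   # for a lensed Cls file: 'tt', 'ee', 'bb', 'te'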
def getAccDens(simname, vol=25.**3, filename='AccDens.pkl', Mlimit=1.5e6, Llimit=1e42):
    f = open('files.list', 'r')
    files = f.readlines()
    f.close()
    s = pynbody.load(files[-5].strip('\n').strip('/'))
    munits = s.s['mass'].units
    tunits = s.s['x'].units / s.s['vel'].units
    mdotunits = munits / tunits
    #munits = pynbody.units.Unit(np.str(1.9911e15)+' Msol')
    del(s)
    gc.collect()
    if not os.path.exists(simname + '.BHorbit.abridged'):  #+np.str(Mlimit)):
        print "Making abridged accretion log file..."
        Mlimitsim = Mlimit / munits.in_units('Msol')
        mdotlimit = Llimit / (0.1 * 3e10 * 3e10)
        mdotlimit /= mdotunits.in_units('g s**-1')
        cstr = """ awk '{if ($4 - $13 > """ + str(Mlimitsim) + """ && $12 > """ + str(mdotlimit) + """) print $4 " " $12 " " $13 " " $15 " " $16}' """ + simname + ".orbit > " + simname + ".BHorbit.abridged"
        os.system(cstr)
    print "reading in data..."
    mass, mdot, dM, dt, scale = readcol.readcol(simname + '.BHorbit.abridged', twod=False)
    print "done!"
    del(dt)
    gc.collect()
    #del(iord)
    #gc.collect()
    print "sorting time..."
    o = np.argsort(scale)
    #del(time)
    #gc.collect()
    print "sorting other stuff..."
    dM = pynbody.array.SimArray(dM[o], munits)
    mass = pynbody.array.SimArray(mass[o], munits)
    mdot = pynbody.array.SimArray(mdot[o], mdotunits)
    #time = pynbody.array.SimArray(time[o],tunits)
    scale = scale[o]
    del(o)
    gc.collect()
    print "summing..."
    ok = ((mass.in_units('Msol') - dM.in_units('Msol') > Mlimit) &
          (mdot.in_units('g s**-1') * 0.1 * 3e10 * 3e10 > Llimit))
    rhoBH = np.cumsum(dM[ok].in_units('Msol')) / vol
    scale = scale[ok]
    del(mass)
    del(dM)
    del(mdot)
    gc.collect()
    #time = time[(mass.in_units('Msol')>Mlimit)]
    if filename:
        print "saving data..."
        f = open(filename, 'wb')
        pickle.dump([rhoBH, scale], f)
        f.close()
    return rhoBH, scale
def plotOccFrac(sim, centrals=True, rlim=1, cum=True, bins=10):
    '''
    plot the black hole occupation fraction of halos as a function of mass

    ----inputs-----
    sim = name of the snapshot you wish to analyze
    centrals = whether or not you only want to count black holes within rlim from center of halo
    rlim = the maximum radius from the halo center within which a black hole counts as "central"
    cum = whether you want a cumulative distribution
    bins = number of log bins in halo mass you want

    ----outputs----
    array[occupation fraction]
    array[mass bins]
    also a nice plot!
    '''
    stat = readcol.readcol(sim + '.amiga.stat', skipline=1)
    Mvir = stat[:, 5].astype('float')
    s = pynbody.load(sim)
    h = s.halos()
    bhgrps = s.stars['amiga.grp'][(s.stars['tform'] < 0)]
    print "calculating Min and Max halo masses..."
    MvirMin = h[int(bhgrps.max())]['mass'].in_units('Msol').sum()
    MvirMax = h[1]['mass'].in_units('Msol').sum()
    print "Min: ", MvirMin, " Max: ", MvirMax
    dLogM = (np.log10(MvirMax) - np.log10(MvirMin)) / bins
    BHFlag = np.zeros(bhgrps.max())
    ugrps = np.unique(bhgrps)
    print "there are ", len(ugrps), "halos with BHs"
    print "determining existence of central BHs..."
    for i in ugrps:
        if i == 0:
            continue
        print "halo ", i
        cen = halo.shrink_sphere_center(h[i])
        h[i].stars['pos'] -= cen
        if len(h[i].stars[((h[i].stars['tform'] < 0) & (h[i].stars['r'].in_units('kpc') < rlim))]) > 0:
            BHFlag[i - 1] = 1
        h[i].stars['pos'] += cen
    occNum, Mbins = np.histogram(np.log10(Mvir[np.arange(bhgrps.max())]), bins=bins, weights=BHFlag)
    HNum, Mbins2 = np.histogram(np.log10(Mvir[np.arange(bhgrps.max())]), bins=bins)
    if cum == True:
        occFrac = np.cumsum(occNum) / np.cumsum(HNum).astype('float')
    else:
        occFrac = occNum / HNum.astype('float')
    return Mbins, occFrac
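# Minimal usage sketch for plotOccFrac (hypothetical snapshot name; assumes the
# snapshot and its .amiga.stat catalogue sit in the working directory).
def _example_occupation_fraction():
    import matplotlib.pyplot as plt
    Mbins, occFrac = plotOccFrac('cosmo25.004096', rlim=1, cum=True, bins=10)
    plt.step(Mbins[:-1], occFrac, where='post')
    plt.xlabel(r'log$_{10}$(M$_{vir}$ [M$_{\odot}$])')
    plt.ylabel('BH occupation fraction')
    plt.show()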
def get_Ferguson_opacity_linear(T, density=10**-16.1, OpacityFname='g7.02.tron'):
    path, fname = os.path.split(__file__)
    d = readcol(os.path.join(path, OpacityFname), colNames=(['T'] + list(np.linspace(-8, 1, 19))))
    log_rhos = {}
    xy = []
    z = []
    for k in np.linspace(-8, 1, 19):
        print k
        log_rhos[k] = np.log10(((10**(d['T'] - 6))**3) * (10**k))
        xy.extend(zip(d['T'], log_rhos[k]))
        z.extend(10**d[k])
    mpl.scatter([x[0] for x in xy], [y[1] for y in xy], c=z)
    mpl.show()
    func = ll(np.array(xy), np.array(z))
    print np.array(xy)[:2, :], np.array(z)[:2]
    return func(np.log10(T), np.log10(density))
def getBHiords(simname):
    if not os.path.exists("BHid.list"):
        print "finding IDs for all BHs that ever existed..."
        os.system("awk '{print $1}' " + simname + ".orbit > BHid.list")
        f = open("BHid.list", 'r')
        id = f.readlines()
        id = np.array(id)
        id = id.astype('int')
        id = np.unique(id)
        f.close()
        os.system("rm BHid.list")
        np.savetxt("BHid.list", id)
    else:
        print "previous BHid.list file found! reading it..."
        id, = readcol.readcol("BHid.list", twod=False)
    return id
def create_gaiadr2_id_dict(path='/Users/christoph/OneDrive - UNSW/observations/', savefile=True):
    # prepare dictionary
    gaia_dict = {}

    # read input file for ALL targets (excluding B-stars)
    targets, T0, P, g2id = readcol(path + 'PT0_gaiadr2_list.txt', twod=False, verbose=False)

    # fill dictionary with targets
    for i in range(len(targets)):
        gaia_dict[targets[i]] = {'gaia_dr2_id': g2id[i][1:]}

    if savefile:
        np.save(path + 'gaiadr2_id_dict.npy', gaia_dict)

    return gaia_dict
def spectra_fromcambfile(file, type=None, lmax=None): """ Returns a bunch of spectra from a CAMB output file 'type' is either a tranfer Pk file, a unlensed Cls or lensed Cls file following CAMB conv. for output files """ if type is None: # Let's try to guess from the file name for s in typedict: if s in file: type = s assert (type in typedict), type # reads a list of columns. Should find the data type by itself. cols = rc.readcol(file, twod=False) if type == 'matterpower': # Outputs a jc.cosmo Pk instance. assert (len(cols) >= 2), len(cols) return cosmo.Pk(cols[0], cols[1]) elif type == 'lenspotentialCls': # seven jc.cosmo Cl instances assert (len(cols) >= 8), len(cols) ell = cols[0] w = ell * (ell + 1) / (2. * np.pi) # weights in output file idc = np.where(ell <= lmax) if lmax is not None else np.arange(len(ell)) Cltt = cosmo.Cl_lminlmax(ell[idc], cols[1][idc] / w[idc]) Clee = cosmo.Cl_lminlmax(ell[idc], cols[2][idc] / w[idc]) Clbb = cosmo.Cl_lminlmax(ell[idc], cols[3][idc] / w[idc]) Clte = cosmo.Cl_lminlmax(ell[idc], cols[4][idc] / w[idc]) w = ell ** 2 * (ell + 1) ** 2 / (2. * np.pi) Clpp = cosmo.Cl_lminlmax(ell[idc], cols[5][idc] / w[idc]) w = np.sqrt(ell.astype(float) ** 3 * (ell + 1.) ** 3) / (2. * np.pi) Clpt = cosmo.Cl_lminlmax(ell[idc], cols[6][idc] / w[idc]) Clpe = cosmo.Cl_lminlmax(ell[idc], cols[7][idc] / w[idc]) return {'tt': Cltt, 'ee': Clee, 'te': Clte, 'bb': Clbb, 'pp': Clpp, 'pt': Clpt, 'pe': Clpe} elif type == 'lensedCls' or type == 'tensCls': # 4 jc_cosmo Cl instances assert (len(cols) >= 5), len(cols) ell = cols[0] w = ell * (ell + 1) / (2. * np.pi) # weights in output file idc = np.where(ell <= lmax) if lmax is not None else np.arange(len(ell)) Cltt = cosmo.Cl_lminlmax(ell[idc], cols[1][idc] / w[idc]) Clee = cosmo.Cl_lminlmax(ell[idc], cols[2][idc] / w[idc]) Clbb = cosmo.Cl_lminlmax(ell[idc], cols[3][idc] / w[idc]) Clte = cosmo.Cl_lminlmax(ell[idc], cols[4][idc] / w[idc]) return {'tt': Cltt, 'ee': Clee, 'bb': Clbb, 'te': Clte} assert (0), "How did you get there ??" return 0
def get_all_polys():
    npolys = 10000
    tab = 'test_small'
    np.random.seed(1)
    catra, catdec = readcol('/tmp/zz3_')  # ./gen_data 3 10000
    pool = multiprocessing.Pool(8)
    res = []
    for i in range(npolys):
        a, b = gen_random_poly()
        coostring = ','.join(['%f' % _ for _ in np.array([a, b]).T.flatten()])
        query = 'select count(*) from %s where q3c_poly_query(ra,dec,ARRAY[%s]);' % (tab, coostring)
        print query
        #print a,b
        #print within_poly(catra,catdec,a,b);
        res.append(pool.apply_async(within_poly, (catra, catdec, a, b)))
    pool.close()
    pool.join()
    for r in res:
        print r.get()
def getLumFun(sim, simname, bins=50, loglmin=43, loglmax=46, vol=25**3, minm=1e6, filename='LumFun.pkl'):
    munits = sim.s['mass'].units
    tunits = sim.s['x'].units / sim.s['vel'].units
    mdotunits = munits / tunits
    tbinsh = np.array([bhanalysis.getTime(zz, sim) for zz in zbinsl])
    tbinsl = np.array([bhanalysis.getTime(zz, sim) for zz in zbinsh])
    dtbins = tbinsh - tbinsl
    dlogl = np.float(loglmax - loglmin) / bins
    if not os.path.exists(simname + '.BHorbit.abridged'):
        mkAbridgeOrbit(simname, sim, lmin=10**loglmin, mmin=minm)
    mass, mdot, dm, dt, scale = readcol.readcol(simname + '.BHorbit.abridged', twod=False)
    ok, = np.where((mass - dm > minm / munits.in_units("Msol")) &
                   (mdot > 10**loglmin / (0.1 * 3e10 * 3e10 * mdotunits.in_units('g s**-1'))))
    del(dm)
    del(mass)
    gc.collect()
    mdot = pynbody.array.SimArray(mdot[ok], mdotunits)
    dt = pynbody.array.SimArray(dt[ok], tunits)
    scale = scale[ok]
    del(ok)
    gc.collect()
    lum = mdot.in_units('g s**-1') * 0.1 * 3e10 * 3e10
    del(mdot)
    gc.collect()
    data = np.zeros((len(z), bins))
    for i in range(len(z)):
        print 'redshift ', z[i]
        oo, = np.where((scale**-1 - 1 > zbinsl[i]) & (scale**-1 - 1 < zbinsh[i]))
        weights = dt[oo].in_units('Gyr') / (dtbins[i] * dlogl * vol)
        lumhist, lumbins = np.histogram(np.log10(lum[oo]), range=[loglmin, loglmax], weights=weights, bins=bins)
        data[i, :] = lumhist
        del(oo)
        gc.collect()
    del(lum)
    gc.collect()
    if filename:
        print "saving data..."
        f = open(filename, 'wb')
        pickle.dump([data, lumbins], f)
        f.close()
    return data, lumbins
def conv_tlusty_spect(tlusty_dir, tlusty_conv_dir):
    """
    Take all Tlusty spectra from a directory, convolve to 0.1A, then save to a new directory.
    Currently resampling onto a wavelength grid of 0.1A also, from 3000 to 12000A to match
    AMBRE spectra; note Tlusty only covers 3000A to 10000A. Also mostly matching filenames.
    """
    infns = glob.glob(tlusty_dir + '/*.vis.7')
    for ii, infn in enumerate(infns):
        indata = readcol(infn)
        wav = indata[:, 0]
        data = indata[:, 1]
        cdata = np.convolve(data, np.ones(10) / 10.0, 'same')
        intwav = 0.1 * np.arange(90000) + 3000.0
        icdata = np.interp(intwav, wav, cdata)
        n1 = infn.split('/')[-1].split('BG')[1].split('g')
        n2 = 'g+' + str(float(n1[1].split('v')[0]) / 100.0)
        n1 = 'p' + str(int(n1[0]) / 1)
        outname = tlusty_conv_dir + '/' + n1 + ':' + n2 + ':m0.0:t01:z+0.00:a+0.00.TLUSTYconv.fits'
        pyfits.writeto(outname, icdata, clobber=True)
        print('convolving ' + str(ii + 1) + ' out of ' + str(len(infns)))
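# Minimal usage sketch for conv_tlusty_spect (hypothetical directories; assumes the
# input directory holds Tlusty '*BG*.vis.7' spectra and the output directory exists).
if __name__ == '__main__':
    conv_tlusty_spect('/data/tlusty/raw', '/data/tlusty/convolved')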
def create_PT0_dict(path='/Users/christoph/OneDrive - UNSW/observations/', savefile=True):
    # prepare dictionary
    PT0_dict = {}

    # # read TOI input file
    # targets, T0, P = readcol(path + 'toi_PT0_list.txt', twod=False, verbose=False)
    # # read input file for other targets (excluding B-stars)
    # targets, T0, P = readcol(path + 'other_PT0_list.txt', twod=False, verbose=False)

    # read input file for ALL targets (excluding B-stars)
    targets, T0, P = readcol(path + 'PT0_list.txt', twod=False, verbose=False)

    # fill dictionary with targets
    for i in range(len(targets)):
        PT0_dict[targets[i]] = {'P': P[i], 'T0': T0[i]}

    if savefile:
        np.save(path + 'PT0_dict.npy', PT0_dict)

    return PT0_dict
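# Minimal usage sketch (assumes PT0_list.txt exists at the default path); the saved
# dictionary can be reloaded later with np.load (.item() unpacks the 0-d object
# array; newer numpy versions also need allow_pickle=True).
def _example_reload_PT0():
    PT0_dict = create_PT0_dict(savefile=True)
    reloaded = np.load('/Users/christoph/OneDrive - UNSW/observations/PT0_dict.npy').item()
    print sorted(reloaded.keys())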
def get_Semenov_opacity(T, density=10**-16.1, Rosseland=True, OpacityFname='kPkR.dat'):
    '''Get the disk opacity according to the iron-poor silicate model of Semenov et al. 2003.
    This function interpolates between temperature and density. The density interpolation is
    probably not what we want, and density=10**-16.1 or density=10**-12.5 should be used.

    inputs:
    T - the temperature of the medium in Kelvin
    density - the volumetric density in g/cm^3
    Rosseland - (bool) True to use the Rosseland mean, False to use the Planck mean opacity'''
    path, fname = os.path.split(__file__)
    d = readcol(os.path.join(path, OpacityFname), colNames=['T', 'P161', 'R161', 'P125', 'R125'])
    if Rosseland == True:
        low_density = interp1d(d['T'], d['R161'])(T)
        high_density = interp1d(d['T'], d['R125'])(T)
        delta_density = (density - 10**-16.1) / (10**-12.5 - 10**-16.1)
        return low_density + (high_density - low_density) * delta_density
    elif Rosseland == False:
        low_density = interp1d(d['T'], d['P161'])(T)
        high_density = interp1d(d['T'], d['P125'])(T)
        delta_density = (density - 10**-16.1) / (10**-12.5 - 10**-16.1)
        return low_density + (high_density - low_density) * delta_density
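# Minimal usage sketch for get_Semenov_opacity (assumes kPkR.dat sits next to this
# module); as the docstring warns, the linear density interpolation is crude, so the
# two tabulated densities are the safest evaluation points.
def _example_semenov():
    kappa_R = get_Semenov_opacity(150., density=10**-16.1, Rosseland=True)    # Rosseland mean
    kappa_P = get_Semenov_opacity(150., density=10**-16.1, Rosseland=False)   # Planck mean
    print kappa_R, kappa_P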
def inreader(infile):
    names, values = readcol(infile, twod=False, fsep='&')
    outstruc = dict(zip(names, values))
    outstruc['index'] = names
    outstruc['invals'] = values
    return outstruc
def plot_dmprof2( self, left = False, bottom = False ): """ make a single plot with the correct environment for a single galaxy\n Returns an Axes object""" r = np.array( self.r ) * self.dis / 206265. r2 = np.array( self.r2 ) * self.dis / 206265. plt.loglog( self.r, self.dm, linewidth = None ) plt.xlim( [ x * self.dis / 206265 for x in self.xrange ] ) plt.ylim( self.yrange ) rhol2 = np.array( self.dmlo2 ) rhoh2 = np.array( self.dmhi2 ) rs, stars, stars_err = readcol( 'stars.out', twod = False ) dmhi2 = rhoh2 - stars dmlo2 = rhol2 - stars #badlist = np.where( rhol2 < ( stars + stars_err ) )[ 0] badlist = np.where( dmlo2 - ( stars + stars_err ) < 0 )[ 0 ] badstart = badlist[ 0 ] badend = badlist[ -1 ] stars5 = np.interp( r, rs, stars ) stars5_err = np.interp( r, rs, stars_err ) dm = np.array( self.dm ) - stars5 rho_err_temp = np.array( self.dmhi ) - np.array( self.dm ) dm_err = ( rho_err_temp**2 + stars5_err**2 )**( 0.5 ) #sys0.exit() # what I think is DM is actually rho since dsphdm_fnx spits out rho plt.fill_between( r2[ :badstart ], dmlo2[ :badstart], dmhi2[ :badstart ], color = '0.7' ) plt.fill_between( r2[ badend + 1: ], dmlo2[ badend + 1: ], dmhi2[ badend + 1: ], color = '0.7' ) plt.fill_between( rs, stars - stars_err, stars + stars_err, color = 'r' ) inner = np.arange( 0, self.n_inner ) outer = np.arange( self.n_inner, self.n_inner + self.n_outer ) # **** HEADS UP *** # I'm hard-coding outer to skip the one negative point in Fnx outer = np.arange( 3, 5 ) # *** *** rho_err = np.array( self.dmhi ) - np.array( self.dm ) rho = np.array( self.dm ) plt.errorbar( r[ inner ], dm[ inner ], dm_err[ inner ], fmt = 'x', color = '0.3', ecolor = '0.3' ) plt.errorbar( r[ outer ], dm[ outer ], dm_err[ outer ], fmt = '.', color = '0.', ecolor = '0.' ) # --- set kinrange indicators x1 = self.kinrange[ 0 ] * self.dis / 206265. x2 = self.kinrange[ 1 ] * self.dis / 206265. plt.loglog([ x1, x1 ], [ plt.ylim()[ 0 ], 2. * plt.ylim()[ 0 ] ], 'k' ) plt.loglog([ x2, x2 ], [ plt.ylim()[ 0 ], 2. * plt.ylim()[ 0 ] ], 'k' ) # --- draw NFW line x1 = np.log10( 5. * plt.xlim()[ 0 ] ) x2 = np.log10( .5 * plt.xlim()[ 1 ] ) y1 = np.log10( .2 * plt.ylim()[ 1 ] ) y2 = -1. * ( x2 - x1 ) + y1 plt.loglog( [ 10**x for x in [ x1, x2 ] ], [ 10**y for y in [ y1, y2 ] ], 'k--' ) ax = plt. gca() if not left: ax.set_yticklabels( [] ) if not bottom: ax.set_xticklabels( [] ) plt.text( 0.6, 0.9, self.galname, transform = ax.transAxes ) return ax
def getBHhalo(simname,findcenter='hyb',minHM = 1e10,minNum=30,filename=None, initFile=None): if not os.path.exists("grpfiles.list"): simname_split = simname.split('.') num = len(simname_split) os.system('ls '+simname+'.00*.grp | cut -d "." -f1-'+str(num+1)+ '> grpfiles.list' ) if filename: if os.path.exists(filename): print "file", filename, "already exists! reading it in and appending it with new data" f = open(filename,'rb') BHhaloOLD = pickle.load(f) f.close() startStep = len(BHhaloOLD['haloID'][0]) os.system('rm '+filename) print "old file has", startStep, "halos already completed" else: startStep = 0 if initFile: if os.path.exists(initFile): print "found file ", initFile, "reading it in now" f = open(initFile,'rb') BHhaloOLD = pickle.load(f) f.close() startStep = len(BHhaloOLD['haloID'][0]) print "given file has", startStep, "halos already completed" if initFile==filename: print "given file has same name as target file... deleting old file to replace with new file" os.system('rm '+filename) if initFile==None: startStep = 0 f= open("grpfiles.list") munits = 'Msol' vunits = 'km s**-1' posunits = 'kpc' cposunits = 'a kpc' print "finding BH iords..." bhiords = getBHiords(simname) files = f.readlines() f.close() nsteps = len(files) - startStep nbh = len(bhiords) bhmass = array.SimArray(np.zeros((nbh,nsteps)),munits) haloid = np.zeros((nbh,nsteps)) mhalo = array.SimArray(np.zeros((nbh,nsteps)),munits) mdark = array.SimArray(np.zeros((nbh,nsteps)),munits) mstar = array.SimArray(np.zeros((nbh,nsteps)),munits) mgas = array.SimArray(np.zeros((nbh,nsteps)),munits) #vhalo = array.SimArray(np.zeros((nbh,nsteps,3)),vunits) dist = array.SimArray(np.zeros((nbh,nsteps)),posunits) distcen = array.SimArray(np.zeros((nbh,nsteps)),posunits) bhpos = array.SimArray(np.zeros((nbh,nsteps,3)),posunits) bhposcen = array.SimArray(np.zeros((nbh,nsteps,3)),posunits) bhvel = array.SimArray(np.zeros((nbh,nsteps,3)),vunits) bhvelcen = array.SimArray(np.zeros((nbh,nsteps,3)),vunits) halorad = array.SimArray(np.zeros((nbh,nsteps)),posunits) scaleFac = np.zeros((nbh,nsteps)) interact = np.zeros((nbh,nsteps)) intpos = array.SimArray(np.zeros((nbh,nsteps,3)),posunits) intvel = array.SimArray(np.zeros((nbh,nsteps,3)),vunits) intdist = array.SimArray(np.zeros((nbh,nsteps)),posunits) #rho = array.SimArray(np.zeros((nbh,nsteps)),'g cm**-3') #cs = array.SimArray(np.zeros((nbh,nsteps)),'cm s**-1') for stepcnt in range(nsteps): line = files[stepcnt+startStep].strip() print "getting halo information for ", line s = pynbody.load(line) s.physical_units() cboxsize = 2*s['x'].in_units('a kpc').max() simBH, = np.where(np.in1d(s.star['iord'],bhiords)) if not len(simBH): print "no BHs in this step! moving on..." 
continue boxsize = cboxsize.in_units('kpc') amigastat = readcol.readcol(line+'.amiga.stat',asdict=True) amigastat['cen'] = pynbody.array.SimArray((np.array([amigastat['Xc'],amigastat['Yc'],amigastat['Zc']]).T*1e3 - cboxsize/2.)*s.properties['a'],posunits) h = s.halos() #simBH, = np.where(np.in1d(s.star['iord'],bhiords)) okgrp, = np.where(np.in1d(s.star['amiga.grp'][simBH],amigastat['Grp'])) simBH = simBH[okgrp] asort = np.argsort(s.star['iord'][simBH]) simBH = simBH[asort] simoutBH, = np.where(np.in1d(bhiords,s.star['iord'][simBH])) #outBH = np.where(np.in1d(bhiords,s.star['iord'][simBH])) print "there are ", len(simBH), "BHs in the step" allHaloID,invInd = np.unique(s.star['amiga.grp'][simBH],return_inverse=True) statind, = np.where(np.in1d(amigastat['Grp'],allHaloID)) #bad, = np.where(np.in1d(allHaloID,amigastat['Grp'])==False) #np.delete(allHaloID,bad) #badind, = np.where(np.in1d(invInd,bad)) #np.delete(invInd,badind) if not np.array_equal(allHaloID[invInd],amigastat['Grp'][statind[invInd]]): print "f**k!" return haloid[simoutBH,stepcnt] = allHaloID[invInd] mhalo[simoutBH,stepcnt] = pynbody.array.SimArray(amigastat['Mvir(M_sol)'][statind[invInd]],munits) mstar[simoutBH,stepcnt] = pynbody.array.SimArray(amigastat['StarMass(M_sol)'][statind[invInd]],munits) mgas[simoutBH,stepcnt] = pynbody.array.SimArray(amigastat['GasMass(M_sol)'][statind[invInd]],munits) mgas[simoutBH,stepcnt] = pynbody.array.SimArray(amigastat['GasMass(M_sol)'][statind[invInd]],munits) halorad[simoutBH,stepcnt] = pynbody.array.SimArray(amigastat['Rvir(kpc)'][statind[invInd]]*s.properties['a'],posunits) scaleFac[simoutBH,stepcnt] = s.properties['a'] vel = np.array([amigastat['VXc'][statind[invInd]],amigastat['VYc'][statind[invInd]],amigastat['VZc'][statind[invInd]]]).T bhvel[simoutBH,stepcnt,:] = s.stars['vel'][simBH].in_units(vunits) - vel postemp = s.stars['pos'][simBH].in_units(posunits) - amigastat['cen'][statind[invInd]] postemp[(np.abs(postemp)>boxsize/2.)] = -1.0*(postemp[(np.abs(postemp)>boxsize/2.)]/np.abs(postemp[(np.abs(postemp)>boxsize/2.)])) * (boxsize-np.abs(postemp[(np.abs(postemp)>boxsize/2.)])) bhpos[simoutBH,stepcnt,:] = postemp bhmass[simoutBH,stepcnt] = s.stars['mass'][simBH].in_units(munits) dist[simoutBH,stepcnt] = np.sqrt((bhpos[simoutBH,stepcnt,:]**2).sum(axis=1)) for cnt in range(len(allHaloID)): if allHaloID[cnt]==0: continue print allHaloID[cnt] oo, = np.where(amigastat['Grp']==allHaloID[cnt]) #cen = pynbody.array.SimArray([amigastat['Xc'][oo[0]],amigastat['Yc'][oo[0]],amigastat['Zc'][oo[0]]],posunits) if amigastat['Mvir(M_sol)'][(amigastat['Grp']==allHaloID[cnt])]<minHM and allHaloID[cnt] > minNum: continue okcenter = 1 try: pynbody.analysis.halo.center(h[allHaloID[cnt]],mode=findcenter,wrap=True,cen_size='2 kpc') except ValueError: okcenter = 0 pynbody.analysis.halo.center(h[allHaloID[cnt]],mode=findcenter,Wrap=True,cen_size='2 kpc',vel=False) haloBHs, = np.where(np.in1d(h[allHaloID[cnt]].star['iord'],bhiords)) outBH, = np.where(np.in1d(bhiords,h[allHaloID[cnt]].star['iord'][haloBHs])) #pynbody.transformation.inverse_translate(s,cen) closeBHs, = np.where((s.star['r'][simBH].in_units('kpc')<amigastat['Rvir(kpc)'][oo]*s.properties['a'])&(s.star['amiga.grp'][simBH]>allHaloID[cnt])) closeBHs = simBH[closeBHs] otheroutBHs, = np.where(np.in1d(bhiords,s.star['iord'][closeBHs])) bhposcen[outBH,stepcnt,:] = h[allHaloID[cnt]].stars['pos'][haloBHs].in_units(posunits) distcen[outBH,stepcnt] = h[allHaloID[cnt]].stars['r'][haloBHs].in_units(posunits) interact[otheroutBHs,stepcnt] = allHaloID[cnt] 
intpos[otheroutBHs,stepcnt,:] = s.stars['pos'][closeBHs].in_units(posunits) #intvel[otheroutBHs,stepcnt,:] = s.stars['vel'][closeBHs].in_units(vunits) intdist[otheroutBHs,stepcnt] = s.stars['r'][closeBHs].in_units(posunits) if okcenter == 1: intvel[otheroutBHs,stepcnt,:] = s.stars['vel'][closeBHs].in_units(vunits) bhvelcen[outBH,stepcnt,:] = h[allHaloID[cnt]].stars['vel'][haloBHs].in_units(vunits) print "deleting stuff" del(s) del(h) gc.collect() bhhalo = {'iord':bhiords,'mass':bhmass,'pos':bhpos,'poscen':bhposcen,'vel':bhvel,'velcen':bhvelcen,'haloID':haloid,'halomass': mhalo,'halostarmass':mstar,'halodarkmass':mdark,'halogasmass':mgas,'rhalo':halorad,'dist':dist,'distcen':distcen,'interact':interact,'intdist':intdist,'intvel':intvel,'intpos':intpos,'scaleFac':scaleFac} if startStep != 0: bhhalo['mass'] = np.append(BHhaloOLD['mass'], bhhalo['mass'],axis=1) bhhalo['pos'] = np.append(BHhaloOLD['pos'], bhhalo['pos'],axis=1) bhhalo['poscen'] = np.append(BHhaloOLD['poscen'], bhhalo['poscen'],axis=1) bhhalo['vel'] = np.append(BHhaloOLD['vel'], bhhalo['vel'],axis=1) bhhalo['velcen'] = np.append(BHhaloOLD['velcen'], bhhalo['velcen'],axis=1) bhhalo['haloID'] = np.append(BHhaloOLD['haloID'], bhhalo['haloID'],axis=1) bhhalo['halomass'] = np.append(BHhaloOLD['halomass'], bhhalo['halomass'],axis=1) bhhalo['halostarmass'] = np.append(BHhaloOLD['halostarmass'], bhhalo['halostarmass'],axis=1) bhhalo['halodarkmass'] = np.append(BHhaloOLD['halodarkmass'], bhhalo['halodarkmass'],axis=1) bhhalo['halogasmass'] = np.append(BHhaloOLD['halogasmass'], bhhalo['halogasmass'],axis=1) bhhalo['rhalo'] = np.append(BHhaloOLD['rhalo'], bhhalo['rhalo'],axis=1) bhhalo['dist'] = np.append(BHhaloOLD['dist'], bhhalo['dist'],axis=1) bhhalo['distcen'] = np.append(BHhaloOLD['distcen'], bhhalo['distcen'],axis=1) bhhalo['interact'] = np.append(BHhaloOLD['interact'],bhhalo['interact'],axis=1) bhhalo['intdist'] = np.append(BHhaloOLD['intdist'],bhhalo['intdist'],axis=1) bhhalo['intpos'] = np.append(BHhaloOLD['intpos'],bhhalo['intpos'],axis=1) bhhalo['intvel'] = np.append(BHhaloOLD['intvel'],bhhalo['intvel'],axis=1) bhhalo['scaleFac'] = np.append(BHhaloOLD['scaleFac'], bhhalo['scaleFac'],axis=1) if filename: f = open(str(filename),'wb') pickle.dump(bhhalo,f) f.close() return bhhalo
import pynbody
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import readcol

files1 = readcol.readcol('60352986.fits.txt')
files2 = readcol.readcol('60354630.fits.txt')

maxtime = max(files2[:, 0])
print maxtime

index = np.where(files1[:, 0] <= maxtime)

x_position1 = files1[:, 1]
x_position = x_position1[index]
y_position1 = files1[:, 2]   # y positions are in column 2; the original read column 1 (x) twice
y_position = y_position1[index]

X_POSITION2 = files2[:, 1]
Y_POSITION2 = files2[:, 2]
#Y_POSITION= np.array(Y_POSITION)

plt.plot(x_position, y_position)
plt.ylabel("Y-POSITION for 60352986")
plt.xlabel("X-POSITION")
def getAccretion(simname, BHlist=[],filename=False, allData=False): if not os.path.exists('files.list'): print "files.list not found. generating list of output files..." getFileLists(simname) bhids = getBHiords(simname) files = open("files.list",'r') f1 = files.readlines() s = pynbody.load(f1[0].strip('\n')) munits = s['mass'].units posunits = s['x'].units velunits = s['vx'].units potunits = s['phi'].units tunits = posunits/velunits Eunits = munits*potunits files.close() print "separating BH data..." acclogFile = simname+'.BHAccLog' os.system("awk -F ' ' '{print >$1}' "+acclogFile) #bhAccData = readcol.readcol(acclogFile) #bhids = np.unique(bhAccData[:,0]) if len(BHlist)>0: matches = np.in1d(bhids, BHlist) bhAccHist = {'iord':bhids[matches],'data':np.array([])} else: bhAccHist = {'iord':bhids,'data':np.array([])} print "there are ", len(bhids), " BHs that have existed in this simulation" if len(BHlist)>0: nBHs = len(BHlist) else: nBHs = len(bhids) print "getting data...." cnt = 0 for id in bhids: if len(BHlist)>0: match, = np.where(BHlist==id) if len(match)==0: os.system("rm "+str(np.int(id))) continue print "getting data for BH ", id bhAccData = readcol.readcol(str(np.int(id))) os.system("rm "+str(np.int(id))) bad, = np.where(bhAccData[:,0] != id) if len(bad)>0: print "WARNING: bad ID found in miniorbit.txt file after awk... deleting" bhAccData = np.delete(bhAccData,bad,axis=0) cnt += 1 GoodScale = True print "BH #"+str(cnt)+"/"+str(nBHs) time = bhAccData[:,2] o = np.argsort(time) timeOrd = time[o] t1 = timeOrd[0:len(timeOrd)-1] t2 = timeOrd[1:len(timeOrd)] bad = np.where(np.equal(t1,t2)) np.delete(o,bad) time = array.SimArray(time[o],tunits) iGasOrd = bhAccData[o,1] MgasInit = array.SimArray(bhAccData[o,3],munits) MbhInit = array.SimArray(bhAccData[o,4],munits) MgasFinal = array.SimArray(bhAccData[o,5],munits) MbhFinal = array.SimArray(bhAccData[o,6],munits) dMgas = array.SimArray(bhAccData[o,7],munits) dMBH = array.SimArray(bhAccData[o,8],munits) dMneed = array.SimArray(bhAccData[o,9],munits) scaleFac = bhAccData[o,19] dx = array.SimArray(bhAccData[o,10],posunits)*scaleFac dy = array.SimArray(bhAccData[o,11],posunits)*scaleFac dz = array.SimArray(bhAccData[o,12],posunits)*scaleFac dvx = array.SimArray(bhAccData[o,13],velunits) dvy = array.SimArray(bhAccData[o,14],velunits) dvz = array.SimArray(bhAccData[o,15],velunits) Ugas = array.SimArray(bhAccData[o,16],Eunits) fBall = array.SimArray(bhAccData[o,17],posunits)*scaleFac tCoolOff = array.SimArray(bhAccData[o,18],tunits) density = array.SimArray(bhAccData[o,20],munits/posunits**3)*scaleFac**(-3) temp = array.SimArray(bhAccData[o,21],'K') metals = array.SimArray(bhAccData[o,22]) if allData: datastruct = {'time':time.in_units('Gyr'),'Mgas':MgasInit.in_units('Msol'),'Mbh':MbhInit.in_units('Msol'),'MgasFinal':MgasFinal.in_units('Msol'),'MbhFinal':MbhFinal.in_units('Msol'),'deltaMgas':dMgas.in_units('Msol'),'deltaM':dMBH.in_units('Msol'),'Mneed':dMneed.in_units('Msol'),'dx':dx.in_units('kpc'),'dy':dy.in_units('kpc'),'dz':dz.in_units('kpc'),'dvx':dvx.in_units('kpc'),'dvy':dvy.in_units('kpc'),'dvz':dvz.in_units('kpc'),'Ugas':Ugas,'fBall':fBall.in_units('kpc',a=1),'tCoolOff':tCoolOff,'scaleFac':scaleFac,'density':density.in_units('m_p cm**-3',a=1),'temp':temp,'metals':metals} else: datastruct = {'time':time.in_units('Gyr'),'Mgas':MgasInit.in_units('Msol'),'Mbh':MbhInit.in_units('Msol'),'deltaM':dMBH.in_units('Msol'),'dx':dx.in_units('kpc',a=1),'dy':dy.in_units('kpc',a=1),'dz':dz.in_units('kpc',a=1),'dvx':dvx.in_units('km 
s**-1',a=1),'dvy':dvy.in_units('km s**-1',a=1),'dvz':dvz.in_units('km s**-1',a=1),'Ugas':Ugas,'fBall':fBall.in_units('kpc',a=1),'tCoolOff':tCoolOff,'scaleFac':scaleFac,'density':density.in_units('m_p cm**-3',a=1),'temp':temp,'metals':metals} bhAccHist['data'] = np.append(bhAccHist['data'],datastruct) del(s) if filename: f = open(str(filename),'wb') pickle.dump(bhAccHist,f) f.close() return bhAccHist
def getBHMergers(simname,orbitfile=None,halofile=None,outputname=None,filename=None): if orbitfile: f = open(orbitfile,'rb') BHorbit = pickle.load(f) f.close() if halofile: f2 = open(halofile,'rb') BHhalo = pickle.load(f2) f2.close() if orbitfile or halofile: if not os.path.exists(orbitfile) or not os.path.exists(halofile): print "ERROR: cannot fine orbit and/or halo file" return if not os.path.exists('files.list'): print "files.list not found. generating list of output files..." getFileLists(simname) files = open("files.list",'r') f1 = files.readlines() s = pynbody.load(f1[0].strip('\n')) munits = s['mass'].units posunits = s['x'].units velunits = s['vx'].units potunits = s['phi'].units tunits = posunits/velunits Eunits = munits*potunits files.close() if not os.path.exists('BHmerge.txt'): if outputname==None: os.system("awk '/BHSink/ && /Merge/ && /eating/' *out* > BHmerge.txt") else: os.system("awk '/BHSink/ && /Merge/ && /eating/' *"+outputname+"* > BHmerge.txt") else: print "WARNING: BHmerge.txt already exists for this run... using that one. Please delete if you would like it to be updated." a,b,ID1,c,ID2,d, Time, e, f, kvel, g, h, Mratio = readcol.readcol('BHmerge.txt',twod=False) del(a,b,c,d,e,f,g,h) ID1 = np.int(ID1) ID2 = np.int(ID2) Time = np.float(Time) kvel = np.float(kvel) Mratio = np.float(Mratio) id2tmp, count = np.unique(ID2,return_counts=True) bad, = np.where(count>1) if len(bad)>0: print "Warning! Found a double counted merger. Fixing..." idbad = id2tmp[bad] for i in idbad: baddat, = np.where(ID2==idbad) np.delete(ID2,baddat[0:len(ID2)-1]) np.delete(ID1,baddat[0:len(ID2)-1]) np.delete(Time,baddat[0:len(ID2)-1]) np.delete(kvel,baddat[0:len(ID2)-1]) np.delete(Mratio,baddat[0:len(ID2)-1]) o = np.argsort(Time) Time = array.SimArray(Time[o],tunits) Time = Time.in_units('Gyr') ID1 = ID1[o] ID2 = ID2[o] kvel = kvel[o] Mratio = Mratio[o] nMergers = len(Time) print "found", nMergers, "BH-BH mergers occuring in simulation" M1 = array.SimArray(np.zeros(nMergers),'Msol') M2 = array.SimArray(np.zeros(nMergers),'Msol') HaloID1 = np.zeros(nMergers) HaloID2 = np.zeros(nMergers) HaloMass1 = array.SimArray(np.zeros(nMergers),'Msol') HaloGas1 = array.SimArray(np.zeros(nMergers),'Msol') HaloStars1 = array.SimArray(np.zeros(nMergers),'Msol') HaloMass2 = array.SimArray(np.zeros(nMergers),'Msol') HaloGas2 = array.SimArray(np.zeros(nMergers),'Msol') HaloStars2 = array.SimArray(np.zeros(nMergers),'Msol') if BHorbit or BHhalo: for i in range(nMergers): if BHorbit: no1, = np.where(BHorbit['iord']==ID1[i]) no2, = np.where(BHorbit['iord']==ID2[i]) to, = np.where(BHorbit['data'][no1]['Time'].in_units('Gyr')<Time[i]) if BHorbit['data'][no2]['Time'][-1].in_units('Gyr') > Time[1]: print "WARNING larger time in orbit file for BH", BHhalo['iord'][no2]," Tmerge", Time[i], "Torbit", BHorbit['data'][n1]['Time'].max() M1[i] = BHorbit['data'][no1]['mass'][to[-1]].in_units('Msol') M2[i] = BHorbit['data'][no2]['mass'][-1].in_units('Msol') if BHhalo: nh1, = np.where(BHhalo['iord']==ID1[i]) nh2, = np.where(BHhalo['iord']==ID2[i]) nonz, = np.where(BHhalo['mass'][nh2]>0) HaloID1[i] = BHhalo['haloID'][nh1][nonz[-1]] HaloID2[i] = BHhalo['haloID'][nh2][nonz[-1]] HaloMass1[i] = BHhalo['halomass'][nh1][nonz[-1]] HaloMass2[i] = BHhalo['halomass'][nh2][nonz[-1]] HaloGas1[i] = BHhalo['halogasmass'][nh1][nonz[-1]] HaloGas2i[i] = BHhalo['halogasmass'][nh2][nonz[-1]] HaloStars1[i] = BHhalo['halostarmass'][nh1][nonz[-1]] HaloStars2[i] = BHhalo['halostarmass'][nh2][nonz[-1]] BHmerge = 
{'Time':Time,'ID1':ID1,'ID2':ID2,'M1':M1,'M2':M2,'halo1':HaloID1,'halo2':HaloID2,'Hmass1':HaloMass1,'HGasMass1':HaloGas1,'HStarMass1':HaloStars1,'Hmass1':HaloMass2,'HGasMass1':HaloGas2,'HStarMass1':HaloStars2,'kickV':kvel,'ratio':Mratio} if filename: f = open(filename,'wb') pickle.dump(BHmerge, f) f.close() return BHmerge
print z
unit = open(globals.codepath + 'calibrations/zregion.reg', 'w')
wsol = pf.open(globals.codepath + 'calibrations/tspec_wavesol.fits')[0].data
sz = wsol.shape
unit.write('# Region file format: DS9 version 4.1\n')
unit.write('# Filename: zregion.reg\n')
unit.write('global color=green dashlist=8 3 width=3 font="helvetica 14 bold" select=1 highlite=1 dash=0 fixed=0 edit=1 move=1 delete=1 include=1 source=1\n')
unit.write('physical\n')
llist = np.arange(80) / 80.
llist = llist.tolist()
llist.extend(np.arange(40) / 40. * 2. + 1.)
print 'LList: ', llist, '\n'
try:
    names, (ord, xx, yy, lam) = rc.readcol(globals.codepath + 'calibrations/tspec_wavelength_file.dat', names=True, twod=False)
    print ord
    print xx
    print 'lam: ', lam
    z = float(z)
    print 'z= ', z
    lam = lam / (1. + z)
    m = max(lam)
    llist2 = [i for i in llist if i < m]
    print llist2
    r1 = range(0, len(llist2))
    r2 = range(0, len(lam))
# Program to add info from cres into a table named res.tab stored in ./result
from readcol import readcol
from numpy import *
import sys

basedir = '/work/01208/jardel/super_scaled/'
modnum = int(sys.argv[1])
filein = 'model' + (str(modnum)).rjust(5, '0') + '.bin'
rk, rhok = readcol(filein, twod=False)
nk = rk.size

cresf = open('cres.mod' + (str(modnum)).rjust(5, '0'), 'r')
cres = cresf.readline()
cresf.close()

rhok = list(rhok)
creslist = cres.split()
list2 = [float(i) for i in creslist]
out = rhok + list2

k = 0
out2 = []
for i in out:
    if (k <= nk - 1):
        out2.append('%1.4e' % i + ' ')
    elif (k == nk or k == nk + 5):
        out2.append('%05i' % i + ' ')
        # check that integer formatting is actually being used
    else:
        out2.append('%5.5f' % i + ' ')
import readcol
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd

files = readcol.readcol('times.list')
redshift = files[:, 0]
time = files[:, 1]

plt.plot(time, redshift)
plt.tick_params(axis="x", labelcolor="r")
plt.xlabel("Time")
plt.ylabel("Redshift")
plt.legend()
plt.show()
def open_1d_txt(filename, xaxcol=0, datacol=1, errorcol=2, text_reader='simple', atpytype='ascii', **kwargs): """ Attempt to read a 1D spectrum from a text file assuming wavelength as the first column, data as the second, and (optionally) error as the third. Reading can be done either with atpy or a 'simple' reader. If you have an IPAC, CDS, or formally formatted table, you'll want to use atpy. If you have a simply formatted file of the form, e.g. # name name # unit unit data data data data kwargs are passed to atpy.Table """ if text_reader in ('simple','readcol') or not atpyOK: if not atpyOK: warn("WARNING: atpy not installed; will use simple reader instead.") if text_reader == 'simple': data, error, XAxis, T = simple_txt(filename, xaxcol = xaxcol, datacol = datacol, errorcol = errorcol, **kwargs) elif text_reader == 'readcol': Tlist = readcol.readcol(filename, twod=False, **kwargs) XAxis = units.SpectroscopicAxis(Tlist[xaxcol]) data = Tlist[datacol] error = Tlist[errorcol] T = dummy_class() Tdict = readcol.readcol(filename, asDict=True, **kwargs) T.data = dummy_class() T.data.dtype = dummy_class() T.data.dtype.names = hdr T.columns = {} T.columns[T.data.dtype.names[xaxcol]] = dummy_class() T.columns[T.data.dtype.names[xaxcol]].unit = colunits[xaxcol] T.columns[T.data.dtype.names[datacol]] = dummy_class() T.columns[T.data.dtype.names[datacol]].unit = colunits[datacol] elif text_reader in ('atpy','asciitable'): T = atpy.Table(filename, type=atpytype, masked=True, **kwargs) xarr = T.data[T.data.dtype.names[xaxcol]] data = T.data[T.data.dtype.names[datacol]] if len(T.columns) > errorcol: error = T.data[T.data.dtype.names[errorcol]] else: # assume uniform, zero error error = data*0 if 'xunits' in T.keywords: xunits = T.keywords['xunits'] else: xunits = 'unknown' XAxis = units.SpectroscopicAxis(xarr,xunits) # Need this in Spectrum class to correctly parse header T.xaxcol = xaxcol T.datacol = datacol return data, error, XAxis, T
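# Minimal usage sketch for open_1d_txt (hypothetical file name; assumes a plain
# two- or three-column text spectrum of wavelength, flux and optional error).
def _example_open_1d_txt():
    data, error, xarr, T = open_1d_txt('spectrum.txt', xaxcol=0, datacol=1,
                                       errorcol=2, text_reader='simple')
    print xarr[:5], data[:5]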
import numpy as np from readcol import readcol from scipy.interpolate import LinearNDInterpolator as ll import os #right now this only returns Rosseland opacities. Ferguson does provide one file with planck opacities, but #it is not with the same abundances... what to do. skip it for now. path,fname=os.path.split(__file__) OpacityFname='GN93/g7.02.tron' d=readcol(os.path.join(path,OpacityFname),colNames=(['T']+list(np.linspace(-8,1,19)))) log_rhos={} xy=[] z=[] for k in np.linspace(-8,1,19): log_rhos[k]=np.log10(((10**(d['T']-6))**3)*(10**k)) xy.extend(zip(d['T'],log_rhos[k])) z.extend(10**d[k]) func=ll(np.array(xy),np.array(z)) def get_Ferguson_opacity(T,density=10**-14): kappa=func(np.log10(T),np.log10(density)) if np.any(np.isnan(kappa)): raise ValueError("you're outside of the convex-hull of the ferguson opacity data T=%f,rho=%e"%(T,density)) else: return kappa
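# Hedged usage sketch for get_Ferguson_opacity (the temperature and density values are
# illustrative, not from the source): evaluate the Rosseland-mean opacity at a single
# point well inside the tabulated grid; points outside it raise the ValueError above.
kappa = get_Ferguson_opacity(1.0e3, density=1.0e-14)
print(kappa)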
#!/usr/bin/python2.7 # -*- coding: utf-8 -*- import pynbody import pylab import numpy as np import matplotlib.pylab as plt import readcol ''' r = (G * BH_Mass) / (stars_vel**2) G = 6.674e-11 ''' # Load the snapshots (s will stand for the loaded snapshot) Path = "/media/jillian//cptmarvel/cptmarvel.cosmo25cmb.4096g5HbwK1BH.004096/supersample/" files = readcol.readcol( '/media/jillian/cptmarvel/cptmarvel.cosmo25cmb.4096g5HbwK1BH.004096/supersample/files.list' ) all_files = files[:, 0] # Function to tell where the BH is def findBH(s): BH = s.stars[pynbody.filt.LowPass('tform', 0.0)] return BH # std stands for standard deviation (velocity dispersion), taken per component over all stars def DispersionVelocity(s): velocity = s.stars['vel'] x = np.std(velocity[:, 0]) y = np.std(velocity[:, 1])
# de-project the velocity fields sPA = np.sin(PA * np.pi / 180.) cPA = np.cos(PA * np.pi / 180.) xmod = -sPA * xbin + cPA * ybin ymod = -cPA * xbin - sPA * ybin # radial extent of the stellar kinematics Rmod = (xmod**2 + (ymod / np.cos(incl * np.pi / 180.))**2)**0.5 Rm = np.max(Rmod) print 'gal, incl, Rmax =', gal, incl, Rm #----------------- # read V and S kinemetry profiles R_arsec, PA_rad, er_PA_rad, q, er_q, k1, erk1, k51, erk51, Vsys = readcol( 'data_input/vel_' + gal + '.txt', skipline=1, twod=False) R_arsec2, PA_rad, er_PA_rad, q, er_q, k0, erk0, k51, erk51 = readcol( 'data_input/sigma_' + gal + '.txt', skipline=1, twod=False) R = R_arsec vobs = k1 sobs = k0 #evobs=erk1 # the error of the velocity from kinemetry routine #esobs=erk0 # the error of the velocity dispersion from kinemetry routine # calculation of the error of the V and Sigma profiles within the ellipses defined in the kinemetry routine dVbins = np.zeros(len(R)) dSbins = np.zeros(len(R)) for j in range(0, len(R), 1): dVbin_new = dVbin[(Rmod >= R[j] - 0.5) & (Rmod < R[j] + 0.5)] dSbin_new = dSbin[(Rmod >= R[j] - 0.5) & (Rmod < R[j] + 0.5)] dVbins[j] = np.nanmedian(dVbin_new)
def getBHorbit(simname, BHlist=[], filename=None): if not os.path.exists('files.list'): print "files.list not found. generating list of output files..." getFileLists(simname) print "getting all BH id numbers..." bhids = getBHiords(simname) files = open("files.list", 'r') f1 = files.readlines() s = pynbody.load(f1[0].strip('\n')) munits = s['mass'].units posunits = s['x'].units velunits = s['vx'].units potunits = s['phi'].units tunits = posunits / velunits Eunits = munits * potunits scaleUnit = pynbody.units.Unit('a') files.close() print posunits / scaleUnit print velunits / scaleUnit orbitfile = simname + ".orbit" #print "reading "+orbitfile+"...." #bhorbitData = readcol.readcol(orbitfile) #bhids = np.unique(bhorbitData[:,0]) if len(BHlist) > 0: matches = np.in1d(bhids, BHlist) bhorbit = {'iord': bhids[matches], 'data': np.array([])} else: bhorbit = {'iord': bhids, 'data': np.array([])} print "there are ", len(bhids), " BHs that have existed in this simulation" if len(BHlist) > 0: nBHs = len(BHlist) else: nBHs = len(bhids) print "getting data...." cnt = 0 os.system("awk -F ' ' '{print >$1}' " + orbitfile) print bhorbit['iord'] for id in bhids: print "getting data for BH ", id if len(BHlist) > 0: match, = np.where(BHlist == id) if len(match) == 0: os.system("rm " + str(np.int(id))) continue bhorbitData = readcol.readcol(str(np.int(id))) os.system("rm " + str(np.int(id))) bad, = np.where(bhorbitData[:, 0] != id) if len(bad) > 0: print "WARNING: bad ID found in miniorbit.txt file after awk... deleting" bhorbitData = np.delete(bhorbitData, bad, axis=0) cnt += 1 GoodScale = True print "BH #" + str(cnt) + "/" + str(len(bhids)) # curbh, = np.where(bhorbitData[:,0]==id) time = array.SimArray(bhorbitData[:, 1], tunits) step = bhorbitData[:, 2] mass = bhorbitData[:, 3] x = bhorbitData[:, 4] y = bhorbitData[:, 5] z = bhorbitData[:, 6] vx = bhorbitData[:, 7] vy = bhorbitData[:, 8] vz = bhorbitData[:, 9] pot = bhorbitData[:, 10] mdot = bhorbitData[:, 11] deltaM = bhorbitData[:, 12] E = bhorbitData[:, 13] dtEff = bhorbitData[:, 14] if len(bhorbitData[0, :]) < 16: print "uh oh, trying to find scale factor data, but cannot!" 
scaleFac = np.ones(len(bhorbitData[:, 1])) redshift = np.ones(len(bhorbitData[:, 1])) GoodScale = False else: scaleFac = bhorbitData[:, 15] redshift = 1 / scaleFac - 1 o = np.argsort(time) timeOrd = time[o] t1 = timeOrd[0:len(timeOrd) - 1] t2 = timeOrd[1:len(timeOrd)] bad = np.where(np.equal(t1, t2)) np.delete(o, bad) time = array.SimArray(time[o], tunits) step = step[o] mass = array.SimArray(mass[o], munits) x = array.SimArray(x[o] * scaleFac[o], posunits / scaleUnit) y = array.SimArray(y[o] * scaleFac[o], posunits / scaleUnit) z = array.SimArray(z[o] * scaleFac[o], posunits / scaleUnit) vx = array.SimArray(vx[o] * scaleFac[o], velunits / scaleUnit) vy = array.SimArray(vy[o] * scaleFac[o], velunits / scaleUnit) vz = array.SimArray(vz[o] * scaleFac[o], velunits / scaleUnit) pot = array.SimArray(pot[o], potunits) mdot = array.SimArray(mdot[o], munits / tunits) deltaM = array.SimArray(deltaM[o], munits) E = array.SimArray(E[o], Eunits) dtEff = array.SimArray(dtEff[o], tunits) scaleFac = scaleFac[o] redshift = redshift[o] if GoodScale: data = { 'Time': time, 'step': step, 'mass': mass.in_units('Msol'), 'x': x.in_units('kpc'), 'y': y.in_units('kpc'), 'z': z.in_units('kpc'), 'vx': vx.in_units('km s**-1'), 'vy': vy.in_units('km s**-1'), 'vz': vz.in_units('km s**-1'), 'pot': pot, 'mdot': mdot.in_units('Msol yr**-1'), 'dM': deltaM.in_units('Msol'), 'E': E, 'dt': dtEff, 'redshift': redshift, 'scaleFac': scaleFac } else: data = { 'Time': time, 'step': step, 'mass': mass.in_units('Msol'), 'x': x, 'y': y, 'z': z, 'vx': vx, 'vy': vy, 'vz': vz, 'pot': pot, 'mdot': mdot.in_units('Msol yr**-1'), 'dM': deltaM.in_units('Msol'), 'E': E, 'dt': dtEff, 'redshift': redshift, 'scaleFac': scaleFac } bhorbit['data'] = np.append(bhorbit['data'], data) del (s) if filename: f = open(str(filename), 'wb') pickle.dump(bhorbit, f) f.close() return bhorbit
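# Hedged usage sketch for getBHorbit (the simulation name and pickle file name are
# illustrative, not from the source): run from the simulation directory so that
# files.list and the <simname>.orbit file can be found, then pickle all BH orbits.
bhorbit = getBHorbit('mysim', filename='bhorbit.pkl')
print(len(bhorbit['iord']))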
#!/usr/bin/python2.7 # -*- coding: utf-8 -*- import pynbody import pylab import numpy as np import matplotlib.pylab as plt import readcol Path = "/media/jillian//cptmarvel/cptmarvel.cosmo25cmb.4096g5HbwK1BH.004096/supersample/nobh/" files = readcol.readcol( '/media/jillian/cptmarvel/cptmarvel.cosmo25cmb.4096g5HbwK1BH.004096/supersample/nobh/files.list' ) all_files = files[:, 0] filesBH = readcol.readcol('/home/lupe/Lupe_project-/highres.txt', skipline=1) radius_influence = np.array(filesBH[:, 6]) radius = radius_influence.astype(np.float) BHx_direction = np.array(filesBH[:, 2]) BHx = BHx_direction.astype(np.float) BHy_direction = np.array(filesBH[:, 3]) BHy = BHy_direction.astype(np.float) BHz_direction = np.array(filesBH[:, 4]) BHz = BHz_direction.astype(np.float) f = open("velocity_without_BH.txt", "w+") f.write("Velocity of stars in the galaxy without a BH\n") j = 0 for i in all_files: a = pynbody.load(Path + i) a.physical_units() pynbody.analysis.halo.center(a)
# use photo metallicities from 2011 A&A 531, A152 with spread in age of trange mlrange = [ 1e9, -1e9 ] for i in Zrange: for j in trange: lo, hi = SSP_load( i, j, 'V' ) mlrange[ 0 ] = min( [ lo, mlrange[ 0 ] ] ) mlrange[ 1 ] = max( [ hi, mlrange[ 1 ] ] ) ml = mean( mlrange ) ml_err = max( [ ( mlrange[ 1 ] - mlrange[ 0 ] ) / 2., mlrange[ 1 ] - ml ] ) rl, light = readcol( 'light.dat', twod = False ) r, rhol, rhoread, rhoh = readcol( 'chi.out', twod = False ) rho_err = zeros( r.size ) rho = zeros( r.size ) ii = 0 for i in zip( rhol, rhoh ): rho[ ii ] = mean( [ i[ 0 ], i[ 1 ] ] ) rho_err[ ii ] = max( [ ( i[ 1 ] - i[ 0 ] ) / 2., i[ 1 ] - rho[ ii ] ] ) ii += 1 stars = interp( r, rl, light * ml ) stars_err = stars * ml_err dm = rho - stars
def getBHMergers(simname, orbitfile=None, halofile=None, outputname=None, filename=None): if orbitfile: f = open(orbitfile, 'rb') BHorbit = pickle.load(f) f.close() if halofile: f2 = open(halofile, 'rb') BHhalo = pickle.load(f2) f2.close() if orbitfile or halofile: if not os.path.exists(orbitfile) or not os.path.exists(halofile): print "ERROR: cannot find orbit and/or halo file" return if not os.path.exists('files.list'): print "files.list not found. generating list of output files..." getFileLists(simname) files = open("files.list", 'r') f1 = files.readlines() s = pynbody.load(f1[0].strip('\n')) munits = s['mass'].units posunits = s['x'].units velunits = s['vx'].units potunits = s['phi'].units tunits = posunits / velunits Eunits = munits * potunits files.close() if not os.path.exists('BHmerge.txt'): if outputname == None: os.system( "awk '/BHSink/ && /Merge/ && /eating/' *out* > BHmerge.txt") else: os.system("awk '/BHSink/ && /Merge/ && /eating/' *" + outputname + "* > BHmerge.txt") else: print "WARNING: BHmerge.txt already exists for this run... using that one. Please delete if you would like it to be updated." a, b, ID1, c, ID2, d, Time, e, f, kvel, g, h, Mratio = readcol.readcol( 'BHmerge.txt', twod=False) del (a, b, c, d, e, f, g, h) ID1 = np.int(ID1) ID2 = np.int(ID2) Time = np.float(Time) kvel = np.float(kvel) Mratio = np.float(Mratio) id2tmp, count = np.unique(ID2, return_counts=True) bad, = np.where(count > 1) if len(bad) > 0: print "Warning! Found a double counted merger. Fixing..." idbad = id2tmp[bad] for i in idbad: baddat, = np.where(ID2 == i) np.delete(ID2, baddat[0:len(ID2) - 1]) np.delete(ID1, baddat[0:len(ID2) - 1]) np.delete(Time, baddat[0:len(ID2) - 1]) np.delete(kvel, baddat[0:len(ID2) - 1]) np.delete(Mratio, baddat[0:len(ID2) - 1]) o = np.argsort(Time) Time = array.SimArray(Time[o], tunits) Time = Time.in_units('Gyr') ID1 = ID1[o] ID2 = ID2[o] kvel = kvel[o] Mratio = Mratio[o] nMergers = len(Time) print "found", nMergers, "BH-BH mergers occurring in simulation" M1 = array.SimArray(np.zeros(nMergers), 'Msol') M2 = array.SimArray(np.zeros(nMergers), 'Msol') HaloID1 = np.zeros(nMergers) HaloID2 = np.zeros(nMergers) HaloMass1 = array.SimArray(np.zeros(nMergers), 'Msol') HaloGas1 = array.SimArray(np.zeros(nMergers), 'Msol') HaloStars1 = array.SimArray(np.zeros(nMergers), 'Msol') HaloMass2 = array.SimArray(np.zeros(nMergers), 'Msol') HaloGas2 = array.SimArray(np.zeros(nMergers), 'Msol') HaloStars2 = array.SimArray(np.zeros(nMergers), 'Msol') if BHorbit or BHhalo: for i in range(nMergers): if BHorbit: no1, = np.where(BHorbit['iord'] == ID1[i]) no2, = np.where(BHorbit['iord'] == ID2[i]) to, = np.where( BHorbit['data'][no1]['Time'].in_units('Gyr') < Time[i]) if BHorbit['data'][no2]['Time'][-1].in_units('Gyr') > Time[i]: print "WARNING larger time in orbit file for BH", BHorbit['iord'][no2], " Tmerge", Time[i], "Torbit", BHorbit['data'][no2]['Time'].max() M1[i] = BHorbit['data'][no1]['mass'][to[-1]].in_units('Msol') M2[i] = BHorbit['data'][no2]['mass'][-1].in_units('Msol') if BHhalo: nh1, = np.where(BHhalo['iord'] == ID1[i]) nh2, = np.where(BHhalo['iord'] == ID2[i]) nonz, = np.where(BHhalo['mass'][nh2] > 0) HaloID1[i] = BHhalo['haloID'][nh1][nonz[-1]] HaloID2[i] = BHhalo['haloID'][nh2][nonz[-1]] HaloMass1[i] = BHhalo['halomass'][nh1][nonz[-1]] HaloMass2[i] = BHhalo['halomass'][nh2][nonz[-1]] HaloGas1[i] = BHhalo['halogasmass'][nh1][nonz[-1]] HaloGas2[i] = BHhalo['halogasmass'][nh2][nonz[-1]] HaloStars1[i] = BHhalo['halostarmass'][nh1][nonz[-1]] HaloStars2[i] = 
BHhalo['halostarmass'][nh2][nonz[-1]] BHmerge = { 'Time': Time, 'ID1': ID1, 'ID2': ID2, 'M1': M1, 'M2': M2, 'halo1': HaloID1, 'halo2': HaloID2, 'Hmass1': HaloMass1, 'HGasMass1': HaloGas1, 'HStarMass1': HaloStars1, 'Hmass2': HaloMass2, 'HGasMass2': HaloGas2, 'HStarMass2': HaloStars2, 'kickV': kvel, 'ratio': Mratio } if filename: f = open(filename, 'wb') pickle.dump(BHmerge, f) f.close() return BHmerge
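# Hedged usage sketch for getBHMergers (file names are illustrative, not from the
# source): scrape the merger lines from the simulation stdout into BHmerge.txt and
# cross-match them against previously pickled orbit and halo dictionaries.
mergers = getBHMergers('mysim', orbitfile='bhorbit.pkl', halofile='bhhalo.pkl',
                       filename='bhmergers.pkl')
print(len(mergers['Time']))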
def check_transformation_scatter_daophot( lfc_files, M_list=None, nx=4112, ny=4202, wrt='centre', n_sub=1, eps=0.5, return_residuals=True, ref_obsname='21sep30019', return_M_list=False, return_pixel_phase=False, lfc_path='/Users/christoph/OneDrive - UNSW/lfc_peaks/'): # INPUT: # 'lfc_files' - list of lfc files for which to check # 'M_list' - corresponding list of calculated transformation matrices # WARNING: They obviously need to be in the same order. See how it's done at the start of "check_all_shifts_with_telemetry" if M_list is not None: assert len(lfc_files) == len( M_list ), 'ERROR: list of files and list of matrices have different lengths!!!' else: M_list_new = [] # read reference LFC peak positions _, yref, xref, _, _, _, _, _, _, _, _ = readcol(lfc_path + ref_obsname + 'olc.nst', twod=False, skipline=2) xref = nx - xref yref = yref - 54. # or 53??? but does not matter for getting the transformation matrix if wrt == 'centre': xref -= nx // 2 yref -= ny // 2 ref_peaks_xy_list = [(xpos, ypos) for xpos, ypos in zip(xref, yref)] all_delta_x_list = [] all_delta_y_list = [] if return_pixel_phase: xphi_list = [] yphi_list = [] # loop over all files for i, lfc_file in enumerate(lfc_files): print('Processing observation ' + str(i + 1) + '/' + str(len(lfc_files)) + '...') # read observation LFC peak positions try: _, y, x, _, _, _, _, _, _, _, _ = readcol(lfc_file, twod=False, skipline=2) except: _, y, x, _, _, _, _, _, _ = readcol(lfc_file, twod=False, skipline=2) del _ x = nx - x y = y - 54. # or 53??? but does not matter for getting the transformation matrix if wrt == 'centre': x -= nx // 2 y -= ny // 2 obs_peaks_xy_list = [(xpos, ypos) for xpos, ypos in zip(x, y)] obs_peaks_xy = np.array(obs_peaks_xy_list).T if M_list is None: # NOTE that we do not want to shift to the centre twice, so we hard-code 'corner' here!!! (xref, yref, x, y) are already transformed above!!! M = find_affine_transformation_matrix( xref, yref, x, y, timit=True, eps=2., wrt='corner' ) # note that within "wavelength_solution" this is called "Minv" M_list_new.append(M) else: M = M_list[i] # now we need to match the peaks so we can compare the reference peaks with the (back-)transformed obs peaks good_ref_peaks = [] good_obs_peaks = [] for n, refpeak in enumerate(ref_peaks_xy_list): # print(n) shifted_obs_peaks = obs_peaks_xy - np.expand_dims( np.array(refpeak), axis=1) distance = np.sqrt(shifted_obs_peaks[0, :]**2 + shifted_obs_peaks[1, :]**2) if np.sum(distance < eps) > 0: if np.sum(distance < eps) > 1: print('FUGANDA: ', refpeak) print( 'There is probably a cosmic really close to an LFC peak - skipping this peak...' 
) else: good_ref_peaks.append(refpeak) good_obs_peaks.append((obs_peaks_xy[0, distance < eps], obs_peaks_xy[1, distance < eps])) # print(n, refpeak, np.sum(distance < eps)) # calculate pixel phase as defined by Anderson & King, 2000, PASP, 112:1360 if return_pixel_phase: x_pixel_phase = np.squeeze(good_obs_peaks)[:, 0] - np.round( np.squeeze(good_obs_peaks)[:, 0], 0) y_pixel_phase = np.squeeze(good_obs_peaks)[:, 1] - np.round( np.squeeze(good_obs_peaks)[:, 1], 0) # divide good_ref_peaks into several subsections for a more detailed investigation x_step = nx / np.sqrt(n_sub).astype(int) y_step = ny / np.sqrt(n_sub).astype(int) x_centres = np.arange(0.5 * x_step, (np.sqrt(n_sub).astype(int) + 0.5) * x_step, x_step) y_centres = np.arange(0.5 * y_step, (np.sqrt(n_sub).astype(int) + 0.5) * y_step, y_step) if wrt == 'centre': x_centres -= nx // 2 y_centres -= ny // 2 peak_subsection_id = [] for refpeak in good_ref_peaks: # first, figure out which subsection this particular peak falls into xpos = refpeak[0] ypos = refpeak[1] nearest_x_ix = find_nearest(x_centres, xpos, return_index=True) nearest_y_ix = find_nearest(y_centres, ypos, return_index=True) # then save that information peak_subsection_id.append((nearest_x_ix, nearest_y_ix)) # give each subsection a label subsection_id = [] for j in range(np.sqrt(n_sub).astype(int)): for i in range(np.sqrt(n_sub).astype(int)): subsection_id.append((i, j)) # (x,y) # # divide chip into several subsections for a more detailed investigation # section_masks = [] # section_indices = [] # x_step = nx / np.sqrt(n_sub).astype(int) # y_step = ny / np.sqrt(n_sub).astype(int) # for j in range(np.sqrt(n_sub).astype(int)): # for i in range(np.sqrt(n_sub).astype(int)): # q = np.zeros((ny, nx), dtype='bool') # q[j * y_step : (j+1) * y_step, i * x_step : (i+1) * x_step] = True # section_masks.append(q) # section_indices.append((i,j)) # (x,y) # go to homogeneous coordinates (ie add a z-component equal to 1, so that we can include translation into the matrix) good_ref_peaks_xyz = np.hstack( (np.array(good_ref_peaks), np.expand_dims(np.repeat(1, len(good_ref_peaks)), axis=1))) good_obs_peaks_xyz = np.hstack( (np.squeeze(np.array(good_obs_peaks)), np.expand_dims(np.repeat(1, len(good_obs_peaks)), axis=1))) # calculate transformed co-ordinates (ie the observed peaks transformed back to match the reference peaks) xyz_prime = np.dot(good_obs_peaks_xyz, M) delta_x = good_ref_peaks_xyz[:, 0] - xyz_prime[:, 0] delta_y = good_ref_peaks_xyz[:, 1] - xyz_prime[:, 1] delta_x_list = [] delta_y_list = [] # loop over all subsections for tup in subsection_id: # find indices of peaks falling in each subsection ix = [i for i, x in enumerate(peak_subsection_id) if x == tup] if return_residuals: delta_x_list.append(delta_x[ix]) delta_y_list.append(delta_y[ix]) else: # return difference between ref and obs delta_x_list.append(good_ref_peaks_xyz[ix, 0] - good_obs_peaks_xyz[ix, 0]) delta_y_list.append(good_ref_peaks_xyz[ix, 1] - good_obs_peaks_xyz[ix, 1]) ##### DO THIS IF YOU WANT TO GET A TRANSFORMATION MATRIX FOR EVERY SUBSECTION OF THE CHIP ##### # M = find_affine_transformation_matrix(np.squeeze(good_ref_peaks)[ix,0], np.squeeze(good_ref_peaks)[ix,1], np.squeeze(good_obs_peaks)[ix,0], np.squeeze(good_obs_peaks)[ix,1], timit=True, eps=2., wrt='corner') # good_ref_peaks_xyz = np.hstack((np.array(good_ref_peaks)[ix], np.expand_dims(np.repeat(1, len(ix)), axis=1))) # good_obs_peaks_xyz = np.hstack((np.squeeze(np.array(good_obs_peaks)[ix]), np.expand_dims(np.repeat(1, len(ix)), axis=1))) # 
xyz_prime = np.dot(good_obs_peaks_xyz, M) # delta_x = good_ref_peaks_xyz[:, 0] - xyz_prime[:, 0] # delta_y = good_ref_peaks_xyz[:, 1] - xyz_prime[:, 1] # plt.plot(delta_x,'.') # print(np.std(delta_x)) # sub_M_list.append(M) # append to all-files list all_delta_x_list.append(delta_x_list) all_delta_y_list.append(delta_y_list) if M_list is None: M_list = M_list_new[:] if return_pixel_phase: if not return_M_list: return all_delta_x_list, all_delta_y_list, x_pixel_phase, y_pixel_phase else: return all_delta_x_list, all_delta_y_list, x_pixel_phase, y_pixel_phase, M_list else: if not return_M_list: return all_delta_x_list, all_delta_y_list else: return all_delta_x_list, all_delta_y_list, M_list
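# Hedged usage sketch for check_transformation_scatter_daophot (file names are
# illustrative, not from the source): fit a fresh affine transformation per observation
# and return the residuals on a 2x2 grid of chip subsections along with the matrices.
lfc_files = ['/path/to/21sep30042olc.nst', '/path/to/21sep30043olc.nst']
dx, dy, M_list = check_transformation_scatter_daophot(lfc_files, M_list=None, n_sub=4,
                                                      return_M_list=True)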
def getBHhalo(simname, findcenter='hyb', minHM=1e10, minNum=30, filename=None, initFile=None): if not os.path.exists("grpfiles.list"): simname_split = simname.split('.') num = len(simname_split) os.system('ls ' + simname + '.00*.grp | cut -d "." -f1-' + str(num + 1) + '> grpfiles.list') if filename: if os.path.exists(filename): print "file", filename, "already exists! reading it in and appending it with new data" f = open(filename, 'rb') BHhaloOLD = pickle.load(f) f.close() startStep = len(BHhaloOLD['haloID'][0]) os.system('rm ' + filename) print "old file has", startStep, "halos already completed" else: startStep = 0 if initFile: if os.path.exists(initFile): print "found file ", initFile, "reading it in now" f = open(initFile, 'rb') BHhaloOLD = pickle.load(f) f.close() startStep = len(BHhaloOLD['haloID'][0]) print "given file has", startStep, "halos already completed" if initFile == filename: print "given file has same name as target file... deleting old file to replace with new file" os.system('rm ' + filename) if initFile == None: startStep = 0 f = open("grpfiles.list") munits = 'Msol' vunits = 'km s**-1' posunits = 'kpc' cposunits = 'a kpc' print "finding BH iords..." bhiords = getBHiords(simname) files = f.readlines() f.close() nsteps = len(files) - startStep nbh = len(bhiords) bhmass = array.SimArray(np.zeros((nbh, nsteps)), munits) haloid = np.zeros((nbh, nsteps)) mhalo = array.SimArray(np.zeros((nbh, nsteps)), munits) mdark = array.SimArray(np.zeros((nbh, nsteps)), munits) mstar = array.SimArray(np.zeros((nbh, nsteps)), munits) mgas = array.SimArray(np.zeros((nbh, nsteps)), munits) #vhalo = array.SimArray(np.zeros((nbh,nsteps,3)),vunits) dist = array.SimArray(np.zeros((nbh, nsteps)), posunits) distcen = array.SimArray(np.zeros((nbh, nsteps)), posunits) bhpos = array.SimArray(np.zeros((nbh, nsteps, 3)), posunits) bhposcen = array.SimArray(np.zeros((nbh, nsteps, 3)), posunits) bhvel = array.SimArray(np.zeros((nbh, nsteps, 3)), vunits) bhvelcen = array.SimArray(np.zeros((nbh, nsteps, 3)), vunits) halorad = array.SimArray(np.zeros((nbh, nsteps)), posunits) scaleFac = np.zeros((nbh, nsteps)) interact = np.zeros((nbh, nsteps)) intpos = array.SimArray(np.zeros((nbh, nsteps, 3)), posunits) intvel = array.SimArray(np.zeros((nbh, nsteps, 3)), vunits) intdist = array.SimArray(np.zeros((nbh, nsteps)), posunits) #rho = array.SimArray(np.zeros((nbh,nsteps)),'g cm**-3') #cs = array.SimArray(np.zeros((nbh,nsteps)),'cm s**-1') for stepcnt in range(nsteps): line = files[stepcnt + startStep].strip() print "getting halo information for ", line s = pynbody.load(line) s.physical_units() cboxsize = 2 * s['x'].in_units('a kpc').max() simBH, = np.where(np.in1d(s.star['iord'], bhiords)) if not len(simBH): print "no BHs in this step! moving on..." continue boxsize = cboxsize.in_units('kpc') amigastat = readcol.readcol(line + '.amiga.stat', asdict=True) amigastat['cen'] = pynbody.array.SimArray( (np.array([amigastat['Xc'], amigastat['Yc'], amigastat['Zc']]).T * 1e3 - cboxsize / 2.) 
* s.properties['a'], posunits) h = s.halos() #simBH, = np.where(np.in1d(s.star['iord'],bhiords)) okgrp, = np.where(np.in1d(s.star['amiga.grp'][simBH], amigastat['Grp'])) simBH = simBH[okgrp] asort = np.argsort(s.star['iord'][simBH]) simBH = simBH[asort] simoutBH, = np.where(np.in1d(bhiords, s.star['iord'][simBH])) #outBH = np.where(np.in1d(bhiords,s.star['iord'][simBH])) print "there are ", len(simBH), "BHs in the step" allHaloID, invInd = np.unique(s.star['amiga.grp'][simBH], return_inverse=True) statind, = np.where(np.in1d(amigastat['Grp'], allHaloID)) #bad, = np.where(np.in1d(allHaloID,amigastat['Grp'])==False) #np.delete(allHaloID,bad) #badind, = np.where(np.in1d(invInd,bad)) #np.delete(invInd,badind) if not np.array_equal(allHaloID[invInd], amigastat['Grp'][statind[invInd]]): print "f**k!" return haloid[simoutBH, stepcnt] = allHaloID[invInd] mhalo[simoutBH, stepcnt] = pynbody.array.SimArray( amigastat['Mvir(M_sol)'][statind[invInd]], munits) mstar[simoutBH, stepcnt] = pynbody.array.SimArray( amigastat['StarMass(M_sol)'][statind[invInd]], munits) mgas[simoutBH, stepcnt] = pynbody.array.SimArray( amigastat['GasMass(M_sol)'][statind[invInd]], munits) mgas[simoutBH, stepcnt] = pynbody.array.SimArray( amigastat['GasMass(M_sol)'][statind[invInd]], munits) halorad[simoutBH, stepcnt] = pynbody.array.SimArray( amigastat['Rvir(kpc)'][statind[invInd]] * s.properties['a'], posunits) scaleFac[simoutBH, stepcnt] = s.properties['a'] vel = np.array([ amigastat['VXc'][statind[invInd]], amigastat['VYc'][statind[invInd]], amigastat['VZc'][statind[invInd]] ]).T bhvel[simoutBH, stepcnt, :] = s.stars['vel'][simBH].in_units(vunits) - vel postemp = s.stars['pos'][simBH].in_units(posunits) - amigastat['cen'][ statind[invInd]] postemp[(np.abs(postemp) > boxsize / 2.)] = -1.0 * ( postemp[(np.abs(postemp) > boxsize / 2.)] / np.abs(postemp[(np.abs(postemp) > boxsize / 2.)])) * ( boxsize - np.abs(postemp[(np.abs(postemp) > boxsize / 2.)])) bhpos[simoutBH, stepcnt, :] = postemp bhmass[simoutBH, stepcnt] = s.stars['mass'][simBH].in_units(munits) dist[simoutBH, stepcnt] = np.sqrt((bhpos[simoutBH, stepcnt, :]**2).sum(axis=1)) for cnt in range(len(allHaloID)): if allHaloID[cnt] == 0: continue print allHaloID[cnt] oo, = np.where(amigastat['Grp'] == allHaloID[cnt]) #cen = pynbody.array.SimArray([amigastat['Xc'][oo[0]],amigastat['Yc'][oo[0]],amigastat['Zc'][oo[0]]],posunits) if amigastat['Mvir(M_sol)'][ (amigastat['Grp'] == allHaloID[cnt])] < minHM and allHaloID[cnt] > minNum: continue okcenter = 1 try: pynbody.analysis.halo.center(h[allHaloID[cnt]], mode=findcenter, wrap=True, cen_size='2 kpc') except ValueError: okcenter = 0 pynbody.analysis.halo.center(h[allHaloID[cnt]], mode=findcenter, Wrap=True, cen_size='2 kpc', vel=False) haloBHs, = np.where( np.in1d(h[allHaloID[cnt]].star['iord'], bhiords)) outBH, = np.where( np.in1d(bhiords, h[allHaloID[cnt]].star['iord'][haloBHs])) #pynbody.transformation.inverse_translate(s,cen) closeBHs, = np.where((s.star['r'][simBH].in_units( 'kpc') < amigastat['Rvir(kpc)'][oo] * s.properties['a']) & (s.star['amiga.grp'][simBH] > allHaloID[cnt])) closeBHs = simBH[closeBHs] otheroutBHs, = np.where(np.in1d(bhiords, s.star['iord'][closeBHs])) bhposcen[outBH, stepcnt, :] = h[ allHaloID[cnt]].stars['pos'][haloBHs].in_units(posunits) distcen[outBH, stepcnt] = h[ allHaloID[cnt]].stars['r'][haloBHs].in_units(posunits) interact[otheroutBHs, stepcnt] = allHaloID[cnt] intpos[otheroutBHs, stepcnt, :] = s.stars['pos'][closeBHs].in_units(posunits) #intvel[otheroutBHs,stepcnt,:] = 
s.stars['vel'][closeBHs].in_units(vunits) intdist[otheroutBHs, stepcnt] = s.stars['r'][closeBHs].in_units(posunits) if okcenter == 1: intvel[otheroutBHs, stepcnt, :] = s.stars['vel'][closeBHs].in_units(vunits) bhvelcen[outBH, stepcnt, :] = h[ allHaloID[cnt]].stars['vel'][haloBHs].in_units(vunits) print "deleting stuff" del (s) del (h) gc.collect() bhhalo = { 'iord': bhiords, 'mass': bhmass, 'pos': bhpos, 'poscen': bhposcen, 'vel': bhvel, 'velcen': bhvelcen, 'haloID': haloid, 'halomass': mhalo, 'halostarmass': mstar, 'halodarkmass': mdark, 'halogasmass': mgas, 'rhalo': halorad, 'dist': dist, 'distcen': distcen, 'interact': interact, 'intdist': intdist, 'intvel': intvel, 'intpos': intpos, 'scaleFac': scaleFac } if startStep != 0: bhhalo['mass'] = np.append(BHhaloOLD['mass'], bhhalo['mass'], axis=1) bhhalo['pos'] = np.append(BHhaloOLD['pos'], bhhalo['pos'], axis=1) bhhalo['poscen'] = np.append(BHhaloOLD['poscen'], bhhalo['poscen'], axis=1) bhhalo['vel'] = np.append(BHhaloOLD['vel'], bhhalo['vel'], axis=1) bhhalo['velcen'] = np.append(BHhaloOLD['velcen'], bhhalo['velcen'], axis=1) bhhalo['haloID'] = np.append(BHhaloOLD['haloID'], bhhalo['haloID'], axis=1) bhhalo['halomass'] = np.append(BHhaloOLD['halomass'], bhhalo['halomass'], axis=1) bhhalo['halostarmass'] = np.append(BHhaloOLD['halostarmass'], bhhalo['halostarmass'], axis=1) bhhalo['halodarkmass'] = np.append(BHhaloOLD['halodarkmass'], bhhalo['halodarkmass'], axis=1) bhhalo['halogasmass'] = np.append(BHhaloOLD['halogasmass'], bhhalo['halogasmass'], axis=1) bhhalo['rhalo'] = np.append(BHhaloOLD['rhalo'], bhhalo['rhalo'], axis=1) bhhalo['dist'] = np.append(BHhaloOLD['dist'], bhhalo['dist'], axis=1) bhhalo['distcen'] = np.append(BHhaloOLD['distcen'], bhhalo['distcen'], axis=1) bhhalo['interact'] = np.append(BHhaloOLD['interact'], bhhalo['interact'], axis=1) bhhalo['intdist'] = np.append(BHhaloOLD['intdist'], bhhalo['intdist'], axis=1) bhhalo['intpos'] = np.append(BHhaloOLD['intpos'], bhhalo['intpos'], axis=1) bhhalo['intvel'] = np.append(BHhaloOLD['intvel'], bhhalo['intvel'], axis=1) bhhalo['scaleFac'] = np.append(BHhaloOLD['scaleFac'], bhhalo['scaleFac'], axis=1) if filename: f = open(str(filename), 'wb') pickle.dump(bhhalo, f) f.close() return bhhalo
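# Hedged usage sketch for getBHhalo (simulation name and file names are illustrative,
# not from the source): follow every BH through the halo catalogues of all outputs in
# grpfiles.list and pickle the result; a partially complete pickle can be passed back
# in via initFile to resume.
bhhalo = getBHhalo('mysim', findcenter='hyb', minHM=1e10, filename='bhhalo.pkl')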
def res_gauss1(params, g, x): return g - gauss1(params, x) def el_stl(wam, fl, SLi, SLf): for i in range(len(SLi)): if SLi[i] > wam[-1]: break I = numpy.where((wam >= SLi[i]) & (wam <= SLf[i]))[0] fl[I] = 1.0 return fl SLi, SLf = readcol.readcol('lines2.dat', twod=False) model_path = '/media/VERBATIM/COHELO_MODELS/RES_MOD/R_60000b/' model_path = '/data/ajordan/COHELO_MODELS/R_60000b/' non_rot = os.listdir(model_path + 'vsini_0.0/') rot = [ 0.0, 2.5, 5.0, 7.5, 10.0, 15.0, 20.0, 25.0, 30.0, 35.0, 40.0, 45.0, 50.0 ] i = 0 f = open('anchos.dat', 'w') verdad = False non_rot = non_rot[200:] for fits in non_rot: sc = pyfits.getdata(model_path + 'vsini_0.0/' + fits) hd = pyfits.getheader(model_path + 'vsini_0.0/' + fits)
# DEFINITIONS # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% root = "../" reduce_key = "reduce-key.txt" flag_script_dir = "flagging/" mod_dir = "/usr/lib64/casapy/data/nrao/VLA/CalModels/" # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% # READ THE KEY FILE # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% use_tag, track_in, flags_in, \ source_in, fluxcal_in, bpcal_in, phasecal_in, \ refant_in, model_in = \ readcol(reduce_key, twod=False) use = (use_tag == 'y').nonzero() track_list = track_in flags_list = flags_in source_list = source_in fluxcal_list = fluxcal_in bpcal_list = bpcal_in phasecal_list = phasecal_in refant_list = refant_in model_list = model_in # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% # FLAGGING # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
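# Hedged sketch of the reduce-key.txt layout implied by the readcol call above (one row
# per track; every value shown is illustrative, not from the source):
#
#   use  track      flags            source   fluxcal  bpcal  phasecal    refant  model
#   y    track1.ms  track1_flags.py  NGC1234  3C286    3C286  J0359+5057  ea05    3C286_C.im
#   n    track2.ms  track2_flags.py  NGC1234  3C48     3C48   J0359+5057  ea05    3C48_C.im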
utmjd_start = np.array([ pyfits.getval(fn, 'UTMJD') for fn in file_list ]) + 2.4e6 + 0.5 # the fits header has 2,400,000.5 subtracted!!!!! utmjd = utmjd_start + (np.array(texp_list) / 2.) / 86400. sortix = np.argsort(utmjd) all_obsnames = np.array(obsname_list)[sortix] files = np.array(file_list)[sortix] wls = np.array(file_list)[sortix] all_bc = np.array(bc_list)[sortix] all_jd = utmjd[sortix] ######################################################################################################################## ######################################################################################################################## ######################################################################################################################## all_snr = readcol(path + 'tauceti_all_snr.dat', twod=False)[0] # # get mean SNR per collapsed pixel # all_snr = [] # for i, file in enumerate(files): # print('Estimating mean SNR for tau Ceti observation ' + str(i + 1) + '/' + str(len(files))) # flux = pyfits.getdata(file, 0) # err = pyfits.getdata(file, 1) # all_snr.append(get_snr(flux, err)) # np.savetxt(path + 'tauceti_all_snr.dat', np.array(all_snr)) ######################################################################################################################## ######################################################################################################################## ######################################################################################################################## # calculating the CCFs for one order / 11 orders
import numpy as np from readcol import readcol from scipy.interpolate import interp1d import os path,fname=os.path.split(__file__) data=readcol(os.path.join(path,'kPkR.dat'),colNames=['T','P161','R161','P125','R125']) R_low_func=interp1d(data['T'],data['R161']) R_high_func=interp1d(data['T'],data['R125']) P_low_func=interp1d(data['T'],data['P161']) P_high_func=interp1d(data['T'],data['P125']) def get_Semenov_opacity(T,density=10**-16.1,Rosseland=True): '''Get the disk opacity according to the iron-poor silicate model of Semenov et al. (2003). This function interpolates in temperature and density. The density interpolation is probably not what we want, and density=10**-16.1 or density=10**-12.5 should be used. inputs: T - the temperature of the medium in Kelvin density - the volumetric density in g/cm^3 Rosseland - (bool) True to use the Rosseland mean, False to use the Planck mean opacity''' if Rosseland==True: low_density=R_low_func(T) high_density=R_high_func(T) delta_density=(density-10**-16.1)/(10**-12.5-10**-16.1) return low_density+(high_density-low_density)*delta_density elif Rosseland==False: low_density=P_low_func(T) high_density=P_high_func(T) delta_density=(density-10**-16.1)/(10**-12.5-10**-16.1) return low_density+(high_density-low_density)*delta_density
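# Hedged usage sketch for get_Semenov_opacity (temperature and density values are
# illustrative, not from the source): compare the Rosseland and Planck means at the
# low-density end of the tabulated grid.
kR = get_Semenov_opacity(300., density=10**-16.1, Rosseland=True)
kP = get_Semenov_opacity(300., density=10**-16.1, Rosseland=False)
print(kR, kP)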
# Assignment: h242 simulation [focusing on galaxy merger at z=4] import pynbody import numpy as np import pandas as pd import matplotlib.pylab as plt import readcol import BH_functions as BHF files = readcol.readcol('/data/scratch/jillian/h242/files.list') files = files[:, 0] # function to find the black hole def findBH(s): #BHfilter = pynbody.filt.LowPass('tform',0.0) #BHfilter = np.where((s.stars['iord']==101863565) | ( s.stars['iord']==101863705)) BHfilter = np.where((s.stars['iord'] == 75288565) | (s.stars['iord'] == 75288831)) BH = s.stars[BHfilter] return BH #function to find the halos that the galaxy is in def findBHhalos(s): BH = findBH(s) BHhalos = BH['amiga.grp'] return BHhalos
else: # smass = mass[ iclose[ 0 ] ] smass = linint( L, m1, mass ) # lum = 10**( ( 5.11 - L + dist ) / 2.5 ) # smass = lum**(.25 ) # if on HB, all stars have same mass # if( L < 21 and L > 19.6 and color > -0.5 and color < 0.5 ): # smass = .8424 # print isocolor[ iclose[ 0 ] ], color, clim, smass, L return smass rah, ram, ras, decd, decm, decs, gmag, egmag, t1, imag, eimag, t2 = readcol( 'megacam.tab', twod = False ) nstars = len( rah ) coords = [] for i in range( 0, nstars ): stringra = str( rah[ i ] ) + ':' + str( ram[ i ] ) + ':' + str( ras[ i ] ) stringdec = str(decd[ i ] ) + ':' + str( decm[ i ] ) + ':' + str( decs[ i ] ) stringcoords = stringra + ' ' + stringdec co = c.Position( stringcoords ) coords.append( co.dd() ) coords = array( coords ) center = zeros( 2 ) center[ 0 ] = 260.055 center[ 1 ] = 57.915 # check conversions on a few of these theta = m.radians( 88. + 90. )
# Interpolate the galaxy's center-of-mass position onto the times of both black hole orbits and plot the result at the end import numpy as np import pandas as pd import matplotlib.pyplot as plt import readcol import scipy.interpolate from scipy import interpolate # Read data from files files1 = readcol.readcol('60352986.fits.txt') files2 = readcol.readcol('60354630.fits.txt') files3 = readcol.readcol('Ruth.dat') time1 = files1[:, 0] time2 = files2[:, 0] time3 = files3[:, 1] x1 = files1[:, 1] x2 = files2[:, 1] y1 = files1[:, 2] y2 = files2[:, 2] z1 = files1[:, 3] z2 = files2[:, 3] x = files3[:, 2] y = files3[:, 3] z = files3[:, 4] # interpolate the galaxy x-position as a function of time, then evaluate it at the times of BH1 '60352986' and BH2 '60354630' result1 = interpolate.interp1d(time3, x) x1_position = result1(time1) x2_position = result1(time2)
gallist = [ 'carina', 'fornax', 'sculptor', 'sex' ] d = {} d[ 'carina' ] = [ 104e3, 246.69, 325.20 ] # [ distance, Re, re ] d[ 'fornax' ] = [ 135e3, 689, 900 ] d[ 'sculptor' ] = [ 85e3, 319.17, 539.91 ] d[ 'sextans' ] = [ 85e3, 697.58, 926.30 ] for gal in gallist: if gal in pwd: #galname = gal.upper()[ 0 ] + gal[ 1: ] galname = gal if galname == 'sex': galname = 'sextans' r_surf, surf = readcol( 'surf/sb.mag', twod = False ) dist = d[ galname ][ 0 ] r_surf *= dist / 206265. zp = 26.402 surf_solar = 10**( ( zp - surf ) / 2.5 ) r_kin, sigma, sigma_err = readcol( 'kin/sigma.out', twod = False ) sigma_low_int = np.interp( r_surf, r_kin, sigma - sigma_err ) sigma_hi_int = np.interp( r_surf, r_kin, sigma + sigma_err ) sigma_int = np.interp( r_surf, r_kin, sigma ) integrand_hi = sigma_hi_int**2 * surf_solar integrand_low = sigma_low_int**2 * surf_solar integrand = sigma_int**2 * surf_solar sigma_los_hi = scipy.integrate.trapz( integrand_hi, x = r_surf ) / scipy.integrate.trapz(
import readcol import matplotlib.pyplot as plt from mpl_toolkits.mplot3d import axes3d import numpy as np fig = plt.figure() ax1 = fig.add_subplot(111, projection='3d') ebh = readcol.readcol('/home/shared/data/h148/eatenbh.orbit') Unit_conversion = 50000.0 X = ebh[:,4] * Unit_conversion Y = ebh[:,5] * Unit_conversion Z = ebh[:,6] * Unit_conversion ax1.plot(X,Y,Z) ax1.set_xlabel('x axis') ax1.set_ylabel('y axis') ax1.set_zlabel('z axis') plt.show()
import readcol f = readcol.readcol('/home/shared/data/h148/testarraymainbh.orbit') print f[0] print f.shape