def peroux06b(): """Peroux, C. et al. 2006b, A&A, 450, 53 SDSS J1323-0021 Metal rich Metal columns copied by JXP from Table 1 Total NHI from damping wings """ # Setup radec = '132323.78-002155.2' # SDSS Name lls = LLSSystem(name='SDSSJ1323-0021_z0.716', radec=radec, zem=1.390, zabs=0.716, vlim=[-200., 200.]*u.km/u.s, NHI=20.21, sig_NHI=np.array([0.20,0.20])) # Parse table file tab_fil = pyigm_path+"/data/LLS/Literature/peroux06b.tb1.ascii" with open(tab_fil,'r') as f: flines = f.readlines() ion_dict = {} for iline in flines: isplit = iline.split('\t') if len(isplit[0]) == 0: # Grab ions and init ions = isplit[3:10] for ion in ions: Zion = ltai.name_ion(ion) ion_dict[ion] = dict(clm=0., sig_clm=0.,flg_clm=1,Z=Zion[0],ion=Zion[1]) continue # Column or sigma? if isplit[0][0] == 'N': # Column for kk,iis in enumerate(isplit[3:10]): ion = ions[kk] if iis[0] == '>': ion_dict[ion]['flg_clm'] = 2 ion_dict[ion]['clm'] += float(iis[1:]) elif iis[0] == '<': pass elif iis[0] == '.': pass else: ion_dict[ion]['clm'] += float(iis) else: # Sigma for kk,iis in enumerate(isplit[3:10]): ion = ions[kk] if iis[0] == '.': pass else: ion_dict[ion]['sig_clm'] += float(iis)**2 # Convert to log for ion in ions: N = ion_dict[ion]['clm'] sig = np.sqrt(ion_dict[ion]['sig_clm']) # ion_dict[ion]['clm'] = np.log10(N) if ion_dict[ion]['flg_clm'] == 2: ion_dict[ion]['sig_clm'] = 0. else: ion_dict[ion]['sig_clm'] = sig/N/np.log(10) # Finish lls._ionN = pyiau.dict_to_ions(ion_dict) lls.Refs.append('Prx06b') return lls
def kacprzak12():
    '''Kacprzak, G. et al. 2012, MNRAS, 427, 3029-3043
    TON 153
    Taken from Table 1 by JXP
    NHI from Churchill+2007
    RA/DEC from Simbad

    Returns
    -------
    lls : LLSSystem
    '''
    # Setup
    radec = '131956.2209+272808.271'
    # NOTE(review): zem=0.6610 < zabs=1.0023 looks inverted -- confirm vs. paper
    lls = LLSSystem(name='TON153_z1.002', radec=radec, zem=0.6610,
                    zabs=1.0023, vlim=[-250., 200.]*u.km/u.s, NHI=18.30,
                    sig_NHI=np.array([0.30,0.30]))
    # Table 1 (total)
    ion_dict = {}
    ion_dict['Mg II'] = dict(clm=13.11, sig_clm=0.07,flg_clm=1,Z=12,ion=2)
    ion_dict['Mg I'] = dict(clm=11.54, sig_clm=0.06,flg_clm=1,Z=12,ion=1)
    ion_dict['Si I'] = dict(clm=11.8, sig_clm=0.00,flg_clm=3,Z=14,ion=1)
    ion_dict['Si II'] = dict(clm=13.16, sig_clm=0.11,flg_clm=1,Z=14,ion=2)
    ion_dict['Si IV'] = dict(clm=12.4, sig_clm=0.0,flg_clm=3,Z=14,ion=4)
    ion_dict['C II'] = dict(clm=13.39, sig_clm=0.0,flg_clm=2,Z=6,ion=2)
    ion_dict['C III'] = dict(clm=14.20, sig_clm=0.05,flg_clm=1,Z=6,ion=3)
    # BUG FIX: this entry was keyed 'C III' as well, silently overwriting the
    # C III measurement above; (Z=6, ion=4) identifies it as C IV.
    ion_dict['C IV'] = dict(clm=14.41, sig_clm=0.05,flg_clm=1,Z=6,ion=4)
    ion_dict['O VI'] = dict(clm=14.49, sig_clm=0.05,flg_clm=1,Z=8,ion=6)
    # Finish
    lls._ionN = pyiau.dict_to_ions(ion_dict)
    lls.Refs.append('Kcz12')
    return lls
def kacprzak12():
    '''Kacprzak, G. et al. 2012, MNRAS, 427, 3029-3043
    TON 153
    Taken from Table 1 by JXP
    NHI from Churchill+2007
    RA/DEC from Simbad

    Returns
    -------
    lls : LLSSystem
    '''
    # Setup
    radec = '131956.2209+272808.271'
    # NOTE(review): zem=0.6610 < zabs=1.0023 looks inverted -- confirm vs. paper
    lls = LLSSystem(name='TON153_z1.002', radec=radec, zem=0.6610,
                    zabs=1.0023, vlim=[-250., 200.] * u.km / u.s, NHI=18.30,
                    sig_NHI=np.array([0.30, 0.30]))
    # Table 1 (total)
    ion_dict = {}
    ion_dict['Mg II'] = dict(clm=13.11, sig_clm=0.07, flg_clm=1, Z=12, ion=2)
    ion_dict['Mg I'] = dict(clm=11.54, sig_clm=0.06, flg_clm=1, Z=12, ion=1)
    ion_dict['Si I'] = dict(clm=11.8, sig_clm=0.00, flg_clm=3, Z=14, ion=1)
    ion_dict['Si II'] = dict(clm=13.16, sig_clm=0.11, flg_clm=1, Z=14, ion=2)
    ion_dict['Si IV'] = dict(clm=12.4, sig_clm=0.0, flg_clm=3, Z=14, ion=4)
    ion_dict['C II'] = dict(clm=13.39, sig_clm=0.0, flg_clm=2, Z=6, ion=2)
    ion_dict['C III'] = dict(clm=14.20, sig_clm=0.05, flg_clm=1, Z=6, ion=3)
    # BUG FIX: this entry was keyed 'C III' as well, silently overwriting the
    # C III measurement above; (Z=6, ion=4) identifies it as C IV.
    ion_dict['C IV'] = dict(clm=14.41, sig_clm=0.05, flg_clm=1, Z=6, ion=4)
    ion_dict['O VI'] = dict(clm=14.49, sig_clm=0.05, flg_clm=1, Z=8, ion=6)
    # Finish
    lls._ionN = pyiau.dict_to_ions(ion_dict)
    lls.Refs.append('Kcz12')
    return lls
def peroux06a(): """Peroux, C. et al. 2006a, MNRAS, 372, 369 SDSS J0134+0051 One of her sample Metal columns taken by JXP from Table 2 (no online data) Total NHI from damping wings """ # Setup radec = '013405.75+005109.4' # SDSS Name lls = LLSSystem(name='SDSSJ0134+0051_z0.842', radec=radec, zem=1.522, zabs=0.842, vlim=[-150., 150.]*u.km/u.s, NHI=19.93, sig_NHI=np.array([0.15,0.15])) # Table 2 ion_dict = {} N = np.sum(np.array([5.56,12.6,13.7,23.5,61.4,39.8,6,9.14])*1e10) sig = np.sqrt(np.sum((np.array([2.32,3.1,3.68,4.13,8.02,6.65,3.37,2.82])*1e10)**2)) ion_dict['Mg I'] = dict(clm=np.log10(N), sig_clm=sig/N/np.log(10),flg_clm=1,Z=12,ion=1) ion_dict['Mg II'] = dict(clm=np.log10(5e13), sig_clm=0.,flg_clm=2,Z=12,ion=2) N = np.sum(np.array([8.17,4.28,32.1,125,710,301,893,600,263,65.7])*1e11) sig = np.sqrt(np.sum((np.array([2.63,1.40,2.37,8.6,53.2,28.4,73.5,61.7,14.0,2.95])*1e11)**2)) ion_dict['Fe II'] = dict(clm=np.log10(N), sig_clm=sig/N/np.log(10),flg_clm=1,Z=26,ion=2) sig = np.sqrt(np.sum((np.array([3.72,1.84,2.36,3.83])*1e11)**2)) ion_dict['Zn II'] = dict(clm=np.log10(2*sig), sig_clm=0.,flg_clm=3,Z=30,ion=2) sig = np.sqrt(np.sum((np.array([19.4,9.79])*1e11)**2)) ion_dict['Cr II'] = dict(clm=np.log10(2*sig), sig_clm=0.,flg_clm=3,Z=24,ion=2) # Not including MnII. Appears as a detection but also given as a limit.. # Finish lls._ionN = pyiau.dict_to_ions(ion_dict) lls.Refs.append('Prx06a') return lls
def test_fluxmodel():
    """Check the LLS flux model against a frozen reference value."""
    # Init
    lls = LLSSystem((0.*u.deg, 0.*u.deg), 2.5, None, NHI=17.9)
    # Fill LLS lines
    lls.fill_lls_lines()
    # Generate a flat unit-flux spectrum
    wave = np.arange(3000., 6500)
    npix = len(wave)
    spec = XSpectrum1D.from_tuple((wave*u.AA,np.ones(npix)))
    # Model
    model = lls.flux_model(spec)
    # CONSISTENCY FIX: use rtol=1e-5 like the companion copy of this test --
    # an exact float comparison is fragile across platforms/library versions.
    np.testing.assert_allclose(model.flux[100].value, 0.009424664763760516, rtol=1e-5)
def test_fluxmodel():
    """LLS flux model at z=2.5, NHI=17.9 against a frozen reference value."""
    # Init
    lls = LLSSystem((0. * u.deg, 0. * u.deg), 2.5, None, NHI=17.9)
    # Fill LLS lines
    lls.fill_lls_lines()
    # Generate a flat unit-flux spectrum spanning the Lyman limit
    wave = np.arange(3000., 6500)
    npix = len(wave)
    spec = XSpectrum1D.from_tuple((wave * u.AA, np.ones(npix)))
    # Model
    model = lls.flux_model(spec)
    # Single-pixel regression check; rtol guards against platform float noise
    np.testing.assert_allclose(model.flux[100].value, 0.009424664763760516, rtol=1e-5)
def main(args=None):
    """Build a DLA or LLS system from parsed arguments and write it to JSON."""
    from astropy.coordinates import SkyCoord
    from astropy import units as u
    from linetools import utils as ltu
    from pyigm.abssys.dla import DLASystem
    from pyigm.abssys.lls import LLSSystem

    pargs = parser() if args is None else args

    # Coordinates: J-coordinate string, or a (0,0) placeholder
    if pargs.jcoord is not None:
        coord = ltu.radec_to_coord(pargs.jcoord)
    else:
        coord = SkyCoord(ra=0., dec=0., unit='deg')

    # Velocity limits, if supplied as a comma-separated pair
    vlims = None
    if pargs.vlim is not None:
        vlims = [float(v) for v in pargs.vlim.split(',')] * u.km / u.s

    # Instantiate the requested system type
    if pargs.itype == 'dla':
        isys = DLASystem(coord, pargs.zabs, vlims, pargs.NHI,
                         zem=pargs.zem, sig_NHI=pargs.sigNHI)
    elif pargs.itype == 'lls':
        isys = LLSSystem(coord, pargs.zabs, vlims, NHI=pargs.NHI,
                         zem=pargs.zem, sig_NHI=pargs.sigNHI)
    else:
        raise IOError("Not prepared for this type of IGMSystem")

    # Write
    isys.write_json(pargs.outfile)
def test_simple_init():
    """LLSSystem minimal construction: default vlim, NHI passthrough, tau_LL."""
    # Init with placeholder coordinates; vlim=None should fall back to defaults
    lls = LLSSystem((0. * u.deg, 0. * u.deg), 2.0, None, NHI=17.9)
    #
    np.testing.assert_allclose(lls.vlim[0].value, -500.)
    np.testing.assert_allclose(lls.NHI, 17.9)
    # Lyman-limit optical depth regression value
    np.testing.assert_allclose(lls.tau_LL, 5.035377286841938, rtol=1e-5)
def __init__(self, abssys_list, parent=None, only_one=False, linelist=None, no_buttons=False):
    '''Widget listing absorption systems loaded from a set of files.

    Parameters
    ----------
    abssys_list : list
      Filenames passed to LLSSystem.from_absid_fil
    parent : QWidget, optional
    only_one: bool, optional
      Restrict to one selection at a time? [False]
    linelist : LineList, optional
      Reused if provided; otherwise an ISM LineList is built (slow)
    no_buttons: bool, optional
      Eliminate Refine/Reload buttons?
    '''
    super(AbsSysWidget, self).__init__(parent)
    #if not status is None:
    #    self.statusBar = status
    self.abssys_list = abssys_list
    # Speeds things up to share a pre-built LineList
    if linelist is None:
        self.linelist = LineList('ISM')
    else:
        self.linelist = linelist
    # Create the line list widget
    list_label = QtGui.QLabel('Abs Systems:')
    self.abslist_widget = QtGui.QListWidget(self)
    if not only_one:
        self.abslist_widget.setSelectionMode(QtGui.QAbstractItemView.ExtendedSelection)
    self.abslist_widget.addItem('None')
    #self.abslist_widget.addItem('Test')
    # Bookkeeping lists (selection state and loaded systems)
    self.abs_sys = []
    self.items = []
    self.all_items = []
    self.all_abssys = []
    for abssys_fil in self.abssys_list:
        self.all_abssys.append(LLSSystem.from_absid_fil(abssys_fil,
                                                        linelist=self.linelist))
        self.add_item(abssys_fil)
    self.abslist_widget.setCurrentRow(0)
    self.abslist_widget.itemSelectionChanged.connect(self.on_list_change)
    # Layout
    vbox = QtGui.QVBoxLayout()
    vbox.addWidget(list_label)
    # Buttons
    if not no_buttons:
        buttons = QtGui.QWidget()
        self.refine_button = QtGui.QPushButton('Refine', self)
        #self.refine_button.clicked.connect(self.refine) # CONNECTS TO A PARENT
        reload_btn = QtGui.QPushButton('Reload', self)
        reload_btn.clicked.connect(self.reload)
        hbox1 = QtGui.QHBoxLayout()
        hbox1.addWidget(self.refine_button)
        hbox1.addWidget(reload_btn)
        buttons.setLayout(hbox1)
        vbox.addWidget(buttons)
    vbox.addWidget(self.abslist_widget)
    self.setLayout(vbox)
def reload(self):
    """Re-read every system file in ``self.abssys_list`` and refresh the list."""
    print('AbsSysWidget: Reloading systems..')
    self.all_abssys = [LLSSystem.from_absid_fil(abssys_fil, linelist=self.linelist)
                       for abssys_fil in self.abssys_list]
    self.on_list_change()
def reload(self):
    """Rebuild the full system list from disk and notify the selection handler."""
    print('AbsSysWidget: Reloading systems..')
    systems = []
    for sys_fil in self.abssys_list:
        systems.append(LLSSystem.from_absid_fil(sys_fil, linelist=self.linelist))
    self.all_abssys = systems
    self.on_list_change()
def add_LLS(self, z, NHI=17.3, bval=20. * u.km / u.s, comment='None', model=True): """Generate a new LLS """ # new_sys = LLSSystem((0 * u.deg, 0 * u.deg), z, [-300., 300] * u.km / u.s, NHI=NHI) new_sys.bval = bval # This is not standard, but for convenience new_sys.comment = comment new_sys.fill_lls_lines(bval=bval, do_analysis=0) #QtCore.pyqtRemoveInputHook() #pdb.set_trace() #QtCore.pyqtRestoreInputHook() # Name self.count_lls += 1 new_sys.label = 'LLS_Sys_{:d}'.format(self.count_lls) # Add self.abssys_widg.add_fil(new_sys.label) self.abssys_widg.all_abssys.append(new_sys) self.abssys_widg.abslist_widget.item(len( self.abssys_widg.all_abssys)).setSelected(True) # Update self.llist['Plot'] = False # Turn off metal-lines if model: # For dealing with initialization self.update_model()
def test_dat_init():
    """Read a JXP-style .dat file when the LLSTREE environment variable is set."""
    tree = os.getenv('LLSTREE')
    if tree is None:
        # No local data tree available; nothing to check
        assert True
        return
    lls = LLSSystem.from_datfile('Data/UM184.z2929.dat', tree=tree)
    np.testing.assert_allclose(lls.zabs, 2.93012)
def test_parse_ion():
    """Parse a JXP .ion file when the LLSTREE environment variable is set."""
    tree = os.getenv('LLSTREE')
    if tree is None:
        assert True
        return
    lls = LLSSystem.from_datfile('Data/UM184.z2929.dat', tree=tree)
    lls.get_ions(use_Nfile=True)
    assert len(lls._ionN) == 13
def insert_dlas(sightline, overlap=False, rstate=None, slls=False, mix=False, high=False, noise=False):
    """ Insert a DLA into input spectrum
    Also adjusts the noise
    Will also add noise 'everywhere' if requested

    Parameters
    ----------
    sightline: dla_cnn.data_model.sightline.Sightline object
    overlap: bool
      Passed to init_zabs to allow overlapping absorbers
    rstate : RandomState, optional
      Seeded generator for reproducibility
    slls, mix, high : bool, optional
      Passed to uniform_NHI to set the NHI distribution
    noise: bool, optional
      Add Gaussian noise scaled by the absorption model

    Returns
    -------
    None  (the sightline is modified in place)
    """
    # init
    if rstate is None:
        rstate = np.random.RandomState()
    spec = XSpectrum1D.from_tuple(
        (10**sightline.loglam, sightline.flux))  # generate xspectrum1d
    # Generate DLAs
    dlas = []
    spec_dlas = []
    zabslist = init_zabs(sightline, overlap)
    # BUG FIX: 'jj' was used below but never defined (NameError); enumerate
    # supplies the per-absorber index for the Dla id tag.
    for jj, zabs in enumerate(zabslist):
        # Random NHI
        NHI = uniform_NHI(slls=slls, mix=mix, high=high)
        spec_dla = Dla((1 + zabs) * 1215.6701, NHI, '00' + str(jj))
        if slls or mix:
            dla = LLSSystem((sightline.ra, sightline.dec), zabs, None, NHI=NHI)
        else:
            dla = DLASystem((sightline.ra, sightline.dec), zabs, None, NHI)
        dlas.append(dla)
        spec_dlas.append(spec_dla)
    # Insert dlas to one sightline
    vmodel, _ = hi_model(dlas, spec, fwhm=3.)
    # add noise (renamed from 'noise' to avoid shadowing the bool parameter)
    if noise:
        rand = rstate.randn(len(sightline.flux))
        noise_arr = rand * sightline.error * np.sqrt(1 - vmodel.flux.value**2)
    else:
        noise_arr = 0
    final_spec = XSpectrum1D.from_tuple(
        (vmodel.wavelength, spec.flux.value * vmodel.flux.value + noise_arr))
    # generate new sightline
    sightline.flux = final_spec.flux.value
    sightline.dlas = spec_dlas
    sightline.s2n = estimate_s2n(sightline)
def nestor08():
    '''Nestor, D. et al. 2008, MNRAS, 390, 1670-1682
    Q2149+212
    Taken from Table 1 by JXP
    NHI from RTN06 (damping wings)
    RA/DEC from STIS header
    '''
    # System definition
    lls = LLSSystem(name='SDSSJ2151+2130_z1.002',
                    radec=(327.94096*u.deg, 21.503750*u.deg),
                    zem=1.534, zabs=1.0023,
                    vlim=[-300., 300.]*u.km/u.s,
                    NHI=19.30, sig_NHI=np.array([0.10,0.10]))
    # Meiring06, Table 4 -- both entries are upper limits (flg_clm=3)
    ion_dict = {
        'Zn II': dict(clm=12.13, sig_clm=0., flg_clm=3, Z=30, ion=2),
        'Cr II': dict(clm=12.59, sig_clm=0., flg_clm=3, Z=24, ion=2),
    }
    # Finish
    lls._ionN = pyiau.dict_to_ions(ion_dict)
    lls.Refs.append('Nes08')
    return lls
def get_dla(zabs, NHI, matrix_lam, matrix_flux, wvoff=60.):
    """Return the Lya-only absorption profile around a DLA/LLS.

    The absorber class is chosen by NHI (LLSSystem below the DLA threshold
    of 20.3, DLASystem otherwise); the returned flux is scaled by a flat
    pseudo-continuum of 1.5.
    """
    spec = XSpectrum1D.from_tuple((matrix_lam, matrix_flux))
    # Pick the absorber type from the column density
    if NHI < 20.3:
        absorber = LLSSystem((0, 0), zabs, None, NHI=NHI)
    else:
        absorber = DLASystem((0, 0), zabs, None, NHI)
    # Window of +/- (wvoff + 30) Angstroms around observed Lya
    wvcen = (1 + zabs) * 1215.67
    wv = spec.wavelength.value
    gd_wv = (wv > wvcen - wvoff - 30) & (wv < wvcen + wvoff + 30)
    co = 1.5  # np.mean(spec.flux[gd_wv])#amax
    lya, lines = hi_model(absorber, spec, lya_only=True)
    return lya.wavelength[gd_wv], co * lya.flux[gd_wv]
def main(args=None):
    """Load an IGM object from a named-class JSON file, print its name and,
    where applicable, pretty-print its table.

    Raises
    ------
    KeyError : JSON lacks a 'class' entry
    IOError : unsupported class name
    """
    from linetools import utils as ltu
    pargs = parser()
    # Read
    jdict = ltu.loadjson(pargs.jsonfile)
    if 'class' not in jdict.keys():
        raise KeyError("This script only works with JSON files with named classes")
    if jdict['class'] == 'IGMSightline':
        from pyigm.igm.igmsightline import IGMSightline
        obj = IGMSightline.from_dict(jdict)
        flg_tbl = True
    elif jdict['class'] == 'DLASystem':
        from pyigm.abssys.dla import DLASystem
        obj = DLASystem.from_dict(jdict)
        flg_tbl = False
    elif jdict['class'] == 'LLSSystem':
        from pyigm.abssys.lls import LLSSystem
        obj = LLSSystem.from_dict(jdict)
        obj.fill_ionN()
        flg_tbl = True  # Column density table
    else:
        raise IOError("Not prepared for this class: {:s}".format(jdict['class']))
    # name -- accept either key spelling (was a bare 'except:' before)
    try:
        name = jdict['name']
    except KeyError:
        name = jdict.get('Name', 'None')
    print("Name of object: {:s}".format(name))
    # Generate table
    tbl = None
    if flg_tbl:
        if jdict['class'] == 'IGMSightline':
            tbl = obj.build_table()
        elif jdict['class'] == 'LLSSystem':
            tbl = obj._ionN
            tbl['logN'].format = '5.2f'
            tbl['sig_logN'].format = '5.2f'
            tbl['vmin'].format = '8.1f'
            tbl['vmax'].format = '8.1f'
    # Print
    # BUG FIX: the original evaluated len(tbl) when tbl was None (the
    # DLASystem path), raising TypeError instead of the "empty" message.
    if tbl is not None and len(tbl) > 0:
        tbl.pprint(99999, max_width=120)
    else:
        print("Table was empty..")
def meiring06():
    """Meiring et al. 2006, MNRAS, 370, 43
    Q1107+0003
    Taken from Table 4 by JXP
    NHI from RTN06 (damping wings)
    RA/DEC from STIS header
    """
    # System definition
    lls = LLSSystem(name='SDSSJ1107+0003_z0.954',
                    radec=(166.90273*u.deg, 0.05795000*u.deg),
                    zem=1.726, zabs=0.9542,
                    vlim=[-300., 300.]*u.km/u.s,
                    NHI=20.26, sig_NHI=np.array([0.14,0.09]))
    # Meiring06, Table 4 -- all three entries are upper limits (flg_clm=3)
    ion_dict = {
        'Zn II': dict(clm=12.08, sig_clm=0., flg_clm=3, Z=30, ion=2),
        'Ti II': dict(clm=13.01, sig_clm=0., flg_clm=3, Z=22, ion=2),
        'Cr II': dict(clm=12.76, sig_clm=0., flg_clm=3, Z=24, ion=2),
    }
    # Finish
    lls._ionN = pyiau.dict_to_ions(ion_dict)
    lls.Refs.append('Mei06')
    return lls
def zonak2004(): """Zonak, S. et al. 2004, ApJ, 2004, 606, 196 PG1634+706 HST+Keck spectra MgII, SiIV, SiIII from Table 2. Summing Subsystems A (Model 2) and B Errors estimated by JXP (not reported) SiIII in A may be a model SiIV in B may be a model Total NHI from LL. Taken from Fig 3 caption. Error estimated by JXP Not all EWs in Table 1 included Adopting their M/H """ # Setup radec = '163428.9897+703132.422' # SIMBAD lls = LLSSystem(name='PG1634+706_z1.041', radec=radec, zem=1.337, zabs=1.0414, vlim=[-250., 100.]*u.km/u.s, NHI=17.23, ZH=-1.4, sig_NHI=np.array([0.15,0.15])) # SubSystems lls.nsub = 2 # Abundances adict = dict(MgII={'clm': log_sum([11.45,11.90,12.02,11.68]), 'sig_clm': 0.05, 'flg_clm': 1}, SiIII={'clm': log_sum([12.5,12.5,12.8,12.7]), 'sig_clm': 0.25, 'flg_clm': 1}, SiIV={'clm': log_sum([10.9,10.8,11.2,11.1]), 'sig_clm': 0.15, 'flg_clm': 1} ) lls.subsys['A'] = AbsSubSystem(lls, 1.0414, [-80, 100]*u.km/u.s, 'A') lls.subsys['A']._ionN = pyiau.dict_to_ions(adict) bdict = dict(SiIII={'clm': log_sum([11.8,12.8,12.4]), 'sig_clm': 0.15, 'flg_clm': 1}, SiIV={'clm': log_sum([11.2,12.2,11.8]), 'sig_clm': 0.15, 'flg_clm': 1} ) lls.subsys['B'] = AbsSubSystem(lls, 1.0414, [-240, -80]*u.km/u.s, 'B') lls.subsys['B']._ionN = pyiau.dict_to_ions(bdict) # Total lls._ionN = pyiau.sum_ionN(lls.subsys['A']._ionN, lls.subsys['B']._ionN) lls.Refs.append('Zon04') # Return return lls
def add_LLS(self, z, NHI=17.3, bval=20.0 * u.km / u.s, comment="None", model=True): """Generate a new LLS """ # new_sys = LLSSystem((0 * u.deg, 0 * u.deg), z, [-300.0, 300] * u.km / u.s, NHI=NHI) new_sys.bval = bval # This is not standard, but for convenience new_sys.comment = comment new_sys.fill_lls_lines(bval=bval, do_analysis=0) # Name self.count_lls += 1 new_sys.label = "LLS_Sys_{:d}".format(self.count_lls) # Add self.abssys_widg.add_fil(new_sys.label) self.abssys_widg.all_abssys.append(new_sys) self.abssys_widg.abslist_widget.item(len(self.abssys_widg.all_abssys)).setSelected(True) # Update self.llist["Plot"] = False # Turn off metal-lines if model: # For dealing with initialization self.update_model()
def tripp2005():
    '''Tripp, T. et al. 2005, ApJ, 2005, 619, 714
    PG 1216+069 (LLS in Virgo)
    HST/STIS, FUSE
    Metal columns parsed from Tables 2 and 3
    Total NHI from damping wings
    M/H from O/H

    Returns
    -------
    lls : LLSSystem
    '''
    # Grab ASCII files from ApJ (cached locally once downloaded)
    tab_fils = [
        pyigm_path + "/data/LLS/tripp2005.tb3.ascii",
        pyigm_path + "/data/LLS/tripp2005.tb2.ascii"
    ]
    urls = [
        'http://iopscience.iop.org/0004-637X/619/2/714/fulltext/60797.tb3.txt',
        'http://iopscience.iop.org/0004-637X/619/2/714/fulltext/60797.tb2.txt'
    ]
    for jj, tab_fil in enumerate(tab_fils):
        chk_fil = glob.glob(tab_fil)
        if len(chk_fil) > 0:
            tab_fil = chk_fil[0]
        else:
            url = urls[jj]
            print('LLSSurvey: Grabbing table file from {:s}'.format(url))
            f = urlopen(url)
            with open(tab_fil, "wb") as code:
                code.write(f.read())
    # Setup
    radec = '121920.9320+063838.476'  # SIMBAD
    lls = LLSSystem(name='PG1216+069_z0.006', radec=radec, zem=0.3313,
                    zabs=0.00632, vlim=[-100., 100.] * u.km / u.s, NHI=19.32,
                    ZH=-1.6, sig_NHI=np.array([0.03, 0.03]))
    # Columns
    # Start with Table 3 (VPFIT); components of the same ion are summed
    with open(tab_fils[0], 'r') as f:
        flines3 = f.readlines()
    ion_dict = {}
    for iline in flines3:
        if (len(iline.strip()) == 0):
            continue
        isplit = iline.split('\t')
        # Ion: a non-empty first field starts a new ion; flg=2 marks a
        # continuation component added onto the previous ion
        flg = 2
        if (len(isplit[0].strip()) > 0):  # & (isplit[0][0] not in ['1','2']):
            ipos = isplit[0].find('1')
            ionc = isplit[0][0:ipos - 1].strip()
            try:
                Zion = ltai.name_ion(ionc)
            except KeyError:
                pdb.set_trace()
            flg = 1
        # Column value and error from 'val +/- sig'
        csplit = isplit[3].split(' ')
        clm = float(csplit[0])
        sig = float(csplit[2])
        if flg == 1:
            ion_dict[ionc] = dict(logN=clm, sig_logN=sig, flag_N=1,
                                  Z=Zion[0], ion=Zion[1])
        else:  # Add this component into the running total
            tmp_dict = dict(logN=clm, sig_logN=sig, flag_N=1,
                            Z=Zion[0], ion=Zion[1])
            flagN, logN, siglogN = ltaa.sum_logN(ion_dict[ionc], tmp_dict)
            ion_dict[ionc]['logN'] = logN
            ion_dict[ionc]['sig_logN'] = siglogN
    ions = ion_dict.keys()
    # Now Table 2 for the extras (upper limits only)
    with open(tab_fils[1], 'r') as f:
        flines2 = f.readlines()
    # Trim the first 10 lines
    flines2 = flines2[10:]
    # Loop
    for iline in flines2:
        isplit = iline.split('\t')
        #
        ionc = isplit[0].strip()
        if (len(ionc) == 0) or (ionc in ions):
            continue
        #
        Zion = ltai.name_ion(ionc)
        ion_dict[ionc] = dict(Z=Zion[0], ion=Zion[1], sig_logN=0.)
        if isplit[4][0] == '<':
            ion_dict[ionc]['logN'] = float(isplit[4][1:])
            ion_dict[ionc]['flag_N'] = 3
        else:
            raise ValueError('Should not get here')
    # Finish
    lls._ionN = pyiau.dict_to_ions(ion_dict)
    lls.Refs.append('Tri05')
    return lls
def battisti12():
    '''Battisti, A. et al. 2012, ApJ, 744, 93
    HST/COS
    QSO info from Table 1
    Metal columns parsed from Table 3
    NHI from Lya

    Returns
    -------
    fin_slls : list of LLSSystem
        Only the SLLS (NHI < 20.3) from the sample.
    '''
    all_lls = []
    # Grab ASCII files from ApJ (cached locally once downloaded)
    tab_fils = [
        pyigm_path + "/data/LLS/Literature/battisti12.tb1.ascii",
        pyigm_path + "/data/LLS/Literature/battisti12.tb3.ascii"
    ]
    urls = [
        'http://iopscience.iop.org/0004-637X/744/2/93/suppdata/apj413924t1_ascii.txt',
        'http://iopscience.iop.org/0004-637X/744/2/93/suppdata/apj413924t3_ascii.txt'
    ]
    for jj, tab_fil in enumerate(tab_fils):
        chk_fil = glob.glob(tab_fil)
        if len(chk_fil) > 0:
            tab_fil = chk_fil[0]
        else:
            url = urls[jj]
            print('LLSSurvey: Grabbing table file from {:s}'.format(url))
            f = urlopen(url)
            with open(tab_fil, "wb") as code:
                code.write(f.read())
    # QSO info -- only rows beginning 'SD' (SDSS names) are data
    with open(tab_fils[0], 'r') as f:
        flines1 = f.readlines()
    # Grab RA/DEC
    all_idict = []
    for iline in flines1:
        if iline[0:2] != 'SD':
            continue
        # Parse
        isplit = iline.split('\t')
        name = isplit[0].split(' ')[1]
        radec = name[1:]  # Coordinates embedded in the J-name
        zem = float(isplit[1].strip())
        zabs = float(isplit[2].strip())
        NHI = float(isplit[3].strip()[0:4])
        sigNHI = np.array([float(isplit[3].strip()[11:])] * 2)
        # Save
        lls = LLSSystem(name=name, radec=radec, zem=zem, zabs=zabs, NHI=NHI,
                        sig_NHI=sigNHI, vlim=[-500, 500] * u.km / u.s)
        #
        all_lls.append(lls)
        all_idict.append({})  # Per-system ion dict, filled from Table 3 below
    # Abundances -- Table 3: one row per ion, one column per system
    with open(tab_fils[1], 'r') as f:
        flines3 = f.readlines()
    flines3 = flines3[5:]
    ion = None
    for iline in flines3:
        if ion == 'Ni II':
            # Ni II is the last ion of interest; stop here
            break
        isplit = iline.split('\t')
        if isplit[0] == 'C II*':  # Skipping CII*
            continue
        # ion name: walk back from the end to the last I/V of the roman numeral
        ipos = -1
        while (isplit[0][ipos] not in ['I', 'V']):
            ipos -= 1
        ion = isplit[0][0:ipos + 1 + len(isplit[0])]
        Zion = ltai.name_ion(ion)
        # Loop on systems (one column per LLS)
        for kk, iis in enumerate(isplit[1:-1]):
            if iis.strip()[0] == '.':
                # Missing entry ('...')
                continue
            all_idict[kk][ion] = dict(Z=Zion[0], ion=Zion[1], sig_clm=0.)
            if iis[0] == '>':
                all_idict[kk][ion]['flg_clm'] = 2  # Lower limit
                all_idict[kk][ion]['clm'] = float(iis[1:6])
            elif iis[0] == '<':
                all_idict[kk][ion]['flg_clm'] = 3  # Upper limit
                all_idict[kk][ion]['clm'] = float(iis[1:])
            else:
                all_idict[kk][ion]['flg_clm'] = 1  # Detection
                all_idict[kk][ion]['clm'] = float(iis[0:5])
                all_idict[kk][ion]['sig_clm'] = float(iis[-4:])
    # Attach ion tables, then return SLLS only
    for kk, lls in enumerate(all_lls):
        try:
            lls._ionN = pyiau.dict_to_ions(all_idict[kk])
        except ValueError:
            pdb.set_trace()
        lls.Refs.append('Bat12')
    fin_slls = [ills for ills in all_lls if ills.NHI < 20.3]
    return fin_slls
def jenkins2005():
    """Jenkins, E. et al. 2005, ApJ, 2005, 623, 767
    PHL 1811
    HST/STIS, FUSE
    Metals parsed from Table 1
    OI taken from text
    Had to input error on columns by hand (JXP)
    Total NHI from Lyman series. see Fig 3
    M/H from O/H

    Returns
    -------
    lls : LLSSystem
        With `lines` (AbsLine list) and `_ionN` filled.
    """
    # Grab ASCII file from ApJ (cached locally once downloaded)
    tab_fil = pyigm_path + "/data/LLS/Literature/jenkins2005.tb1.ascii"
    chk_fil = glob.glob(tab_fil)
    if len(chk_fil) > 0:
        tab_fil = chk_fil[0]
    else:
        url = 'http://iopscience.iop.org/0004-637X/623/2/767/fulltext/61520.tb1.txt'
        print('LLSSurvey: Grabbing table file from {:s}'.format(url))
        f = urlopen(url)
        with open(tab_fil, "wb") as code:
            code.write(f.read())
    # Setup
    radec = '215501.5152-092224.688'  # SIMBAD
    lls = LLSSystem(name='PHL1811_z0.081', radec=radec, zem=0.192,
                    zabs=0.080923, vlim=[-100., 100.] * u.km / u.s, NHI=17.98,
                    ZH=-0.19, sig_NHI=np.array([0.05, 0.05]))
    lls.lines = []  # Probably not used
    # AbsLines
    ism = LineList('ISM')
    # Hand-entered column errors per ion (not reported in the paper)
    Nsig = {
        'C IV': 0.4,
        'N II': 0.4,
        'Si II': 0.05,
        'Si IV': 0.25,
        'S II': 0.2,
        'Fe II': 0.12,
        'H I': 0.05,
        'S III': 0.06
    }
    # Parse Table
    with open(tab_fil, 'r') as f:
        flines = f.readlines()
    ion_dict = {}
    for iline in flines:
        iline = iline.strip()
        if (len(iline) == 0):
            continue
        # Split on tabs
        isplit = iline.split('\t')
        # Offset? Continuation rows start with a wavelength digit, shifting
        # every field index by one
        ioff = 0
        if isplit[0][0] in ['1', '2']:
            ioff = -1
        # Catch bad lines
        if (isplit[1 + ioff][0:6] in ['1442.0', '1443.7', '1120.9']):
            # Skip goofy CII line and CII*
            continue
        if len(isplit[2 + ioff]) == 0:
            continue
        # Ion: only rows that start a new ion update ionc/Zion; continuation
        # rows reuse the previous values
        if (len(isplit[0].strip()) > 0) & (isplit[0][0] not in ['1', '2']):
            ionc = isplit[0].strip()
            try:
                Zion = ltai.name_ion(ionc)
            except KeyError:
                pdb.set_trace()
        # Generate the Line
        try:
            newline = AbsLine(float(isplit[2 + ioff]) * u.AA, linelist=ism,
                              closest=True)
        except ValueError:
            pdb.set_trace()
        newline.attrib['z'] = lls.zabs
        # Spectrum
        newline.analy['datafile'] = 'STIS' if 'S' in isplit[1] else 'FUSE'
        # EW (table values are in mAA)
        try:
            EWvals = isplit[4 + ioff].split(' ')
        except IndexError:
            pdb.set_trace()
        newline.attrib['EW'] = float(EWvals[0]) * u.AA / 1e3
        newline.attrib['sig_EW'] = float(EWvals[2]) * u.AA / 1e3
        newline.attrib['flag_EW'] = 1
        if len(isplit) < (5 + ioff + 1):
            continue
        # Colm? Leading character keys the measurement type:
        # '\' -> lower limit, '<' -> upper limit, '1' -> detection
        #xdb.set_trace()
        newline.attrib['sig_logN'] = 0.
        if (len(isplit[5 + ioff].strip()) > 0) & (isplit[5 + ioff].strip() != '\\ldots'):
            if isplit[5 + ioff][0] == '\\':
                ipos = isplit[5 + ioff].find(' ')
                newline.attrib['logN'] = float(isplit[5 + ioff][ipos + 1:])
                newline.attrib['flag_N'] = 2
            elif isplit[5 + ioff][0] == '<':
                ipos = 0
                newline.attrib['logN'] = float(isplit[5 + ioff][ipos + 1:])
                newline.attrib['flag_N'] = 3
            elif isplit[5 + ioff][0] == '1':
                try:
                    newline.attrib['logN'] = float(isplit[5 + ioff][0:5])
                except ValueError:
                    pdb.set_trace()
                newline.attrib['flag_N'] = 1
                try:
                    newline.attrib['sig_logN'] = Nsig[ionc]
                except KeyError:
                    print('No error for {:s}'.format(ionc))
            else:
                raise ValueError('Bad character')
        # ion_dict -- last line of each ion wins
        ion_dict[ionc] = dict(clm=newline.attrib['logN'],
                              sig_clm=newline.attrib['sig_logN'],
                              flg_clm=newline.attrib['flag_N'],
                              Z=Zion[0], ion=Zion[1])
        # Append
        lls.lines.append(newline)
    # Fix NI, OI (values from the text, not the table)
    ion_dict['O I']['clm'] = 14.47
    ion_dict['O I']['sig_clm'] = 0.05
    ion_dict['N I']['flg_clm'] = 3
    lls._ionN = pyiau.dict_to_ions(ion_dict)
    lls.Refs.append('Jen05')
    # Return
    return lls
def meiring07():
    """Meiring et al. 2007, MNRAS, 376, 557
    SLLS with Magellan
    Abundances from Table 11 from astro-ph (LateX) by JXP [AODM]
    RA/DEC from Table 1

    Returns
    -------
    fin_slls : list of LLSSystem
        Only the SLLS (NHI < 20.3) from the sample.
    """
    all_lls = []
    # Table 1 -- QSO coordinates and emission redshifts (LaTeX '&' separated)
    tab_fil = pyigm_path+"/data/LLS/Literature/meiring07.tb1.ascii"
    with open(tab_fil,'r') as f:
        flines1 = f.readlines()
    # Grab RA/DEC
    qso_dict = {}
    for iline in flines1:
        if iline[0:2] in ['QS','\h','$\\', 'J2']:
            # Header / LaTeX markup rows
            continue
        # Parse
        isplit = iline.split('&')
        # Declination sign: add '+' only when the field lacks a '-'
        if '-' not in isplit[3]:
            sgn = '+'
        else:
            sgn = ''
        radec = isplit[2].strip()+sgn+isplit[3].strip()
        radec = radec.replace(':','')
        # zem -- Q0826-2230 has no usable table value; hard-coded
        if isplit[0].strip() != 'Q0826-2230':
            zem = float(isplit[5].strip())
        else:
            zem = 0.911
        # Save
        qso_dict[isplit[0].strip()] = dict(radec=radec, zem=zem,
                                           vlim=[-500.,500]*u.km/u.s)
    # Abundances (AODM)
    # Table 11 -- alternating header ('QS'), system ('Q...'), and column rows
    tab_fil = pyigm_path+"/data/LLS/Literature/meiring07.tb11.ascii"
    with open(tab_fil,'r') as f:
        flines11 = f.readlines()
    #
    for iline in flines11:
        if iline[0:2] in ['\h',' ']:
            continue
        # Parse
        isplit = iline.split('&')
        # Ions: a 'QS' header row defines the ion order for following rows
        if iline[0:2] == 'QS':
            ioncs = []
            Zions = []
            for iis in isplit[3:-1]:  # Skipping HI
                # Parse LaTeX ion label, e.g. '\ion{Fe}{II}'
                is2 = iis.split('\\')
                ip2 = is2[2].find('}')
                ionc = is2[1][2:].strip()+' '+is2[2][0:ip2].strip()
                # Zion
                Zion = ltai.name_ion(ionc)
                # Append
                ioncs.append(ionc)
                Zions.append(Zion)
            continue
        if iline[0] == 'Q':
            # QSO row: defines zabs/NHI and creates the LLS
            qso = isplit[0].strip()
            # zabs and name
            zabs = float(isplit[1].strip())
            qso_dict[qso]['name']=qso+'z_{:.3f}'.format(zabs)
            qso_dict[qso]['zabs']=zabs
            # NHI
            is2 = isplit[2].strip()
            qso_dict[qso]['NHI'] = float(is2[0:5])
            #if qso_dict[qso]['NHI'] >= 20.3:
            #    print('Uh oh.  DLA')
            qso_dict[qso]['sig_NHI'] = np.array([float(is2[10:])]*2)
            # Generate LLS
            lls = LLSSystem(**qso_dict[qso])
            continue
        else:
            # AODM column row for the LLS created just above
            ion_dict = {}
            for kk,iis in enumerate(isplit[3:-1]):
                is2 = iis.strip()
                if is2[0:3] == '$>$':
                    # Lower limit
                    ion_dict[ioncs[kk]] = dict(sig_clm=0.,flg_clm=2,
                                               Z=Zions[kk][0],ion=Zions[kk][1])
                    ion_dict[ioncs[kk]]['clm'] = float(is2[3:])
                elif is2[0:3] == '$<$':
                    # Upper limit
                    ion_dict[ioncs[kk]] = dict(sig_clm=0.,flg_clm=3,
                                               Z=Zions[kk][0],ion=Zions[kk][1])
                    ion_dict[ioncs[kk]]['clm'] = float(is2[3:])
                elif len(is2) == 0:
                    pass
                else:
                    # Detection with error
                    ion_dict[ioncs[kk]] = dict(flg_clm=1,Z=Zions[kk][0],
                                               ion=Zions[kk][1])
                    ion_dict[ioncs[kk]]['clm'] = float(is2[0:5])
                    ion_dict[ioncs[kk]]['sig_clm'] = float(is2[10:])
            # Finish
            lls._ionN = pyiau.dict_to_ions(ion_dict)
            lls.Refs.append('Mei07')
            all_lls.append(lls)
    # Return SLLS only
    fin_slls = [ills for ills in all_lls if ills.NHI < 20.3]
    return fin_slls
def insert_dlas(spec, zem, fNHI=None, rstate=None, slls=False, mix=False, high=False,
                low_s2n=False, noise_boost=4.):
    """ Insert a DLA into input spectrum
    Also adjusts the noise
    Will also add noise 'everywhere' if requested

    Parameters
    ----------
    spec : XSpectrum1D
    zem : float
      Emission redshift; absorbers are only placed below it
    fNHI : callable, optional
      Inverse-CDF sampler for NHI; built via init_fNHI if not given
    rstate : RandomState, optional
    slls, mix, high : bool, optional
      Control the NHI distribution and absorber class
    low_s2n : bool, optional
      Reduce the S/N everywhere.  By a factor of noise_boost
    noise_boost : float, optional
      Factor to *increase* the noise by

    Returns
    -------
    final_spec : XSpectrum1D
    dlas : list
      List of DLAs inserted
    """
    from pyigm.fN import dla as pyi_fd
    from pyigm.abssys.dla import DLASystem
    from pyigm.abssys.lls import LLSSystem
    from pyigm.abssys.utils import hi_model
    # Init
    if rstate is None:
        rstate = np.random.RandomState()
    if fNHI is None:
        fNHI = init_fNHI(slls=slls, mix=mix, high=high)
    # Allowed redshift placement
    ## Cut on zem and 910A rest-frame
    zlya = spec.wavelength.value / 1215.67 - 1
    dz = np.roll(zlya, -1) - zlya
    dz[-1] = dz[-2]
    gdz = (zlya < zem) & (spec.wavelength > 910. * u.AA * (1 + zem))
    # l(z) -- Uses DLA for SLLS too which is fine
    lz = pyi_fd.lX(zlya[gdz], extrap=True, calc_lz=True)
    cum_lz = np.cumsum(lz * dz[gdz])
    tot_lz = cum_lz[-1]
    # Inverse CDF over redshift for drawing absorber positions
    fzdla = interpolate.interp1d(cum_lz / tot_lz, zlya[gdz],
                                 bounds_error=False,
                                 fill_value=np.min(zlya[gdz]))
    #
    # n DLA -- Poisson draw, repeated until at least one absorber
    nDLA = 0
    while nDLA == 0:
        nval = rstate.poisson(tot_lz, 100)
        gdv = nval > 0
        if np.sum(gdv) == 0:
            continue
        else:
            nDLA = nval[np.where(gdv)[0][0]]
    # Generate DLAs
    dlas = []
    for jj in range(nDLA):
        # Random z
        zabs = float(fzdla(rstate.random_sample()))
        # Random NHI
        NHI = float(fNHI(rstate.random_sample()))
        if (slls or mix):
            dla = LLSSystem((0., 0), zabs, None, NHI=NHI)
        else:
            dla = DLASystem((0., 0), zabs, (None, None), NHI)
        dlas.append(dla)
    # Insert
    # NOTE(review): 'llist' is not defined in this function -- presumably a
    # module-level line list; confirm it exists at import time
    vmodel, _ = hi_model(dlas, spec, fwhm=3., llist=llist)
    # Add noise
    rand = rstate.randn(spec.npix)
    noise = rand * spec.sig * (1 - vmodel.flux.value)
    # More noise??
    if low_s2n:
        rand2 = rstate.randn(spec.npix)
        more_noise = noise_boost * rand2 * spec.sig
        noise += more_noise
    else:
        s2n_boost = 1.  # NOTE(review): unused; the output sig below always
        # carries noise_boost regardless of low_s2n -- verify this is intended
    final_spec = XSpectrum1D.from_tuple(
        (vmodel.wavelength,
         spec.flux.value * vmodel.flux.value + noise,
         noise_boost * spec.sig))
    # Return
    return final_spec, dlas
def tripp2005():
    '''Tripp, T. et al. 2005, ApJ, 2005, 619, 714
    PG 1216+069 (LLS in Virgo)
    HST/STIS, FUSE
    Metal columns parsed from Tables 2 and 3
    Total NHI from damping wings
    M/H from O/H

    Returns
    -------
    lls : LLSSystem
    '''
    # Grab ASCII files from ApJ (cached locally once downloaded)
    # NOTE(review): uses urllib2 (Python 2 only); the sibling copy of this
    # function uses a bare urlopen -- confirm which import this module carries
    tab_fils = [pyigm_path+"/data/LLS/tripp2005.tb3.ascii",
                pyigm_path+"/data/LLS/tripp2005.tb2.ascii"]
    urls = ['http://iopscience.iop.org/0004-637X/619/2/714/fulltext/60797.tb3.txt',
            'http://iopscience.iop.org/0004-637X/619/2/714/fulltext/60797.tb2.txt']
    for jj,tab_fil in enumerate(tab_fils):
        chk_fil = glob.glob(tab_fil)
        if len(chk_fil) > 0:
            tab_fil = chk_fil[0]
        else:
            url = urls[jj]
            print('LLSSurvey: Grabbing table file from {:s}'.format(url))
            f = urllib2.urlopen(url)
            with open(tab_fil, "wb") as code:
                code.write(f.read())
    # Setup
    radec = '121920.9320+063838.476'  # SIMBAD
    lls = LLSSystem(name='PG1216+069_z0.006', radec=radec, zem=0.3313,
                    zabs=0.00632, vlim=[-100., 100.]*u.km/u.s, NHI=19.32,
                    ZH=-1.6, sig_NHI=np.array([0.03, 0.03]))
    # Columns
    # Start with Table 3 (VPFIT); components of the same ion are summed
    with open(tab_fils[0],'r') as f:
        flines3 = f.readlines()
    ion_dict = {}
    for iline in flines3:
        if (len(iline.strip()) == 0):
            continue
        isplit = iline.split('\t')
        # Ion: a non-empty first field starts a new ion; flg=2 marks a
        # continuation component added onto the previous ion
        flg = 2
        if (len(isplit[0].strip()) > 0):# & (isplit[0][0] not in ['1','2']):
            ipos = isplit[0].find('1')
            ionc = isplit[0][0:ipos-1].strip()
            try:
                Zion = ltai.name_ion(ionc)
            except KeyError:
                pdb.set_trace()
            flg = 1
        # Column value and error from 'val +/- sig'
        csplit = isplit[3].split(' ')
        clm = float(csplit[0])
        sig = float(csplit[2])
        if flg == 1:
            ion_dict[ionc] = dict(logN=clm, sig_logN=sig, flag_N=1,
                                  Z=Zion[0], ion=Zion[1])
        else:  # Add this component into the running total
            tmp_dict = dict(logN=clm, sig_logN=sig, flag_N=1,
                            Z=Zion[0], ion=Zion[1])
            flagN, logN, siglogN = ltaa.sum_logN(ion_dict[ionc], tmp_dict)
            ion_dict[ionc]['logN'] = logN
            ion_dict[ionc]['sig_logN'] = siglogN
    ions = ion_dict.keys()
    # Now Table 2 for the extras (upper limits only)
    with open(tab_fils[1],'r') as f:
        flines2 = f.readlines()
    # Trim the first 10 lines
    flines2 = flines2[10:]
    # Loop
    for iline in flines2:
        isplit = iline.split('\t')
        #
        ionc = isplit[0].strip()
        if (len(ionc) == 0) or (ionc in ions):
            continue
        #
        Zion = ltai.name_ion(ionc)
        ion_dict[ionc] = dict(Z=Zion[0], ion=Zion[1], sig_logN=0.)
        if isplit[4][0] == '<':
            ion_dict[ionc]['logN'] = float(isplit[4][1:])
            ion_dict[ionc]['flag_N'] = 3
        else:
            raise ValueError('Should not get here')
    # Finish
    lls._ionN = pyiau.dict_to_ions(ion_dict)
    lls.Refs.append('Tri05')
    return lls
def dessauges09(): '''Dessauges-Zavadsky et al. 2009, MNRAS, 396, L96 SLLS with UVES Zn,Fe abundances from Table 1 from astro-ph (LateX) by JXP [AODM] Taken from the Zn/H and Fe/H assuming *no* ionization corrections RA/DEC from the 'other' name ''' # Solar abundances eZn = 4.63 eFe = 7.45 sol = [eFe, eZn] # all_lls = [] # Table 1 tab_fil = pyigm_path + "/data/LLS/Literature/dessauges09.tb1.ascii" with open(tab_fil, 'r') as f: flines1 = f.readlines() # Trim the first few lines flines1 = flines1[3:] for iline in flines1: # Parse isplit = iline.split('&') # QSO if iline[0:2] == 'QS': # QSO, RA/DEC, zem qso = isplit[0][4:].strip() radec = isplit[1].strip()[1:].replace('$', '') zem = float(isplit[3].strip()) # NHI, zabs zabs = float(isplit[4].strip()) is2 = isplit[6].strip() NHI = float(is2[1:6]) sigNHI = np.array([float(is2[10:14])] * 2) # name name = qso + 'z_{:.3f}'.format(zabs) lls = LLSSystem(name=name, radec=radec, vlim=[-500, 500] * u.km / u.s, zem=zem, zabs=zabs, NHI=NHI, sig_NHI=sigNHI) # ADOM Columns ion_dict = {} for kk, ion in enumerate(['Fe II', 'Zn II']): Zion = ltai.name_ion(ion) is2 = isplit[7 + kk].strip() if is2[0:2] == '$>': ion_dict[ion] = dict(sig_clm=0., flg_clm=2, Z=Zion[0], ion=Zion[1]) ion_dict[ion]['clm'] = float(is2[2:7]) + NHI - 12 + sol[kk] elif is2[0:2] == '$<': ion_dict[ion] = dict(sig_clm=0., flg_clm=3, Z=Zion[0], ion=Zion[1]) ion_dict[ion]['clm'] = float(is2[2:7]) + NHI - 12 + sol[kk] elif is2[0:2] == '..': pass else: ion_dict[ion] = dict(flg_clm=1, Z=Zion[0], ion=Zion[1]) ion_dict[ion]['clm'] = float(is2[1:6]) + NHI - 12 + sol[kk] ion_dict[ion]['sig_clm'] = float(is2[10:14]) #xdb.set_trace() # Finish lls._ionN = pyiau.dict_to_ions(ion_dict) lls.Refs.append('DZ09') all_lls.append(lls) # Return SLLS only fin_slls = [ills for ills in all_lls if ills.NHI < 20.3] return fin_slls
def auto_plls(self, x, y):
    '''Automatically fit a pLLS

    Builds a toy partial-LLS at the clicked position, rolls its absorption
    profile along the spectrum, and adds the pLLS at the redshift where the
    model is least inconsistent with the data.

    Parameters:
    ----------
    x,y: floats
      x,y values in the GUI (wavelength, flux of the click)
    '''
    spec = self.spec_widg.spec  # For convenience
    # Use the combined model if systems already exist, else the continuum
    if len(self.abssys_widg.all_abssys) > 0:
        conti = self.full_model
    else:
        conti = self.continuum
    # Generate toy LLS from click: NHI chosen so the LL optical depth
    # reproduces the clicked flux depression (tau_LL ~ N_HI * sigma_912)
    ximn = np.argmin(np.abs(spec.dispersion.value - x))
    NHI = 17.29 + np.log10(-1. * np.log(y / conti.flux.value[ximn]))
    #QtCore.pyqtRemoveInputHook()
    #xdb.set_trace()
    #QtCore.pyqtRestoreInputHook()
    #print('NHI={:g}'.format(NHI))
    # 911.7 A = Lyman limit; click position sets the trial redshift
    z = x / (911.7) - 1
    plls = LLSSystem((0 * u.deg, 0 * u.deg), z, [-300., 300] * u.km / u.s, NHI=NHI)
    plls.bval = 20 * u.km / u.s
    plls.fill_lls_lines(bval=20 * u.km / u.s, do_analysis=0)
    # wrest, Tau model, flux
    wrest = spec.dispersion / (1 + plls.zabs)
    tau = igmlls.tau_multi_lls(spec.dispersion, [plls])
    emtau = np.exp(-1. * tau)
    lls_flux = lsc.convolve_psf(emtau, 3.)  # smooth to instrument-ish resolution
    #xdb.xplot(wrest, lls_flux)
    # zmin (next highest LLS or zem) -- bounds the search window
    if len(self.abssys_widg.all_abssys) != 0:
        zlls = [lls.zabs for lls in self.abssys_widg.all_abssys
                if lls.zabs > plls.zabs]
        if len(zlls) == 0:
            zmin = self.zqso + 0.01
        else:
            zmin = np.min(np.array(zlls)) - 0.01
    else:
        zmin = self.zqso + 0.01
    # Pixels for analysis and rolling
    # NEED TO CUT ON X-Shooter ARM
    apix = np.where((wrest > 914 * u.AA) &
                    #(spec.dispersion<5600*u.AA) &
                    (spec.dispersion < (1 + zmin) * 1026. * u.AA))[0]  # Might go to Lyb
    # Number of pixel shifts spanning the allowed redshift range
    nroll = (np.argmin(np.abs(spec.dispersion - (911.7 * u.AA * (1 + zmin)))) -  # Extra 0.01 for bad z
             np.argmin(np.abs(spec.dispersion - (911.7 * u.AA * (1 + plls.zabs)))))
    # Require nroll does not exceed length of spectrum
    if np.max(apix) + nroll > len(spec.dispersion):
        nroll = len(spec.dispersion) - np.max(apix) - 1
    gdpix = np.arange(np.min(apix) - nroll, np.max(apix) + nroll + 1)
    # Pad the model with unity (no absorption) on both sides
    roll_flux = np.concatenate([np.ones(nroll), lls_flux[apix], np.ones(nroll)])
    roll_msk = roll_flux < 0.7  # only test pixels with real absorption
    # Generate data arrays
    wave_pad = spec.dispersion[gdpix]
    #QtCore.pyqtRemoveInputHook()
    #xdb.set_trace()
    #QtCore.pyqtRestoreInputHook()
    flux_pad = spec.flux[gdpix]
    sig_pad = spec.sig[gdpix]
    # NOTE(review): both branches are identical here -- looks vestigial
    if len(self.abssys_widg.all_abssys) > 0:
        conti_pad = conti.flux[gdpix]
    else:
        conti_pad = conti.flux[gdpix]
    # Generate matricies: one column per trial pixel shift
    flux_matrix = np.zeros((len(roll_flux), nroll))
    sig_matrix = np.zeros((len(roll_flux), nroll))
    conti_matrix = np.zeros((len(roll_flux), nroll))
    roll_matrix = np.zeros((len(roll_flux), nroll))
    mask_matrix = np.zeros((len(roll_flux), nroll))
    for kk in range(nroll):
        roll_matrix[:, kk] = np.roll(roll_flux, kk)
        mask_matrix[:, kk] = np.roll(roll_msk, kk)
        flux_matrix[:, kk] = flux_pad
        conti_matrix[:, kk] = conti_pad
        sig_matrix[:, kk] = sig_pad
    # Model -- Multiply by continuum
    model = roll_matrix * conti_matrix
    # Condition: model significantly below data where absorption predicted
    idx = np.where((model < (flux_matrix - sig_matrix * 1.5)) &
                   (mask_matrix == True))
    bad_matrix = np.zeros((len(roll_flux), nroll))
    bad_matrix[idx] = 1
    # Sum on offsets and get redshift of the least-bad shift
    bad = np.sum(bad_matrix, 0)
    ibest = np.argmin(bad)
    zbest = spec.dispersion[ibest + ximn] / (911.7 * u.AA) - 1  # Quantity
    # Add pLLS?  (fewer than 10 discrepant pixels = acceptable)
    if bad[ibest] < 10:
        #QtCore.pyqtRemoveInputHook()
        #xdb.set_trace()
        #QtCore.pyqtRestoreInputHook()
        self.add_LLS(zbest.value, bval=20. * u.km / u.s, NHI=NHI)
    else:
        print('No viable pLLS found with our criteria!')
def meiring09():
    '''Meiring et al. 2009, MNRAS, 393, 1513
    SLLS with Magellan
    Abundances from Table 3 from astro-ph (LateX) by JXP [AODM]
    RA/DEC from Table 1

    Two-pass parse: Table 1 yields per-QSO coordinates/zem; Table 3 is
    stateful (a 'QS' header row defines the ion list, a 'Q...' row defines
    the current system, subsequent rows carry its columns).

    Returns
    -------
    fin_slls : list of LLSSystem
        Only systems with NHI < 20.3 (SLLS) are returned.
    '''
    all_lls = []
    # Table 1
    tab_fil = pyigm_path + "/data/LLS/Literature/meiring09.tb1.ascii"
    with open(tab_fil, 'r') as f:
        flines1 = f.readlines()
    # Grab RA/DEC
    qso_dict = {}
    for iline in flines1:
        # Skip header / non-data rows
        if iline[0:3] in [' QS', '\hl', '$\\c', ' J2', ' ']:
            continue
        # Parse
        isplit = iline.split('&')
        #xdb.set_trace()
        # A leading '$' marks a LaTeX minus sign on the Dec field
        if '$' in isplit[3].strip():
            isplit[3] = '-' + (isplit[3].strip())[3:]
        radec = isplit[2].strip() + isplit[3].strip()
        radec = radec.replace(':', '')
        # zem
        zem = float(isplit[5].strip())
        # Save
        qso_dict[isplit[0].strip()] = dict(radec=radec, zem=zem,
                                           vlim=[-500, 500.] * u.km / u.s)
    # Abundances (AODM)
    # Table 3
    tab_fil = pyigm_path + "/data/LLS/Literature/meiring09.tb3.ascii"
    with open(tab_fil, 'r') as f:
        flines3 = f.readlines()
    #
    for iline in flines3:
        if iline[0:2] in ['\h', ' ']:
            continue
        # Parse
        isplit = iline.split('&')
        # Ions: header row defines the column -> ion mapping
        if iline[0:2] == 'QS':
            ioncs = []
            Zions = []
            for iis in isplit[3:-1]:  # Skipping HI
                # Parse
                #is2 = iis.split('\\')
                #ip2 = is2[2].find('}')
                ionc = iis.strip()
                # Zion
                Zion = ltai.name_ion(ionc)
                # Append
                ioncs.append(ionc)
                Zions.append(Zion)
            continue
        if iline[0] == 'Q':  # QSO row: starts a new system
            qso = isplit[0].strip()
            # Multiple absorbers toward one sightline share the Table 1 entry
            if qso[-1] in ['A', 'B', 'C']:
                qso = qso[0:-1]
            # zabs and name
            zabs = float(isplit[1].strip())
            qso_dict[qso]['name'] = qso + 'z_{:.3f}'.format(zabs)
            qso_dict[qso]['zabs'] = zabs
            # NHI
            is2 = isplit[2].strip()
            if is2[0] == '$':
                qso_dict[qso]['NHI'] = 99.99  # THROW OUT Q1436-0051B
                qso_dict[qso]['sig_NHI'] = np.array([0., 0.])
            else:
                qso_dict[qso]['NHI'] = float(is2[0:5])
                qso_dict[qso]['sig_NHI'] = np.array([float(is2[10:])] * 2)
            #if qso_dict[qso]['NHI'] >= 20.3:
            #    print('Uh oh.  DLA')
            # Generate LLS
            lls = LLSSystem(**qso_dict[qso])
            continue
        else:
            # AODM columns for the system opened by the last 'Q' row
            ion_dict = {}
            for kk, iis in enumerate(isplit[3:-1]):
                is2 = iis.strip()
                if is2[0:3] == '$>$':  # lower limit (saturated)
                    ion_dict[ioncs[kk]] = dict(sig_clm=0., flg_clm=2,
                                               Z=Zions[kk][0], ion=Zions[kk][1])
                    ion_dict[ioncs[kk]]['clm'] = float(is2[3:])
                elif is2[0:3] == '$<$':  # upper limit
                    ion_dict[ioncs[kk]] = dict(sig_clm=0., flg_clm=3,
                                               Z=Zions[kk][0], ion=Zions[kk][1])
                    ion_dict[ioncs[kk]]['clm'] = float(is2[3:])
                elif len(is2) == 0:  # empty cell
                    pass
                else:  # detection; fixed-width value +/- error
                    ion_dict[ioncs[kk]] = dict(flg_clm=1, Z=Zions[kk][0],
                                               ion=Zions[kk][1])
                    ion_dict[ioncs[kk]]['clm'] = float(is2[0:5])
                    ion_dict[ioncs[kk]]['sig_clm'] = float(is2[10:])
            # Finish
            lls._ionN = pyiau.dict_to_ions(ion_dict)
            lls.Refs.append('Mei09')
            all_lls.append(lls)
    # Return SLLS only
    fin_slls = [ills for ills in all_lls if ills.NHI < 20.3]
    return fin_slls
def __init__(self, abssys_list, parent=None, only_one=False, linelist=None,
             no_buttons=False):
    '''Build the absorption-system list widget.

    abssys_list: list of str
      Files from which LLSSystem objects are loaded (one per file)
    only_one: bool, optional
      Restrict to one selection at a time? [False]
    linelist: LineList, optional
      Reuse an existing line list (loading 'ISM' is slow)
    no_buttons: bool, optional
      Eliminate Refine/Reload buttons?
    '''
    super(AbsSysWidget, self).__init__(parent)
    #if not status is None:
    #    self.statusBar = status
    self.abssys_list = abssys_list
    # Speeds things up
    if linelist is None:
        self.linelist = LineList('ISM')
    else:
        self.linelist = linelist
    # Create the line list
    list_label = QtGui.QLabel('Abs Systems:')
    self.abslist_widget = QtGui.QListWidget(self)
    if not only_one:
        # Allow multi-select unless caller restricted it
        self.abslist_widget.setSelectionMode(
            QtGui.QAbstractItemView.ExtendedSelection)
    self.abslist_widget.addItem('None')
    #self.abslist_widget.addItem('Test')
    # Lists of systems / widget items kept in sync by add_item()
    self.abs_sys = []
    self.items = []
    self.all_items = []
    self.all_abssys = []
    for abssys_fil in self.abssys_list:
        self.all_abssys.append(LLSSystem.from_absid_fil(abssys_fil,
                                                        linelist=self.linelist))
        self.add_item(abssys_fil)
    self.abslist_widget.setCurrentRow(0)
    self.abslist_widget.itemSelectionChanged.connect(self.on_list_change)
    # Layout
    vbox = QtGui.QVBoxLayout()
    vbox.addWidget(list_label)
    # Buttons
    if not no_buttons:
        buttons = QtGui.QWidget()
        self.refine_button = QtGui.QPushButton('Refine', self)
        #self.refine_button.clicked.connect(self.refine) # CONNECTS TO A PARENT
        reload_btn = QtGui.QPushButton('Reload', self)
        reload_btn.clicked.connect(self.reload)
        hbox1 = QtGui.QHBoxLayout()
        hbox1.addWidget(self.refine_button)
        hbox1.addWidget(reload_btn)
        buttons.setLayout(hbox1)
        vbox.addWidget(buttons)
    vbox.addWidget(self.abslist_widget)
    self.setLayout(vbox)
def jenkins2005():
    """Jenkins, E. et al. 2005, ApJ, 2005, 623, 767
    PHL 1811
    HST/STIS, FUSE
    Metals parsed from Table 1
      OI taken from text
      Had to input error on columns by hand (JXP)
    Total NHI from Lyman series.  see Fig 3
    M/H from O/H

    Downloads the machine-readable Table 1 on first use, builds AbsLine
    objects per table row, and collects per-ion columns.

    Returns
    -------
    lls : LLSSystem
    """
    # Grab ASCII file from ApJ (cache locally after first download)
    tab_fil = pyigm_path + "/data/LLS/Literature/jenkins2005.tb1.ascii"
    chk_fil = glob.glob(tab_fil)
    if len(chk_fil) > 0:
        tab_fil = chk_fil[0]
    else:
        url = 'http://iopscience.iop.org/0004-637X/623/2/767/fulltext/61520.tb1.txt'
        print('LLSSurvey: Grabbing table file from {:s}'.format(url))
        f = urllib2.urlopen(url)
        with open(tab_fil, "wb") as code:
            code.write(f.read())
    # Setup
    radec = '215501.5152-092224.688'  # SIMBAD
    lls = LLSSystem(name='PHL1811_z0.081', radec=radec, zem=0.192,
                    zabs=0.080923, vlim=[-100., 100.] * u.km / u.s, NHI=17.98,
                    ZH=-0.19, sig_NHI=np.array([0.05, 0.05]))
    lls.lines = []  # Probably not used
    # AbsLines
    ism = LineList('ISM')
    # Hand-entered column-density errors per ion (see docstring)
    Nsig = {'C IV': 0.4, 'N II': 0.4, 'Si II': 0.05, 'Si IV': 0.25,
            'S II': 0.2, 'Fe II': 0.12, 'H I': 0.05, 'S III': 0.06}
    # Parse Table
    with open(tab_fil, 'r') as f:
        flines = f.readlines()
    ion_dict = {}
    for iline in flines:
        iline = iline.strip()
        if (len(iline) == 0):
            continue
        # Split on tabs
        isplit = iline.split('\t')
        # Offset?  Continuation rows omit the ion column, shifting fields by 1
        ioff = 0
        if isplit[0][0] in ['1', '2']:
            ioff = -1
        # Catch bad lines
        if (isplit[1 + ioff][0:6] in ['1442.0', '1443.7', '1120.9']):
            # Skip goofy CII line and CII*
            continue
        if len(isplit[2 + ioff]) == 0:
            continue
        # Ion -- only present on the first row of each ion's group;
        # ionc/Zion persist across continuation rows (loop-carried state)
        if (len(isplit[0].strip()) > 0) & (isplit[0][0] not in ['1', '2']):
            ionc = isplit[0].strip()
            try:
                Zion = ltai.name_ion(ionc)
            except KeyError:
                pdb.set_trace()
        # Generate the Line
        try:
            newline = AbsLine(float(isplit[2 + ioff]) * u.AA, linelist=ism,
                              closest=True)
        except ValueError:
            pdb.set_trace()
        newline.attrib['z'] = lls.zabs
        # Spectrum
        newline.analy['datafile'] = 'STIS' if 'S' in isplit[1] else 'FUSE'
        # EW (table values are mA; convert to A)
        try:
            EWvals = isplit[4 + ioff].split(' ')
        except IndexError:
            pdb.set_trace()
        newline.attrib['EW'] = float(EWvals[0]) * u.AA / 1e3
        newline.attrib['sig_EW'] = float(EWvals[2]) * u.AA / 1e3
        newline.attrib['flag_EW'] = 1
        if len(isplit) < (5 + ioff + 1):
            continue
        # Colm?
        #xdb.set_trace()
        newline.attrib['sig_logN'] = 0.
        if (len(isplit[5 + ioff].strip()) > 0) & (isplit[5 + ioff].strip() != '\\ldots'):
            if isplit[5 + ioff][0] == '\\':  # LaTeX '\geq' -> lower limit
                ipos = isplit[5 + ioff].find(' ')
                newline.attrib['logN'] = float(isplit[5 + ioff][ipos + 1:])
                newline.attrib['flag_N'] = 2
            elif isplit[5 + ioff][0] == '<':  # upper limit
                ipos = 0
                newline.attrib['logN'] = float(isplit[5 + ioff][ipos + 1:])
                newline.attrib['flag_N'] = 3
            elif isplit[5 + ioff][0] == '1':  # detection (logN starts '1x.xx')
                try:
                    newline.attrib['logN'] = float(isplit[5 + ioff][0:5])
                except ValueError:
                    pdb.set_trace()
                newline.attrib['flag_N'] = 1
                try:
                    newline.attrib['sig_logN'] = Nsig[ionc]
                except KeyError:
                    print('No error for {:s}'.format(ionc))
            else:
                raise ValueError('Bad character')
            # ion_dict -- later rows for the same ion overwrite earlier ones
            ion_dict[ionc] = dict(clm=newline.attrib['logN'],
                                  sig_clm=newline.attrib['sig_logN'],
                                  flg_clm=newline.attrib['flag_N'],
                                  Z=Zion[0], ion=Zion[1])
        # Append
        lls.lines.append(newline)
    # Fix NI, OI (values from the paper text, not the table)
    ion_dict['O I']['clm'] = 14.47
    ion_dict['O I']['sig_clm'] = 0.05
    ion_dict['N I']['flg_clm'] = 3
    lls._ionN = pyiau.dict_to_ions(ion_dict)
    lls.Refs.append('Jen05')
    # Return
    return lls
def tumlinson11():
    """Tumlinson, J. et al. 2011, ApJ, 733, 111
    J1009+0713
    HST/COS
    Metal columns parsed from Table 1
    NHI from LL+Lyman series (uncertain)

    Legacy (Python-2 era, urllib2) variant; a duplicate of this routine
    appears later in the file using urlopen.

    Returns
    -------
    lls : LLSSystem
    """
    # Grab ASCII file from ApJ (cache locally after first download)
    tab_fil = pyigm_path + "/data/LLS/Literature/tumlinson11.tb1.ascii"
    url = 'http://iopscience.iop.org/0004-637X/733/2/111/suppdata/apj388927t1_ascii.txt'
    chk_fil = glob.glob(tab_fil)
    if len(chk_fil) > 0:
        tab_fil = chk_fil[0]
    else:
        print('LLSSurvey: Grabbing table file from {:s}'.format(url))
        f = urllib2.urlopen(url)
        with open(tab_fil, "wb") as code:
            code.write(f.read())
    # Setup
    radec = '100902.06+071343.8'  # From paper
    lls = LLSSystem(name='J1009+0713_z0.356', radec=radec, zem=0.456,
                    zabs=0.3558, vlim=[-200., 250.] * u.km / u.s, NHI=18.4,
                    sig_NHI=np.array([0.41, 0.41]))
    # Columns
    # Start with Table 3 (VPFIT)
    with open(tab_fil, 'r') as f:
        flines1 = f.readlines()
    # Trim (header rows)
    flines1 = flines1[18:]
    #
    ion_dict = {}
    # For ions measured from several transitions, only this transition's
    # row is used; ions absent here take whatever row appears
    line_dict = dict(OI='1302', OVI='1038', MgII='2803^b', SiII='1190',
                     CaII='3934', FeII='2586')
    ion = None
    for iline in flines1:
        isplit = iline.split('\t')
        if ion == 'FeIII':  # Last line
            break
        # Ion
        is2 = isplit[0].split(' ')
        ion = is2[0] + is2[1]
        try:
            gdl = line_dict[ion]
        except:  # NOTE(review): bare except; only KeyError is expected here
            pass
            #print('Taking {:s}'.format(isplit[0]))
        else:
            if is2[2] != gdl:
                continue
        Zion = ltai.name_ion(ion)
        ion_dict[ion] = dict(logN=0., sig_logN=0., flag_N=0,
                             Z=Zion[0], ion=Zion[1])
        # Combine components [could replace with SubSystems some day]
        for iis in isplit[1:-1]:
            # Upper limit (only adopted if nothing measured yet)
            if (iis.strip()[0] == '<') & (ion_dict[ion]['flag_N'] == 0):
                ion_dict[ion]['flag_N'] = 3
                ion_dict[ion]['logN'] = float(iis[1:])
            elif (iis.strip()[0] == '>'):  # Saturated
                ion_dict[ion]['flag_N'] = 2
                ion_dict[ion]['logN'] = log_sum([ion_dict[ion]['logN'],
                                                 float(iis[1:5])])
            elif iis.strip()[0] in ['.', '<']:
                # blank cell, or an upper limit after a real measurement
                pass
            else:
                if ion_dict[ion]['flag_N'] == 2:  # Add to saturated
                    ion_dict[ion]['logN'] = log_sum([ion_dict[ion]['logN'],
                                                     float(iis[0:4])])
                else:
                    ion_dict[ion]['flag_N'] = 1
                    obj = dict(logN=float(iis[0:4]),
                               sig_logN=float(iis[-4:]), flag_N=1)
                    # Add
                    flag, N, sig = ltaa.sum_logN(ion_dict[ion], obj)
                    ion_dict[ion]['logN'] = N
                    ion_dict[ion]['sig_logN'] = sig
    # Finish
    lls._ionN = pyiau.dict_to_ions(ion_dict)
    lls.Refs.append('Tum11')
    return lls
def meiring07():
    """Meiring et al. 2007, MNRAS, 376, 557
    SLLS with Magellan
    Abundances from Table 11 from astro-ph (LateX) by JXP [AODM]
    RA/DEC from Table 1

    Two-pass parse: Table 1 yields per-QSO coordinates/zem; Table 11 is
    stateful (a 'QS' header row defines the ion list, a 'Q...' row defines
    the current system, subsequent rows carry its columns).

    Returns
    -------
    fin_slls : list of LLSSystem
        Only systems with NHI < 20.3 (SLLS) are returned.
    """
    all_lls = []
    # Table 1
    tab_fil = pyigm_path + "/data/LLS/Literature/meiring07.tb1.ascii"
    with open(tab_fil, 'r') as f:
        flines1 = f.readlines()
    # Grab RA/DEC
    qso_dict = {}
    for iline in flines1:
        # Skip header / non-data rows
        if iline[0:2] in ['QS', '\h', '$\\', 'J2']:
            continue
        # Parse
        isplit = iline.split('&')
        # Positive Dec needs an explicit '+' for the coordinate string
        if '-' not in isplit[3]:
            sgn = '+'
        else:
            sgn = ''
        radec = isplit[2].strip() + sgn + isplit[3].strip()
        radec = radec.replace(':', '')
        # zem (hard-coded for Q0826-2230, whose table cell is unusable)
        if isplit[0].strip() != 'Q0826-2230':
            zem = float(isplit[5].strip())
        else:
            zem = 0.911
        # Save
        qso_dict[isplit[0].strip()] = dict(radec=radec, zem=zem,
                                           vlim=[-500., 500] * u.km / u.s)
    # Abundances (AODM)
    # Table 11
    tab_fil = pyigm_path + "/data/LLS/Literature/meiring07.tb11.ascii"
    with open(tab_fil, 'r') as f:
        flines11 = f.readlines()
    #
    for iline in flines11:
        if iline[0:2] in ['\h', ' ']:
            continue
        # Parse
        isplit = iline.split('&')
        # Ions: header row defines the column -> ion mapping
        if iline[0:2] == 'QS':
            ioncs = []
            Zions = []
            for iis in isplit[3:-1]:  # Skipping HI
                # Parse the LaTeX ion label, e.g. '\SiII{}' style markup
                is2 = iis.split('\\')
                ip2 = is2[2].find('}')
                ionc = is2[1][2:].strip() + ' ' + is2[2][0:ip2].strip()
                # Zion
                Zion = ltai.name_ion(ionc)
                # Append
                ioncs.append(ionc)
                Zions.append(Zion)
            continue
        if iline[0] == 'Q':  # QSO row: starts a new system
            qso = isplit[0].strip()
            # zabs and name
            zabs = float(isplit[1].strip())
            qso_dict[qso]['name'] = qso + 'z_{:.3f}'.format(zabs)
            qso_dict[qso]['zabs'] = zabs
            # NHI (fixed-width value +/- error)
            is2 = isplit[2].strip()
            qso_dict[qso]['NHI'] = float(is2[0:5])
            #if qso_dict[qso]['NHI'] >= 20.3:
            #    print('Uh oh.  DLA')
            qso_dict[qso]['sig_NHI'] = np.array([float(is2[10:])] * 2)
            # Generate LLS
            lls = LLSSystem(**qso_dict[qso])
            continue
        else:
            # AODM columns for the system opened by the last 'Q' row
            ion_dict = {}
            for kk, iis in enumerate(isplit[3:-1]):
                is2 = iis.strip()
                if is2[0:3] == '$>$':  # lower limit (saturated)
                    ion_dict[ioncs[kk]] = dict(sig_clm=0., flg_clm=2,
                                               Z=Zions[kk][0], ion=Zions[kk][1])
                    ion_dict[ioncs[kk]]['clm'] = float(is2[3:])
                elif is2[0:3] == '$<$':  # upper limit
                    ion_dict[ioncs[kk]] = dict(sig_clm=0., flg_clm=3,
                                               Z=Zions[kk][0], ion=Zions[kk][1])
                    ion_dict[ioncs[kk]]['clm'] = float(is2[3:])
                elif len(is2) == 0:  # empty cell
                    pass
                else:  # detection; fixed-width value +/- error
                    ion_dict[ioncs[kk]] = dict(flg_clm=1, Z=Zions[kk][0],
                                               ion=Zions[kk][1])
                    ion_dict[ioncs[kk]]['clm'] = float(is2[0:5])
                    ion_dict[ioncs[kk]]['sig_clm'] = float(is2[10:])
            # Finish
            lls._ionN = pyiau.dict_to_ions(ion_dict)
            lls.Refs.append('Mei07')
            all_lls.append(lls)
    # Return SLLS only
    fin_slls = [ills for ills in all_lls if ills.NHI < 20.3]
    return fin_slls
def dessauges09(): '''Dessauges-Zavadsky et al. 2009, MNRAS, 396, L96 SLLS with UVES Zn,Fe abundances from Table 1 from astro-ph (LateX) by JXP [AODM] Taken from the Zn/H and Fe/H assuming *no* ionization corrections RA/DEC from the 'other' name ''' # Solar abundances eZn = 4.63 eFe = 7.45 sol = [eFe,eZn] # all_lls = [] # Table 1 tab_fil = pyigm_path+"/data/LLS/Literature/dessauges09.tb1.ascii" with open(tab_fil,'r') as f: flines1 = f.readlines() # Trim the first few lines flines1 = flines1[3:] for iline in flines1: # Parse isplit = iline.split('&') # QSO if iline[0:2] == 'QS': # QSO, RA/DEC, zem qso = isplit[0][4:].strip() radec = isplit[1].strip()[1:].replace('$','') zem = float(isplit[3].strip()) # NHI, zabs zabs = float(isplit[4].strip()) is2 = isplit[6].strip() NHI = float(is2[1:6]) sigNHI = np.array([float(is2[10:14])]*2) # name name = qso+'z_{:.3f}'.format(zabs) lls = LLSSystem(name=name, radec=radec, vlim=[-500,500]*u.km/u.s, zem=zem, zabs=zabs, NHI=NHI, sig_NHI=sigNHI) # ADOM Columns ion_dict = {} for kk,ion in enumerate(['Fe II','Zn II']): Zion = ltai.name_ion(ion) is2 = isplit[7+kk].strip() if is2[0:2] == '$>': ion_dict[ion] = dict(sig_clm=0.,flg_clm=2,Z=Zion[0],ion=Zion[1]) ion_dict[ion]['clm'] = float(is2[2:7]) + NHI - 12 + sol[kk] elif is2[0:2] == '$<': ion_dict[ion] = dict(sig_clm=0.,flg_clm=3,Z=Zion[0],ion=Zion[1]) ion_dict[ion]['clm'] = float(is2[2:7]) + NHI - 12 + sol[kk] elif is2[0:2] == '..': pass else: ion_dict[ion] = dict(flg_clm=1,Z=Zion[0],ion=Zion[1]) ion_dict[ion]['clm'] = float(is2[1:6]) + NHI - 12 + sol[kk] ion_dict[ion]['sig_clm'] = float(is2[10:14]) #xdb.set_trace() # Finish lls._ionN = pyiau.dict_to_ions(ion_dict) lls.Refs.append('DZ09') all_lls.append(lls) # Return SLLS only fin_slls = [ills for ills in all_lls if ills.NHI < 20.3] return fin_slls
def meiring09():
    '''Meiring et al. 2009, MNRAS, 393, 1513
    SLLS with Magellan
    Abundances from Table 3 from astro-ph (LateX) by JXP [AODM]
    RA/DEC from Table 1

    NOTE(review): duplicate of the earlier meiring09 in this file --
    candidate for deduplication.

    Returns
    -------
    fin_slls : list of LLSSystem
        Only systems with NHI < 20.3 (SLLS) are returned.
    '''
    all_lls = []
    # Table 1
    tab_fil = pyigm_path + "/data/LLS/Literature/meiring09.tb1.ascii"
    with open(tab_fil, 'r') as f:
        flines1 = f.readlines()
    # Grab RA/DEC
    qso_dict = {}
    for iline in flines1:
        # Skip header / non-data rows
        if iline[0:3] in [' QS', '\hl', '$\\c', ' J2', ' ']:
            continue
        # Parse
        isplit = iline.split('&')
        #xdb.set_trace()
        # A leading '$' marks a LaTeX minus sign on the Dec field
        if '$' in isplit[3].strip():
            isplit[3] = '-' + (isplit[3].strip())[3:]
        radec = isplit[2].strip() + isplit[3].strip()
        radec = radec.replace(':', '')
        # zem
        zem = float(isplit[5].strip())
        # Save
        qso_dict[isplit[0].strip()] = dict(radec=radec, zem=zem,
                                           vlim=[-500, 500.] * u.km / u.s)
    # Abundances (AODM)
    # Table 3
    tab_fil = pyigm_path + "/data/LLS/Literature/meiring09.tb3.ascii"
    with open(tab_fil, 'r') as f:
        flines3 = f.readlines()
    #
    for iline in flines3:
        if iline[0:2] in ['\h', ' ']:
            continue
        # Parse
        isplit = iline.split('&')
        # Ions: header row defines the column -> ion mapping
        if iline[0:2] == 'QS':
            ioncs = []
            Zions = []
            for iis in isplit[3:-1]:  # Skipping HI
                # Parse
                #is2 = iis.split('\\')
                #ip2 = is2[2].find('}')
                ionc = iis.strip()
                # Zion
                Zion = ltai.name_ion(ionc)
                # Append
                ioncs.append(ionc)
                Zions.append(Zion)
            continue
        if iline[0] == 'Q':  # QSO row: starts a new system
            qso = isplit[0].strip()
            # Multiple absorbers toward one sightline share the Table 1 entry
            if qso[-1] in ['A', 'B', 'C']:
                qso = qso[0:-1]
            # zabs and name
            zabs = float(isplit[1].strip())
            qso_dict[qso]['name'] = qso + 'z_{:.3f}'.format(zabs)
            qso_dict[qso]['zabs'] = zabs
            # NHI
            is2 = isplit[2].strip()
            if is2[0] == '$':
                qso_dict[qso]['NHI'] = 99.99  # THROW OUT Q1436-0051B
                qso_dict[qso]['sig_NHI'] = np.array([0., 0.])
            else:
                qso_dict[qso]['NHI'] = float(is2[0:5])
                qso_dict[qso]['sig_NHI'] = np.array([float(is2[10:])] * 2)
            #if qso_dict[qso]['NHI'] >= 20.3:
            #    print('Uh oh.  DLA')
            # Generate LLS
            lls = LLSSystem(**qso_dict[qso])
            continue
        else:
            # AODM columns for the system opened by the last 'Q' row
            ion_dict = {}
            for kk, iis in enumerate(isplit[3:-1]):
                is2 = iis.strip()
                if is2[0:3] == '$>$':  # lower limit (saturated)
                    ion_dict[ioncs[kk]] = dict(sig_clm=0., flg_clm=2,
                                               Z=Zions[kk][0], ion=Zions[kk][1])
                    ion_dict[ioncs[kk]]['clm'] = float(is2[3:])
                elif is2[0:3] == '$<$':  # upper limit
                    ion_dict[ioncs[kk]] = dict(sig_clm=0., flg_clm=3,
                                               Z=Zions[kk][0], ion=Zions[kk][1])
                    ion_dict[ioncs[kk]]['clm'] = float(is2[3:])
                elif len(is2) == 0:  # empty cell
                    pass
                else:  # detection; fixed-width value +/- error
                    ion_dict[ioncs[kk]] = dict(flg_clm=1, Z=Zions[kk][0],
                                               ion=Zions[kk][1])
                    ion_dict[ioncs[kk]]['clm'] = float(is2[0:5])
                    ion_dict[ioncs[kk]]['sig_clm'] = float(is2[10:])
            # Finish
            lls._ionN = pyiau.dict_to_ions(ion_dict)
            lls.Refs.append('Mei09')
            all_lls.append(lls)
    # Return SLLS only
    fin_slls = [ills for ills in all_lls if ills.NHI < 20.3]
    return fin_slls
def auto_plls(self, x, y):
    """Automatically fit a pLLS

    Builds a toy partial-LLS at the clicked position, rolls its absorption
    profile along the spectrum, and adds the pLLS at the redshift where the
    model is least inconsistent with the data.

    Parameters:
    ----------
    x,y: floats
      x,y values in the GUI (wavelength, flux of the click)
    """
    spec = self.spec_widg.spec  # For convenience
    # Use the combined model if systems already exist, else the continuum
    if len(self.abssys_widg.all_abssys) > 0:
        conti = self.full_model
    else:
        conti = self.continuum
    # Generate toy LLS from click
    ximn = np.argmin(np.abs(spec.wavelength.value - x))
    # Guard: log of flux ratio below is only defined for y < continuum
    if y > conti.flux.value[ximn]:
        print("Type F below the continuum fool!")
        return
    # NHI chosen so the LL optical depth reproduces the clicked depression
    NHI = 17.29 + np.log10(-1. * np.log(y / conti.flux.value[ximn]))
    #QtCore.pyqtRemoveInputHook()
    #pdb.set_trace()
    #QtCore.pyqtRestoreInputHook()
    #print('NHI={:g}'.format(NHI))
    # 911.7 A = Lyman limit; click position sets the trial redshift
    z = x / (911.7) - 1
    plls = LLSSystem((0 * u.deg, 0 * u.deg), z, [-300., 300] * u.km / u.s,
                     NHI=NHI)
    plls.bval = 20 * u.km / u.s
    plls.fill_lls_lines(bval=20 * u.km / u.s, do_analysis=0)
    # wrest, Tau model, flux
    wrest = spec.wavelength / (1 + plls.zabs)
    tau = igmlls.tau_multi_lls(spec.wavelength, [plls])
    emtau = np.exp(-1. * tau)
    lls_flux = lsc.convolve_psf(emtau, 3.)  # smooth to instrument-ish resolution
    #xdb.xplot(wrest, lls_flux)
    # zmin (next highest LLS or zem) -- bounds the search window
    if len(self.abssys_widg.all_abssys) != 0:
        zlls = [lls.zabs for lls in self.abssys_widg.all_abssys
                if lls.zabs > plls.zabs]
        if len(zlls) == 0:
            zmin = self.zqso + 0.05
        else:
            zmin = np.min(np.array(zlls)) - 0.01
    else:
        zmin = self.zqso + 0.05
    # Pixels for analysis and rolling
    # NEED TO CUT ON X-Shooter ARM
    apix = np.where((wrest > 914 * u.AA) &
                    #(spec.wavelength<5600*u.AA) &
                    (spec.wavelength < (1 + zmin) * 1026. * u.AA))[0]  # Might go to Lyb
    # Number of pixel shifts spanning the allowed redshift range
    nroll = (np.argmin(np.abs(spec.wavelength - (911.7 * u.AA * (1 + zmin)))) -  # Extra 0.01 for bad z
             np.argmin(np.abs(spec.wavelength - (911.7 * u.AA * (1 + plls.zabs)))))
    # Require nroll does not exceed length of spectrum
    if np.max(apix) + nroll > len(spec.wavelength):
        nroll = len(spec.wavelength) - np.max(apix) - 1
    gdpix = np.arange(np.min(apix) - nroll, np.max(apix) + nroll + 1)
    # Pad the model with unity (no absorption) on both sides
    roll_flux = np.concatenate([np.ones(nroll), lls_flux[apix],
                                np.ones(nroll)])
    roll_msk = roll_flux < 0.7  # only test pixels with real absorption
    # Generate data arrays
    wave_pad = spec.wavelength[gdpix]
    #QtCore.pyqtRemoveInputHook()
    #xdb.set_trace()
    #QtCore.pyqtRestoreInputHook()
    flux_pad = spec.flux[gdpix]
    sig_pad = spec.sig[gdpix]
    # NOTE(review): both branches are identical here -- looks vestigial
    if len(self.abssys_widg.all_abssys) > 0:
        conti_pad = conti.flux[gdpix]
    else:
        conti_pad = conti.flux[gdpix]
    # Generate matricies: one column per trial pixel shift
    flux_matrix = np.zeros((len(roll_flux), nroll))
    sig_matrix = np.zeros((len(roll_flux), nroll))
    conti_matrix = np.zeros((len(roll_flux), nroll))
    roll_matrix = np.zeros((len(roll_flux), nroll))
    mask_matrix = np.zeros((len(roll_flux), nroll))
    for kk in range(nroll):
        roll_matrix[:, kk] = np.roll(roll_flux, kk)
        mask_matrix[:, kk] = np.roll(roll_msk, kk)
        flux_matrix[:, kk] = flux_pad
        conti_matrix[:, kk] = conti_pad
        sig_matrix[:, kk] = sig_pad
    # Model -- Multiply by continuum
    model = roll_matrix * conti_matrix
    # Condition: model significantly below data where absorption predicted
    idx = np.where((model < (flux_matrix - sig_matrix * 1.5)) &
                   (mask_matrix == True))
    bad_matrix = np.zeros((len(roll_flux), nroll))
    bad_matrix[idx] = 1
    # Sum on offsets and get redshift of the least-bad shift
    bad = np.sum(bad_matrix, 0)
    ibest = np.argmin(bad)
    zbest = spec.wavelength[ibest + ximn] / (911.7 * u.AA) - 1  # Quantity
    # Add pLLS?  (fewer than 10 discrepant pixels = acceptable)
    if bad[ibest] < 10:
        #QtCore.pyqtRemoveInputHook()
        #xdb.set_trace()
        #QtCore.pyqtRestoreInputHook()
        self.add_LLS(zbest.value, bval=20. * u.km / u.s, NHI=NHI)
    else:
        print('No viable pLLS found with our criteria!')
def tumlinson11():
    """Tumlinson, J. et al. 2011, ApJ, 733, 111
    J1009+0713
    HST/COS
    Metal columns parsed from Table 1
    NHI from LL+Lyman series (uncertain)

    Downloads the machine-readable Table 1 on first use and combines the
    per-component columns into a single measurement per ion.

    Returns
    -------
    lls : LLSSystem
    """
    # Grab ASCII file from ApJ (cache locally after first download)
    tab_fil = pyigm_path + "/data/LLS/Literature/tumlinson11.tb1.ascii"
    url = 'http://iopscience.iop.org/0004-637X/733/2/111/suppdata/apj388927t1_ascii.txt'
    chk_fil = glob.glob(tab_fil)
    if len(chk_fil) > 0:
        tab_fil = chk_fil[0]
    else:
        print('LLSSurvey: Grabbing table file from {:s}'.format(url))
        f = urlopen(url)
        with open(tab_fil, "wb") as code:
            code.write(f.read())
    # Setup
    radec = '100902.06+071343.8'  # From paper
    lls = LLSSystem(name='J1009+0713_z0.356', radec=radec, zem=0.456,
                    zabs=0.3558, vlim=[-200., 250.] * u.km / u.s, NHI=18.4,
                    sig_NHI=np.array([0.41, 0.41]))
    # Columns
    # Start with Table 3 (VPFIT)
    with open(tab_fil, 'r') as f:
        flines1 = f.readlines()
    # Trim (header rows)
    flines1 = flines1[18:]
    #
    ion_dict = {}
    # For ions measured from several transitions, only this transition's
    # row is used; ions absent here take whatever row appears
    line_dict = dict(OI='1302', OVI='1038', MgII='2803^b', SiII='1190',
                     CaII='3934', FeII='2586')
    ion = None
    for iline in flines1:
        isplit = iline.split('\t')
        if ion == 'FeIII':  # Last line
            break
        # Ion
        is2 = isplit[0].split(' ')
        ion = is2[0] + is2[1]
        try:
            gdl = line_dict[ion]
        except KeyError:
            # FIX: was a bare `except:` which swallowed *every* exception;
            # only a missing line_dict entry is expected here, meaning the
            # ion has a single tabulated transition and is taken as-is.
            pass
            #print('Taking {:s}'.format(isplit[0]))
        else:
            if is2[2] != gdl:
                continue
        Zion = ltai.name_ion(ion)
        ion_dict[ion] = dict(logN=0., sig_logN=0., flag_N=0,
                             Z=Zion[0], ion=Zion[1])
        # Combine components [could replace with SubSystems some day]
        for iis in isplit[1:-1]:
            # Upper limit (only adopted if nothing measured yet)
            if (iis.strip()[0] == '<') & (ion_dict[ion]['flag_N'] == 0):
                ion_dict[ion]['flag_N'] = 3
                ion_dict[ion]['logN'] = float(iis[1:])
            elif (iis.strip()[0] == '>'):  # Saturated
                ion_dict[ion]['flag_N'] = 2
                ion_dict[ion]['logN'] = log_sum(
                    [ion_dict[ion]['logN'], float(iis[1:5])])
            elif iis.strip()[0] in ['.', '<']:
                # blank cell, or an upper limit after a real measurement
                pass
            else:
                if ion_dict[ion]['flag_N'] == 2:  # Add to saturated
                    ion_dict[ion]['logN'] = log_sum(
                        [ion_dict[ion]['logN'], float(iis[0:4])])
                else:
                    ion_dict[ion]['flag_N'] = 1
                    obj = dict(logN=float(iis[0:4]),
                               sig_logN=float(iis[-4:]), flag_N=1)
                    # Add
                    flag, N, sig = ltaa.sum_logN(ion_dict[ion], obj)
                    ion_dict[ion]['logN'] = N
                    ion_dict[ion]['sig_logN'] = sig
    # Finish
    lls._ionN = pyiau.dict_to_ions(ion_dict)
    lls.Refs.append('Tum11')
    return lls
def load_ml_file(pred_file):
    """ Load the search results from the CNN into a DLASurvey object

    Parameters
    ----------
    pred_file : str
        Path to the JSON file of ML search results (one dict per sightline,
        with 'dlas' and 'subdlas' lists of candidate absorbers).

    Returns
    -------
    ml_llssurvey: LLSSurvey
    ml_dlasusrvey: DLASurvey
    """
    print("Loading {:s}. Please be patient..".format(pred_file))
    # Read
    ml_results = ltu.loadjson(pred_file)
    use_platef = False
    # FIX: initialize use_id -- previously it was only assigned in the
    # else-branch below, so `if use_id:` raised NameError whenever the
    # results carried a 'plate' key (or neither key).
    use_id = False
    if 'plate' in ml_results[0].keys():
        use_platef = True
    else:
        if 'id' in ml_results[0].keys():
            use_id = True
    # Init
    idict = dict(ra=[], dec=[], plate=[], fiber=[])
    if use_platef:
        for key in ['plate', 'fiber', 'mjd']:
            idict[key] = []
    dlasystems = []
    llssystems = []
    # Generate coords to speed things up
    for obj in ml_results:
        for key in ['ra', 'dec']:
            idict[key].append(obj[key])
    ml_coords = SkyCoord(ra=idict['ra'], dec=idict['dec'], unit='deg')
    ra_names = ml_coords.icrs.ra.to_string(unit=u.hour, sep='', pad=True)
    dec_names = ml_coords.icrs.dec.to_string(sep='', pad=True, alwayssign=True)
    vlim = [-500., 500.] * u.km / u.s
    # Dummy coordinate for the systems themselves; names carry the position
    dcoord = SkyCoord(ra=0., dec=0., unit='deg')
    # Loop on list
    didx, lidx = [], []  # indices into ml_coords for DLAs / LLSs
    print("Looping on sightlines..")
    for tt, obj in enumerate(ml_results):
        #if (tt % 100) == 0:
        #    print('tt: {:d}'.format(tt))
        # Sightline
        if use_id:
            plate, fiber = [int(spl) for spl in obj['id'].split('-')]
            idict['plate'].append(plate)
            idict['fiber'].append(fiber)
        # Systems -- ss=0 are DLAs, ss=1 are sub-DLAs (stored as LLS)
        for ss, syskey in enumerate(['dlas', 'subdlas']):
            for idla in obj[syskey]:
                name = 'J{:s}{:s}_z{:.3f}'.format(ra_names[tt],
                                                  dec_names[tt],
                                                  idla['z_dla'])
                if ss == 0:
                    isys = DLASystem(dcoord, idla['z_dla'], vlim,
                                     NHI=idla['column_density'],
                                     zem=obj['z_qso'], name=name)
                else:
                    isys = LLSSystem(dcoord, idla['z_dla'], vlim,
                                     NHI=idla['column_density'],
                                     zem=obj['z_qso'], name=name)
                isys.confidence = idla['dla_confidence']
                isys.s2n = idla['s2n']
                if use_platef:
                    isys.plate = obj['plate']
                    isys.fiber = obj['fiber']
                elif use_id:
                    isys.plate = plate
                    isys.fiber = fiber
                # Save
                if ss == 0:
                    didx.append(tt)
                    dlasystems.append(isys)
                else:
                    lidx.append(tt)
                    llssystems.append(isys)
    # Generate sightline tables
    sightlines = Table()
    sightlines['RA'] = idict['ra']
    sightlines['DEC'] = idict['dec']
    sightlines['PLATE'] = idict['plate']
    sightlines['FIBERID'] = idict['fiber']
    # Surveys
    ml_llssurvey = LLSSurvey()
    ml_llssurvey.sightlines = sightlines.copy()
    ml_llssurvey._abs_sys = llssystems
    ml_llssurvey.coords = ml_coords[np.array(lidx)]
    ml_dlasurvey = DLASurvey()
    ml_dlasurvey.sightlines = sightlines.copy()
    ml_dlasurvey._abs_sys = dlasystems
    ml_dlasurvey.coords = ml_coords[np.array(didx)]
    # Return
    return ml_llssurvey, ml_dlasurvey
def battisti12():
    '''Battisti, A. et al. 2012, ApJ, 744, 93
    HST/COS
    QSO info from Table 1
    Metal columns parsed from Table 3
    NHI from Lya

    Downloads both machine-readable tables on first use; Table 1 gives one
    LLSSystem per row, Table 3 gives per-ion columns (one table column per
    system, in the same order).

    Returns
    -------
    fin_slls : list of LLSSystem
        Only systems with NHI < 20.3 (SLLS) are returned.
    '''
    all_lls = []
    # Grab ASCII files from ApJ (cache locally after first download)
    tab_fils = [pyigm_path + "/data/LLS/Literature/battisti12.tb1.ascii",
                pyigm_path + "/data/LLS/Literature/battisti12.tb3.ascii"]
    urls = ['http://iopscience.iop.org/0004-637X/744/2/93/suppdata/apj413924t1_ascii.txt',
            'http://iopscience.iop.org/0004-637X/744/2/93/suppdata/apj413924t3_ascii.txt']
    for jj, tab_fil in enumerate(tab_fils):
        chk_fil = glob.glob(tab_fil)
        if len(chk_fil) > 0:
            tab_fil = chk_fil[0]
        else:
            url = urls[jj]
            print('LLSSurvey: Grabbing table file from {:s}'.format(url))
            f = urllib2.urlopen(url)
            with open(tab_fil, "wb") as code:
                code.write(f.read())
    # QSO info
    with open(tab_fils[0], 'r') as f:
        flines1 = f.readlines()
    # Grab RA/DEC -- data rows start with 'SD' (SDSS names)
    all_idict = []
    for iline in flines1:
        if iline[0:2] != 'SD':
            continue
        # Parse
        isplit = iline.split('\t')
        name = isplit[0].split(' ')[1]
        radec = name[1:]  # strip the leading 'J'
        zem = float(isplit[1].strip())
        zabs = float(isplit[2].strip())
        # Fixed-width slices of 'xx.xx +/- x.xx'
        # (TODO confirm against the ascii table formatting)
        NHI = float(isplit[3].strip()[0:4])
        sigNHI = np.array([float(isplit[3].strip()[11:])] * 2)
        # Save
        lls = LLSSystem(name=name, radec=radec, zem=zem,
                        zabs=zabs, NHI=NHI, sig_NHI=sigNHI,
                        vlim=[-500, 500] * u.km / u.s)
        #
        all_lls.append(lls)
        all_idict.append({})  # per-system ion dict, filled from Table 3
    # Abundances
    with open(tab_fils[1], 'r') as f:
        flines3 = f.readlines()
    flines3 = flines3[5:]  # skip the header
    ion = None
    for iline in flines3:
        if ion == 'Ni II':  # stop after the last parsed ion
            break
        isplit = iline.split('\t')
        if isplit[0] == 'C II*':  # Skipping CII*
            continue
        # ion -- trim trailing junk after the last 'I'/'V' of the label
        ipos = -1
        while (isplit[0][ipos] not in ['I', 'V']):
            ipos -= 1
        ion = isplit[0][0:ipos + 1 + len(isplit[0])]
        Zion = ltai.name_ion(ion)
        # Loop on systems (one table column per system)
        for kk, iis in enumerate(isplit[1:-1]):
            if iis.strip()[0] == '.':  # no measurement
                continue
            all_idict[kk][ion] = dict(Z=Zion[0], ion=Zion[1], sig_clm=0.)
            if iis[0] == '>':  # lower limit (saturated)
                all_idict[kk][ion]['flg_clm'] = 2
                all_idict[kk][ion]['clm'] = float(iis[1:6])
            elif iis[0] == '<':  # upper limit
                all_idict[kk][ion]['flg_clm'] = 3
                all_idict[kk][ion]['clm'] = float(iis[1:])
            else:  # detection; fixed-width value, error in last 4 chars
                all_idict[kk][ion]['flg_clm'] = 1
                all_idict[kk][ion]['clm'] = float(iis[0:5])
                all_idict[kk][ion]['sig_clm'] = float(iis[-4:])
    # Return SLLS only
    for kk, lls in enumerate(all_lls):
        try:
            lls._ionN = pyiau.dict_to_ions(all_idict[kk])
        except ValueError:
            pdb.set_trace()
        lls.Refs.append('Bat12')
    fin_slls = [ills for ills in all_lls if ills.NHI < 20.3]
    return fin_slls
def json_to_sdss_dlasurvey(json_file, sdss_survey, add_pf=True, debug=False):
    """ Convert JSON output file to a DLASurvey object
    Assumes SDSS bookkeeping for sightlines (i.e. PLATE, FIBER)

    Parameters
    ----------
    json_file : str
        Full path to the JSON results file
    sdss_survey : DLASurvey
        SDSS survey, usually human (e.g. JXP for DR5)
    add_pf : bool, optional
        Add plate/fiber to DLAs in sdss_survey
    debug : bool, optional
        Drop into pdb with matching diagnostics at the end.

    Returns
    -------
    ml_survey : LLSSurvey
        Survey object for the LLS
    """
    print("Loading SDSS Survey from JSON file {:s}".format(json_file))
    # imports
    from pyigm.abssys.dla import DLASystem
    from pyigm.abssys.lls import LLSSystem
    # Fiber key (first matching column name wins)
    for fkey in ['FIBER', 'FIBER_ID', 'FIB']:
        if fkey in sdss_survey.sightlines.keys():
            break
    # Read
    ml_results = ltu.loadjson(json_file)
    use_platef = False
    # FIX: initialize use_id -- previously it was only assigned in the
    # else-branch below, so the `elif use_id:` in the loop raised NameError
    # when neither a 'plate' nor an 'id' key was present.
    use_id = False
    if 'plate' in ml_results[0].keys():
        use_platef = True
    else:
        if 'id' in ml_results[0].keys():
            use_id = True
    # Init
    #idict = dict(plate=[], fiber=[], classification_confidence=[],  # FOR v2
    #             classification=[], ra=[], dec=[])
    idict = dict(ra=[], dec=[])
    if use_platef:
        for key in ['plate', 'fiber', 'mjd']:
            idict[key] = []
    ml_tbl = Table()
    ml_survey = LLSSurvey()
    systems = []
    in_ml = np.array([False] * len(sdss_survey.sightlines))
    # Loop
    for obj in ml_results:
        # Sightline
        for key in idict.keys():
            idict[key].append(obj[key])
        # DLAs
        #if debug:
        #    if (obj['plate'] == 1366) & (obj['fiber'] == 614):
        #        sv_coord = SkyCoord(ra=obj['ra'], dec=obj['dec'], unit='deg')
        #        print("GOT A MATCH IN RESULTS FILE")
        for idla in obj['dlas']:
            """
            dla = DLASystem((sdss_survey.sightlines['RA'][mt[0]],
                             sdss_survey.sightlines['DEC'][mt[0]]),
                            idla['spectrum']/(1215.6701)-1., None,
                            idla['column_density'])
            """
            # Restrict to the SDSS DLA redshift range
            if idla['z_dla'] < 1.8:
                continue
            isys = LLSSystem((obj['ra'], obj['dec']),
                             idla['z_dla'], None,
                             NHI=idla['column_density'],
                             zem=obj['z_qso'])
            isys.confidence = idla['dla_confidence']
            if use_platef:
                isys.plate = obj['plate']
                isys.fiber = obj['fiber']
            elif use_id:
                plate, fiber = [int(spl) for spl in obj['id'].split('-')]
                isys.plate = plate
                isys.fiber = fiber
            # Save
            systems.append(isys)
    # Connect to sightlines
    ml_coord = SkyCoord(ra=idict['ra'], dec=idict['dec'], unit='deg')
    s_coord = SkyCoord(ra=sdss_survey.sightlines['RA'],
                       dec=sdss_survey.sightlines['DEC'], unit='deg')
    idx, d2d, d3d = match_coordinates_sky(s_coord, ml_coord, nthneighbor=1)
    used = d2d < 1. * u.arcsec
    for iidx in np.where(~used)[0]:
        print("Sightline RA={:g}, DEC={:g} was not used".format(
            sdss_survey.sightlines['RA'][iidx],
            sdss_survey.sightlines['DEC'][iidx]))
    # Add plate/fiber to statistical DLAs
    if add_pf:
        dla_coord = sdss_survey.coord
        idx2, d2d, d3d = match_coordinates_sky(dla_coord, s_coord,
                                               nthneighbor=1)
        if np.min(d2d.to('arcsec').value) > 1.:
            raise ValueError("Bad match to sightlines")
        for jj, igd in enumerate(np.where(sdss_survey.mask)[0]):
            dla = sdss_survey._abs_sys[igd]
            try:
                dla.plate = sdss_survey.sightlines['PLATE'][idx2[jj]]
            except IndexError:
                pdb.set_trace()
            dla.fiber = sdss_survey.sightlines[fkey][idx2[jj]]
    # Finish
    ml_survey._abs_sys = systems
    if debug:
        # NOTE(review): sv_coord is only assigned in the commented-out
        # block above, so this branch raises NameError as written.
        ml2_coord = ml_survey.coord
        minsep = np.min(sv_coord.separation(ml2_coord))
        minsep2 = np.min(sv_coord.separation(s_coord))
        tmp = sdss_survey.sightlines[used]
        t_coord = SkyCoord(ra=tmp['RA'], dec=tmp['DEC'], unit='deg')
        minsep3 = np.min(sv_coord.separation(t_coord))
        pdb.set_trace()
    ml_survey.sightlines = sdss_survey.sightlines[used]
    for key in idict.keys():
        ml_tbl[key] = idict[key]
    ml_survey.ml_tbl = ml_tbl
    # Return
    return ml_survey