def add_ssa(hdf, dset):
    """Attach SSA (Simple Spectral Access) metadata to the group's meta dataset.

    Writes two JSON-encoded SSA dicts: one for the flux-calibrated COS
    spectra and one for the continuum-normalized HIRES spectra.

    Parameters
    ----------
    hdf : h5py.File
        Open HDF5 database file.
    dset : str
        Group name within the file.
    """
    from specdb.ssa import default_fields
    title = '{:s}: Quasar Spectra from the COS-Halos Survey'.format(dset)
    meta_attrs = hdf[dset]['meta'].attrs
    # COS: absolute flux-calibrated f_lambda
    cos_ssa = default_fields(title, flux='flambda', fxcalib='ABSOLUTE')
    meta_attrs['SSA_COS'] = json.dumps(ltu.jsonify(cos_ssa))
    # HIRES: continuum-normalized
    hires_ssa = default_fields(title, flux='normalized')
    meta_attrs['SSA_HIRES'] = json.dumps(ltu.jsonify(hires_ssa))
def add_ssa(hdf, dset):
    """Record SSA metadata on the group's meta dataset.

    Two SSA attribute entries are stored as JSON strings: 'SSA_COS' for
    the flux-calibrated COS data and 'SSA_HIRES' for normalized HIRES data.

    Parameters
    ----------
    hdf : h5py.File
        Open HDF5 database file.
    dset : str
        Group name within the file.
    """
    from specdb.ssa import default_fields
    survey_title = '{:s}: Quasar Spectra from the COS-Halos Survey'.format(dset)
    attrs = hdf[dset]['meta'].attrs
    for key, kwargs in (('SSA_COS', dict(flux='flambda', fxcalib='ABSOLUTE')),
                        ('SSA_HIRES', dict(flux='normalized'))):
        sdict = default_fields(survey_title, **kwargs)
        attrs[key] = json.dumps(ltu.jsonify(sdict))
def test_default_fields():
    """Check the SSA dict defaults and the flux/fxcalib overrides."""
    title = 'BOSS: DR12 Quasars'
    # Defaults: dimensionless (normalized) flux vs. wavelength in Angstrom
    defaults = spdb_ssa.default_fields(title)
    assert defaults['FluxUcd'] == 'arith.ratio;phot.flux.density'
    assert defaults['FluxUnit'] == ''
    assert defaults['SpecUcd'] == 'em.wl'
    assert defaults['SpecUnit'] == 'Angstrom'
    # flux='flambda' switches the flux UCD
    flam = spdb_ssa.default_fields(title, flux='flambda')
    assert flam['FluxUcd'] == 'phot.fluDens;em.wl'
    # fxcalib propagates into FluxCalib
    calib = spdb_ssa.default_fields(title, flux='flambda', fxcalib='ABSOLUTE')
    assert calib['FluxCalib'] == 'ABSOLUTE'
def add_ssa(hdf, dset):
    """Attach SSA (Simple Spectral Access) metadata to the group's meta dataset.

    Parameters
    ----------
    hdf : h5py.File
        Open HDF5 database file.
    dset : str
        Group name within the file.
    """
    from specdb.ssa import default_fields
    # BUG FIX: the original literal 'Dall''Aglio' is two adjacent string
    # literals ('Dall' + 'Aglio'), which Python concatenates to "DallAglio",
    # silently dropping the apostrophe. Use double quotes so the name renders
    # correctly as Dall'Aglio.
    Title = "{:s}: Dall'Aglio et al. (2008) compilation of VLT/UVES spectra".format(dset)
    ssa_dict = default_fields(Title, flux='flambda')
    hdf[dset]['meta'].attrs['SSA'] = json.dumps(ltu.jsonify(ssa_dict))
def add_ssa(hdf, dset):
    """Attach SSA (Simple Spectral Access) metadata to the group's meta dataset.

    Parameters
    ----------
    hdf : h5py.File
        Open HDF5 database file.
    dset : str
        Group name within the file.
    """
    from specdb.ssa import default_fields
    label = '{:s}: HST UV spectra for surveying LLS and DLAs'.format(dset)
    # Flux-calibrated f_lambda spectra
    sdict = default_fields(label, flux='flambda', fxcalib='ABSOLUTE')
    payload = json.dumps(ltu.jsonify(sdict))
    hdf[dset]['meta'].attrs['SSA'] = payload
def add_ssa(hdf, dset):
    """Store SSA metadata for the SDSS DR7 quasar group.

    Parameters
    ----------
    hdf : h5py.File
        Open HDF5 database file.
    dset : str
        Group name within the file.
    """
    from specdb.ssa import default_fields
    group_title = '{:s}: SDSS DR7 Quasars'.format(dset)
    # Absolute flux calibration for these survey spectra
    fields = default_fields(group_title, flux='flambda', fxcalib='ABSOLUTE')
    hdf[dset]['meta'].attrs['SSA'] = json.dumps(ltu.jsonify(fields))
def add_ssa(hdf, dset):
    """Store SSA metadata for the KODIAQ DR1 group.

    Parameters
    ----------
    hdf : h5py.File
        Open HDF5 database file.
    dset : str
        Group name within the file.
    """
    from specdb.ssa import default_fields
    kodiaq_title = '{:s}: Keck/HIRES KODIAQ DR1'.format(dset)
    # KODIAQ spectra are continuum-normalized
    fields = default_fields(kodiaq_title, flux='normalized')
    hdf[dset]['meta'].attrs['SSA'] = json.dumps(ltu.jsonify(fields))
def add_ssa(hdf, dset):
    """Attach SSA (Simple Spectral Access) metadata to the group's meta dataset.

    Parameters
    ----------
    hdf : h5py.File
        Open HDF5 database file.
    dset : str
        Group name within the file.
    """
    from specdb.ssa import default_fields
    # BUG FIX: the original wrote 'Dall' 'Aglio' — two adjacent literals that
    # Python concatenates to "DallAglio", losing the apostrophe in the
    # author's name. Double quotes let the apostrophe appear literally.
    Title = "{:s}: Dall'Aglio et al. (2008) compilation of VLT/UVES spectra".format(dset)
    ssa_dict = default_fields(Title, flux='flambda')
    hdf[dset]['meta'].attrs['SSA'] = json.dumps(ltu.jsonify(ssa_dict))
def add_ssa(hdf, dset):
    """Store SSA metadata for the XQ-100 survey group.

    Parameters
    ----------
    hdf : h5py.File
        Open HDF5 database file.
    dset : str
        Group name within the file.
    """
    from specdb.ssa import default_fields
    xq100_title = '{:s}: The XQ-100 Survey of 100 z>3 quasars with VLT/XShooter'.format(dset)
    fields = default_fields(xq100_title, flux='flambda')
    serialized = json.dumps(ltu.jsonify(fields))
    hdf[dset]['meta'].attrs['SSA'] = serialized
def add_ssa(hdf, dset):
    """Store SSA metadata for the Keck/ESI high-z DLA survey group.

    Parameters
    ----------
    hdf : h5py.File
        Open HDF5 database file.
    dset : str
        Group name within the file.
    """
    from specdb.ssa import default_fields
    esi_title = '{:s}: The Keck/ESI Survey for high-z DLAs'.format(dset)
    fields = default_fields(esi_title, flux='flambda')
    hdf[dset]['meta'].attrs['SSA'] = json.dumps(ltu.jsonify(fields))
def add_ssa(hdf, dset):
    """Write SSA metadata onto the meta dataset of the given group.

    Parameters
    ----------
    hdf : h5py.File
        Open HDF5 database file.
    dset : str
        Group name within the file.
    """
    from specdb.ssa import default_fields
    survey = '{:s}: The Keck/ESI Survey for high-z DLAs'.format(dset)
    ssa_fields = default_fields(survey, flux='flambda')
    encoded = json.dumps(ltu.jsonify(ssa_fields))
    hdf[dset]['meta'].attrs['SSA'] = encoded
def add_ssa(hdf, dset):
    """Store SSA metadata for the Giant Gemini GMOS survey group.

    Parameters
    ----------
    hdf : h5py.File
        Open HDF5 database file.
    dset : str
        Group name within the file.
    """
    from specdb.ssa import default_fields
    ggg_title = '{:s}: Giant Gemini GMOS Survey of z>4 quasars'.format(dset)
    fields = default_fields(ggg_title, flux='flambda')
    hdf[dset]['meta'].attrs['SSA'] = json.dumps(ltu.jsonify(fields))
def add_ssa(hdf, dset):
    """Store SSA metadata for the Cooksey et al. (2010) HST/FUSE group.

    Parameters
    ----------
    hdf : h5py.File
        Open HDF5 database file.
    dset : str
        Group name within the file.
    """
    from specdb.ssa import default_fields
    cooksey_title = ('{:s}: HST and FUSE spectra of AGN and Quasars '
                     'by Cooksey et al. (2010)'.format(dset))
    fields = default_fields(cooksey_title, flux='flambda')
    hdf[dset]['meta'].attrs['SSA'] = json.dumps(ltu.jsonify(fields))
def add_ssa(hdf, dset):
    """Store SSA metadata for the Magellan DLA survey group.

    Parameters
    ----------
    hdf : h5py.File
        Open HDF5 database file.
    dset : str
        Group name within the file.
    """
    from specdb.ssa import default_fields
    magellan_title = '{:s}: The Magellan uniform survey of damped Lya systems'.format(dset)
    # These spectra are continuum-normalized rather than flux-calibrated
    fields = default_fields(magellan_title, flux='normalized')
    hdf[dset]['meta'].attrs['SSA'] = json.dumps(ltu.jsonify(fields))
def add_ssa(hdf, dset):
    """Write SSA metadata onto the meta dataset of the GGG group.

    Parameters
    ----------
    hdf : h5py.File
        Open HDF5 database file.
    dset : str
        Group name within the file.
    """
    from specdb.ssa import default_fields
    survey = '{:s}: Giant Gemini GMOS Survey of z>4 quasars'.format(dset)
    ssa_fields = default_fields(survey, flux='flambda')
    encoded = json.dumps(ltu.jsonify(ssa_fields))
    hdf[dset]['meta'].attrs['SSA'] = encoded
def add_ssa(hdf, dset):
    """Write SSA metadata onto the meta dataset of the XQ-100 group.

    Parameters
    ----------
    hdf : h5py.File
        Open HDF5 database file.
    dset : str
        Group name within the file.
    """
    from specdb.ssa import default_fields
    survey = '{:s}: The XQ-100 Survey of 100 z>3 quasars with VLT/XShooter'.format(dset)
    ssa_fields = default_fields(survey, flux='flambda')
    hdf[dset]['meta'].attrs['SSA'] = json.dumps(ltu.jsonify(ssa_fields))
def mk_db(dbname, tree, outfil, iztbl, version='v00', id_key='PRIV_ID',
          publisher='Unknown', **kwargs):
    """ Generate the DB

    Walks every directory branch under `tree`, builds meta tables for the
    FITS files found there, assigns catalog IDs, ingests the spectra into
    an HDF5 file, and finally writes the top-level database tables.

    Parameters
    ----------
    dbname : str
      Name for the database
    tree : str
      Path to top level of the tree of FITS files
      Typically, each branch in the tree corresponds to a single instrument
    outfil : str
      Output file name for the hdf5 file
    iztbl : Table or str
      If Table, see meta() docs for details on its format
      If str, it must be 'igmspec' and the user must have that DB downloaded
    version : str, optional
      Version code
    id_key : str, optional
      Column name used for the private catalog IDs
    publisher : str, optional
      Publisher string recorded in the output file
    **kwargs
      Passed through to mk_meta() and ingest_spectra()

    Returns
    -------
    """
    from specdb import defs
    # ztbl -- resolve the redshift table from either a Table or 'igmspec'
    if isinstance(iztbl, str):
        if iztbl == 'igmspec':
            from specdb.specdb import IgmSpec
            igmsp = IgmSpec()
            ztbl = Table(igmsp.idb.hdf['quasars'][...])
        # NOTE(review): a str value other than 'igmspec' leaves ztbl unbound
        # and will raise NameError at mk_meta below — confirm intended.
    elif isinstance(iztbl, Table):
        ztbl = iztbl
    else:
        raise IOError("Bad type for ztbl")
    # Find the branches (one per instrument/dataset, processed in sorted order)
    branches = glob.glob(tree+'/*')
    branches.sort()
    # HDF5 file (opened for writing; assumed closed downstream by write_hdf)
    hdf = h5py.File(outfil,'w')
    # Defs
    zpri = defs.z_priority()
    gdict = {}  # group name -> group bit flag, filled during the loop
    # Main DB Table
    maindb, tkeys = spbu.start_maindb(id_key)
    # MAIN LOOP -- one pass per branch directory
    for ss,branch in enumerate(branches):
        # Skip files (only directories are dataset branches)
        if not os.path.isdir(branch):
            continue
        print('Working on branch: {:s}'.format(branch))
        # Files -- spectra plus optional meta/meta-table/SSA sidecar files
        fits_files, out_tup = grab_files(branch)
        meta_file, mtbl_file, ssa_file = out_tup
        # Meta -- defaults, possibly overridden by the branch's JSON meta file
        maxpix, phead, mdict, stype = 10000, None, None, 'QSO'
        if meta_file is not None:
            # Load
            meta_dict = ltu.loadjson(meta_file)
            # Maxpix
            if 'maxpix' in meta_dict.keys():
                maxpix = meta_dict['maxpix']
            # STYPE
            if 'stype' in meta_dict.keys():
                stype = meta_dict['stype']
            # Parse header
            if 'parse_head' in meta_dict.keys():
                phead = meta_dict['parse_head']
            if 'meta_dict' in meta_dict.keys():
                mdict = meta_dict['meta_dict']
        full_meta = mk_meta(fits_files, ztbl, mtbl_file=mtbl_file,
                            parse_head=phead, mdict=mdict, **kwargs)
        # Update group dict (flag_g is this group's bit flag)
        group_name = branch.split('/')[-1]
        flag_g = spbu.add_to_group_dict(group_name, gdict)
        # IDs -- first group seeds the main table; later groups are matched in
        maindb = add_ids(maindb, full_meta, flag_g, tkeys, 'PRIV_ID',
                         first=(flag_g==1))
        # Ingest
        ingest_spectra(hdf, group_name, full_meta, max_npix=maxpix, **kwargs)
        # SSA -- optional per-branch Simple Spectral Access metadata
        if ssa_file is not None:
            user_ssa = ltu.loadjson(ssa_file)
            ssa_dict = default_fields(user_ssa['Title'],
                                      flux=user_ssa['flux'],
                                      fxcalib=user_ssa['fxcalib'])
            hdf[group_name]['meta'].attrs['SSA'] = json.dumps(ltu.jsonify(ssa_dict))
    # Check stacking -- drop into the debugger rather than write bad meta
    if not spbu.chk_vstack(hdf):
        print("Meta data will not stack using specdb.utils.clean_vstack")
        print("Proceed to write at your own risk..")
        pdb.set_trace()
    # Write
    write_hdf(hdf, str(dbname), maindb, zpri, gdict, version,
              Publisher=publisher)
    print("Wrote {:s} DB file".format(outfil))