def dust_transmission(wave, ebv):
    Rv = 3.1
    extinction = ext_odonnell(wave, Rv=Rv)
    return 10**(-Rv * ebv[:, None] * extinction[None, :] / 2.5)
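
# Illustrative usage sketch (not part of the original source): dust_transmission
# broadcasts one E(B-V) value per object against a shared wavelength grid and
# returns a (n_object, n_wave) array of Galactic-dust transmission fractions.
# The wavelength grid and E(B-V) values below are assumptions for demonstration;
# numpy (np) and ext_odonnell are assumed to be imported by the enclosing module.
def _dust_transmission_example():
    wave = np.linspace(3600.0, 9800.0, 1000)   # observer-frame wavelength [Angstrom]
    ebv = np.array([0.01, 0.05, 0.10])         # one E(B-V) per object
    trans = dust_transmission(wave, ebv)       # shape (3, 1000), values in (0, 1]
    return trans
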
def simulate_one_healpix(ifilename, args, model, obsconditions,
                         decam_and_wise_filters, bassmzls_and_wise_filters,
                         footprint_healpix_weight, footprint_healpix_nside,
                         bal=None, sfdmap=None, eboss=None):
    log = get_logger()

    # open filename and extract basic HEALPix information
    pixel, nside, hpxnest = get_healpix_info(ifilename)

    # using global seed (could be None) get seed for this particular pixel
    global_seed = args.seed
    seed = get_pixel_seed(pixel, nside, global_seed)
    # use this seed to generate future random numbers
    np.random.seed(seed)

    # get output file (we will write the spectra for this HEALPix pixel there)
    ofilename = get_spectra_filename(args, nside, pixel)
    # get directory name (we will also write the zbest file there)
    pixdir = os.path.dirname(ofilename)

    # get filename for truth file
    truth_filename = get_truth_filename(args, pixdir, nside, pixel)

    # get filename for zbest file
    zbest_filename = get_zbest_filename(args, pixdir, nside, pixel)

    if not args.overwrite:
        # check whether output exists or not
        if args.zbest:
            if os.path.isfile(ofilename) and os.path.isfile(zbest_filename):
                log.info("skip existing {} and {}".format(ofilename, zbest_filename))
                return
        else:  # only test spectra file
            if os.path.isfile(ofilename):
                log.info("skip existing {}".format(ofilename))
                return

    # create sub-directories if required
    if len(pixdir) > 0:
        if not os.path.isdir(pixdir):
            log.info("Creating dir {}".format(pixdir))
            os.makedirs(pixdir)

    log.info("Read skewers in {}, random seed = {}".format(ifilename, seed))

    # Read transmission from files. It might include DLA information, and it
    # might add metal transmission as well (from the HDU file).
    log.info("Read transmission file {}".format(ifilename))
    trans_wave, transmission, metadata, dla_info = read_lya_skewers(
        ifilename, read_dlas=(args.dla == 'file'),
        add_metals=args.metals_from_file, add_lyb=args.add_LYB)

    ### Add Finger-of-God velocity dispersion before generating the continua
    log.info("Add FOG with sigma {} km/s to quasar redshift".format(args.sigma_kms_fog))
    DZ_FOG = args.sigma_kms_fog/c*(1.+metadata['Z'])*np.random.normal(0, 1, metadata['Z'].size)
    metadata['Z'] += DZ_FOG

    ### Select quasars within the requested redshift range
    w = (metadata['Z'] >= args.zmin) & (metadata['Z'] <= args.zmax)
    transmission = transmission[w]
    metadata = metadata[:][w]
    DZ_FOG = DZ_FOG[w]

    # option to mimic the BOSS+eBOSS survey
    if eboss is not None:
        if args.downsampling or args.desi_footprint:
            raise ValueError("eboss option can not be run with "
                             + "desi_footprint or downsampling")

        # Get the redshift distribution from SDSS
        selection = sdss_subsample_redshift(metadata["RA"], metadata["DEC"], metadata['Z'], eboss['redshift'])
        log.info("Select QSOs in BOSS+eBOSS redshift distribution {} -> {}".format(metadata['Z'].size, selection.sum()))
        if selection.sum() == 0:
            log.warning("No intersection with BOSS+eBOSS redshift distribution")
            return
        transmission = transmission[selection]
        metadata = metadata[:][selection]
        DZ_FOG = DZ_FOG[selection]

        # figure out the density of all quasars
        N_highz = metadata['Z'].size
        # area of healpix pixel, in square degrees
        area_deg2 = healpy.pixelfunc.nside2pixarea(nside, degrees=True)
        input_highz_dens_deg2 = N_highz/area_deg2
        selection = sdss_subsample(metadata["RA"], metadata["DEC"],
                                   input_highz_dens_deg2, eboss['footprint'])
        log.info("Select QSOs in BOSS+eBOSS footprint {} -> {}".format(transmission.shape[0], selection.size))
        if selection.size == 0:
            log.warning("No intersection with BOSS+eBOSS footprint")
            return
        transmission = transmission[selection]
        metadata = metadata[:][selection]
        DZ_FOG = DZ_FOG[selection]

    if args.desi_footprint:
        footprint_healpix = footprint.radec2pix(footprint_healpix_nside, metadata["RA"], metadata["DEC"])
        selection = np.where(footprint_healpix_weight[footprint_healpix] > 0.99)[0]
        log.info("Select QSOs in DESI footprint {} -> {}".format(transmission.shape[0], selection.size))
        if selection.size == 0:
            log.warning("No intersection with DESI footprint")
            return
        transmission = transmission[selection]
        metadata = metadata[:][selection]
        DZ_FOG = DZ_FOG[selection]

    nqso = transmission.shape[0]
    if args.downsampling is not None:
        if args.downsampling <= 0 or args.downsampling > 1:
            log.error("Down sampling fraction={} must be between 0 and 1".format(args.downsampling))
            raise ValueError("Down sampling fraction={} must be between 0 and 1".format(args.downsampling))
        indices = np.where(np.random.uniform(size=nqso) < args.downsampling)[0]
        if indices.size == 0:
            log.warning("Down sampling from {} to 0 (by chance I presume)".format(nqso))
            return
        transmission = transmission[indices]
        metadata = metadata[:][indices]
        DZ_FOG = DZ_FOG[indices]
        nqso = transmission.shape[0]

    if args.nmax is not None:
        if args.nmax < nqso:
            log.info("Limit number of QSOs from {} to nmax={} (random subsample)".format(nqso, args.nmax))
            # take a random subsample; use random.choice instead of random.uniform,
            # which rarely (but occasionally) duplicates QSOs
            indices = np.random.choice(np.arange(nqso), args.nmax, replace=False)
            transmission = transmission[indices]
            metadata = metadata[:][indices]
            DZ_FOG = DZ_FOG[indices]
            nqso = args.nmax

    # In previous versions of the London mocks we needed to enforce F=1 for
    # z > z_qso here, but this is not needed anymore. Moreover, now we also
    # have metal absorption that implies F < 1 for z > z_qso
    # for ii in range(len(metadata)):
    #     transmission[ii][trans_wave > lambda_RF_LYA*(metadata[ii]['Z']+1)] = 1.0

    # if requested, add DLAs to the transmission skewers
    if args.dla is not None:

        # if adding random DLAs, we will need a new random generator
        if args.dla == 'random':
            log.info('Adding DLAs randomly')
            random_state_just_for_dlas = np.random.RandomState(seed)
        elif args.dla == 'file':
            log.info('Adding DLAs from transmission file')
        else:
            log.error("Wrong option for args.dla: " + args.dla)
            sys.exit(1)

        # if adding DLAs, the information will be printed here
        dla_filename = os.path.join(pixdir, "dla-{}-{}.fits".format(nside, pixel))
        dla_NHI, dla_z, dla_qid, dla_id = [], [], [], []

        # identify minimum Lya redshift in transmission files
        min_lya_z = np.min(trans_wave/lambda_RF_LYA - 1)

        # loop over quasars in pixel
        for ii in range(len(metadata)):

            # quasars with z < min_z will not have any DLA in their spectrum
            if min_lya_z > metadata['Z'][ii]:
                continue

            # quasar ID
            idd = metadata['MOCKID'][ii]
            dlas = []

            if args.dla == 'file':
                for dla in dla_info[dla_info['MOCKID'] == idd]:
                    # adding only DLAs with z < zqso
                    if dla['Z_DLA_RSD'] >= metadata['Z'][ii]:
                        continue
                    dlas.append(dict(z=dla['Z_DLA_RSD'], N=dla['N_HI_DLA'], dlaid=dla['DLAID']))
                transmission_dla = dla_spec(trans_wave, dlas)
            elif args.dla == 'random':
                dlas, transmission_dla = insert_dlas(trans_wave, metadata['Z'][ii], rstate=random_state_just_for_dlas)
                for idla in dlas:
                    # offset to get unique DLA ids, same format as DLAs from file
                    idla['dlaid'] += idd*1000
            # multiply transmissions and store information for the DLA file
            if len(dlas) > 0:
                transmission[ii] = transmission_dla * transmission[ii]
                dla_z += [idla['z'] for idla in dlas]
                dla_NHI += [idla['N'] for idla in dlas]
                dla_id += [idla['dlaid'] for idla in dlas]
                dla_qid += [idd]*len(dlas)

        log.info('Added {} DLAs'.format(len(dla_id)))

        # write file with DLA information
        if len(dla_id) > 0:
            dla_meta = Table()
            dla_meta['NHI'] = dla_NHI
            dla_meta['Z_DLA'] = dla_z  # this is Z_DLA_RSD in the transmission file
            dla_meta['TARGETID'] = dla_qid
            dla_meta['DLAID'] = dla_id
            hdu_dla = pyfits.convenience.table_to_hdu(dla_meta)
            hdu_dla.name = "DLA_META"
            del dla_meta
            log.info("DLA metadata to be saved in {}".format(truth_filename))
        else:
            hdu_dla = pyfits.PrimaryHDU()
            hdu_dla.name = "DLA_META"

    # if requested, extend transmission skewers to cover the full spectrum
    if args.target_selection or args.bbflux:
        wanted_min_wave = 3329.   # needed to compute magnitudes for decam2014-r (one could have trimmed the transmission file ...)
        wanted_max_wave = 55501.  # needed to compute magnitudes for wise2010-W2

        if trans_wave[0] > wanted_min_wave:
            log.info("Increase wavelength range from {}:{} to {}:{} to compute magnitudes".format(
                int(trans_wave[0]), int(trans_wave[-1]), int(wanted_min_wave), int(trans_wave[-1])))
            # pad with ones at short wavelength, we assume F = 1 for z <~ 1.7
            # we don't need any wavelength resolution here
            new_trans_wave = np.append([wanted_min_wave, trans_wave[0]-0.01], trans_wave)
            new_transmission = np.ones((transmission.shape[0], new_trans_wave.size))
            new_transmission[:, 2:] = transmission
            trans_wave = new_trans_wave
            transmission = new_transmission

        if trans_wave[-1] < wanted_max_wave:
            log.info("Increase wavelength range from {}:{} to {}:{} to compute magnitudes".format(
                int(trans_wave[0]), int(trans_wave[-1]), int(trans_wave[0]), int(wanted_max_wave)))
            # pad with ones at long wavelength because we assume F = 1
            coarse_dwave = 2.  # we don't care about resolution, we just need a decent QSO spectrum, there is no IGM transmission in this range
            n = int((wanted_max_wave-trans_wave[-1])/coarse_dwave)+1
            new_trans_wave = np.append(trans_wave, np.linspace(trans_wave[-1]+coarse_dwave, trans_wave[-1]+coarse_dwave*(n+1), n))
            new_transmission = np.ones((transmission.shape[0], new_trans_wave.size))
            new_transmission[:, :trans_wave.size] = transmission
            trans_wave = new_trans_wave
            transmission = new_transmission

    # whether to use QSO or SIMQSO to generate quasar continua.  Simulate
    # spectra in the north vs south separately because they're on different
    # photometric systems.
    south = np.where(is_south(metadata['DEC']))[0]
    north = np.where(~is_south(metadata['DEC']))[0]
    meta, qsometa = empty_metatable(nqso, objtype='QSO', simqso=not args.no_simqso)
    if args.no_simqso:
        log.info("Simulate {} QSOs with QSO templates".format(nqso))
        tmp_qso_flux = np.zeros([nqso, len(model.eigenwave)], dtype='f4')
        tmp_qso_wave = np.zeros_like(tmp_qso_flux)
    else:
        log.info("Simulate {} QSOs with SIMQSO templates".format(nqso))
        tmp_qso_flux = np.zeros([nqso, len(model.basewave)], dtype='f4')
        tmp_qso_wave = model.basewave

    for these, issouth in zip((north, south), (False, True)):

        # number of quasars in these
        nt = len(these)
        if nt <= 0:
            continue

        if eboss is not None:
            # for eBOSS, generate only quasars with r < 22
            magrange = (17.0, 21.3)
            _tmp_qso_flux, _tmp_qso_wave, _meta, _qsometa \
                = model.make_templates(nmodel=nt,
                                       redshift=metadata['Z'][these], magrange=magrange,
                                       lyaforest=False, nocolorcuts=True,
                                       noresample=True, seed=seed, south=issouth)
        else:
            _tmp_qso_flux, _tmp_qso_wave, _meta, _qsometa \
                = model.make_templates(nmodel=nt,
                                       redshift=metadata['Z'][these],
                                       lyaforest=False, nocolorcuts=True,
                                       noresample=True, seed=seed, south=issouth)

        _meta['TARGETID'] = metadata['MOCKID'][these]
        _qsometa['TARGETID'] = metadata['MOCKID'][these]
        meta[these] = _meta
        qsometa[these] = _qsometa
        tmp_qso_flux[these, :] = _tmp_qso_flux

        if args.no_simqso:
            tmp_qso_wave[these, :] = _tmp_qso_wave

    log.info("Resample to transmission wavelength grid")
    qso_flux = np.zeros((tmp_qso_flux.shape[0], trans_wave.size))
    if args.no_simqso:
        for q in range(tmp_qso_flux.shape[0]):
            qso_flux[q] = np.interp(trans_wave, tmp_qso_wave[q], tmp_qso_flux[q])
    else:
        for q in range(tmp_qso_flux.shape[0]):
            qso_flux[q] = np.interp(trans_wave, tmp_qso_wave, tmp_qso_flux[q])

    tmp_qso_flux = qso_flux
    tmp_qso_wave = trans_wave

    # if requested, add BAL features to the quasar continua
    if args.balprob:
        if args.balprob <= 1. and args.balprob > 0:
            log.info("Adding BALs with probability {}".format(args.balprob))
            # save current random state
            rnd_state = np.random.get_state()
            tmp_qso_flux, meta_bal = bal.insert_bals(tmp_qso_wave, tmp_qso_flux, metadata['Z'],
                                                     balprob=args.balprob, seed=seed)
            # restore random state to get the same random numbers later
            # as when we don't insert BALs
            np.random.set_state(rnd_state)
            meta_bal['TARGETID'] = metadata['MOCKID']
            w = meta_bal['TEMPLATEID'] != -1
            meta_bal = meta_bal[:][w]
            hdu_bal = pyfits.convenience.table_to_hdu(meta_bal)
            hdu_bal.name = "BAL_META"
            del meta_bal
        else:
            balstr = str(args.balprob)
            log.error("BAL probability is not between 0 and 1 : " + balstr)
            sys.exit(1)

    # Multiply quasar continua by the transmitted flux fraction
    # (at this point the transmission file might include Ly-beta, metals and DLAs)
    log.info("Apply transmitted flux fraction")
    if not args.no_transmission:
        tmp_qso_flux = apply_lya_transmission(tmp_qso_wave, tmp_qso_flux,
                                              trans_wave, transmission)

    # if requested, compute metal transmission on the fly
    # (if not included already from the transmission file)
    if args.metals is not None:
        if args.metals_from_file:
            log.error('you cannot add metals twice')
            raise ValueError('you cannot add metals twice')
        if args.no_transmission:
            log.error('you cannot add metals if asking for no-transmission')
            raise ValueError('can not add metals if using no-transmission')
        lstMetals = ''
        for m in args.metals:
            lstMetals += m + ', '
        log.info("Apply metals: {}".format(lstMetals[:-2]))
        tmp_qso_flux = apply_metals_transmission(tmp_qso_wave, tmp_qso_flux,
                                                 trans_wave, transmission, args.metals)

    # if requested, compute magnitudes and apply target selection.  Need to do
    # this calculation separately for QSOs in the north vs south.
    bbflux = None
    if args.target_selection or args.bbflux:
        bands = ['FLUX_G', 'FLUX_R', 'FLUX_Z', 'FLUX_W1', 'FLUX_W2']
        bbflux = dict()
        bbflux['SOUTH'] = is_south(metadata['DEC'])
        for band in bands:
            bbflux[band] = np.zeros(nqso)
        # need to recompute the magnitudes to account for lya transmission
        log.info("Compute QSO magnitudes")

        for these, filters in zip((~bbflux['SOUTH'], bbflux['SOUTH']),
                                  (bassmzls_and_wise_filters, decam_and_wise_filters)):
            if np.count_nonzero(these) > 0:
                maggies = filters.get_ab_maggies(1e-17 * tmp_qso_flux[these, :], tmp_qso_wave)
                for band, filt in zip(bands, maggies.colnames):
                    bbflux[band][these] = np.ma.getdata(1e9 * maggies[filt])  # nanomaggies

    if args.target_selection:
        log.info("Apply target selection")
        isqso = np.ones(nqso, dtype=bool)
        for these, issouth in zip((~bbflux['SOUTH'], bbflux['SOUTH']), (False, True)):
            if np.count_nonzero(these) > 0:
                # optical cuts only if using QSO vs SIMQSO
                isqso[these] &= isQSO_colors(gflux=bbflux['FLUX_G'][these],
                                             rflux=bbflux['FLUX_R'][these],
                                             zflux=bbflux['FLUX_Z'][these],
                                             w1flux=bbflux['FLUX_W1'][these],
                                             w2flux=bbflux['FLUX_W2'][these],
                                             south=issouth, optical=args.no_simqso)

        log.info("Target selection: {}/{} QSOs selected".format(np.sum(isqso), nqso))
        selection = np.where(isqso)[0]
        if selection.size == 0:
            return
        tmp_qso_flux = tmp_qso_flux[selection]
        metadata = metadata[:][selection]
        meta = meta[:][selection]
        qsometa = qsometa[:][selection]
        DZ_FOG = DZ_FOG[selection]

        for band in bands:
            bbflux[band] = bbflux[band][selection]
        bbflux['SOUTH'] = bbflux['SOUTH'][selection]

        nqso = selection.size

    log.info("Resample to a linear wavelength grid (needed by DESI sim.)")
    # careful integration of bins, not just a simple interpolation
    qso_wave = np.linspace(args.wmin, args.wmax, int((args.wmax-args.wmin)/args.dwave)+1)
    qso_flux = np.zeros((tmp_qso_flux.shape[0], qso_wave.size))
    for q in range(tmp_qso_flux.shape[0]):
        qso_flux[q] = resample_flux(qso_wave, tmp_qso_wave, tmp_qso_flux[q])

    log.info("Simulate DESI observation and write output file")
    if "MOCKID" in metadata.dtype.names:
        # log.warning("Using MOCKID as TARGETID")
        targetid = np.array(metadata["MOCKID"]).astype(int)
    elif "ID" in metadata.dtype.names:
        log.warning("Using ID as TARGETID")
        targetid = np.array(metadata["ID"]).astype(int)
    else:
        log.warning("No TARGETID")
        targetid = None

    specmeta = {"HPXNSIDE": nside, "HPXPIXEL": pixel, "HPXNEST": hpxnest}

    if args.target_selection or args.bbflux:
        fibermap_columns = dict(
            FLUX_G=bbflux['FLUX_G'],
            FLUX_R=bbflux['FLUX_R'],
            FLUX_Z=bbflux['FLUX_Z'],
            FLUX_W1=bbflux['FLUX_W1'],
            FLUX_W2=bbflux['FLUX_W2'],
        )
        photsys = np.full(len(bbflux['FLUX_G']), 'N', dtype='S1')
        photsys[bbflux['SOUTH']] = b'S'
        fibermap_columns['PHOTSYS'] = photsys
    else:
        fibermap_columns = None

    # Attenuate the spectra for Galactic extinction
    if sfdmap is not None:
        Rv = 3.1  # set by default
        indx = np.arange(metadata['RA'].size)
        extinction = Rv*ext_odonnell(qso_wave)
        EBV = sfdmap.ebv(metadata['RA'], metadata['DEC'], scaling=1.0)
        qso_flux *= 10**(-0.4 * EBV[indx, np.newaxis] * extinction)
        if fibermap_columns is not None:
            fibermap_columns['EBV'] = EBV
        EBV0 = 0.0
        EBV_med = np.median(EBV)
        Ag = 3.303 * (EBV_med - EBV0)
        exptime_fact = np.power(10.0, (2.0 * Ag / 2.5))
        obsconditions['EXPTIME'] *= exptime_fact
        log.info("Dust extinction added")
        log.info('exposure time adjusted to {}'.format(obsconditions['EXPTIME']))

    sim_spectra(qso_wave, qso_flux, args.program, obsconditions=obsconditions,
                spectra_filename=ofilename, sourcetype="qso",
                skyerr=args.skyerr, ra=metadata["RA"], dec=metadata["DEC"], targetid=targetid,
                meta=specmeta, seed=seed, fibermap_columns=fibermap_columns,
                use_poisson=False)  # use_poisson=False to get reproducible results

    ### Keep input redshift
    Z_spec = metadata['Z'].copy()
    Z_input = metadata['Z'].copy() - DZ_FOG

    ### Add a shift to the redshift, simulating the systematic imprecision of redrock
    DZ_sys_shift = args.shift_kms_los/c*(1.+Z_input)
    log.info('Added a shift of {} km/s to the redshift'.format(args.shift_kms_los))
    meta['REDSHIFT'] += DZ_sys_shift
    metadata['Z'] += DZ_sys_shift

    ### Add a shift to the redshift, simulating the statistical imprecision of redrock
    if args.gamma_kms_zfit:
        log.info("Added zfit error with gamma {} to zbest".format(args.gamma_kms_zfit))
        DZ_stat_shift = mod_cauchy(loc=0, scale=args.gamma_kms_zfit, size=nqso, cut=3000)/c*(1.+Z_input)
        meta['REDSHIFT'] += DZ_stat_shift
        metadata['Z'] += DZ_stat_shift

    ## Write the truth file, including metadata for DLAs and BALs
    log.info('Writing a truth file {}'.format(truth_filename))
    meta.rename_column('REDSHIFT', 'Z')
    meta.add_column(Column(Z_spec, name='TRUEZ'))
    meta.add_column(Column(Z_input, name='Z_INPUT'))
    meta.add_column(Column(DZ_FOG, name='DZ_FOG'))
    meta.add_column(Column(DZ_sys_shift, name='DZ_SYS'))
    if args.gamma_kms_zfit:
        meta.add_column(Column(DZ_stat_shift, name='DZ_STAT'))
    if 'Z_noRSD' in metadata.dtype.names:
        meta.add_column(Column(metadata['Z_noRSD'], name='Z_NORSD'))
    else:
        log.info('Z_noRSD field not present in transmission file. Z_NORSD not saved to truth file')

    # Save global seed and pixel seed to the primary header
    hdr = pyfits.Header()
    hdr['GSEED'] = global_seed
    hdr['PIXSEED'] = seed
    hdu = pyfits.convenience.table_to_hdu(meta)
    hdu.header['EXTNAME'] = 'TRUTH'
    hduqso = pyfits.convenience.table_to_hdu(qsometa)
    hduqso.header['EXTNAME'] = 'QSO_META'
    hdulist = pyfits.HDUList([pyfits.PrimaryHDU(header=hdr), hdu, hduqso])
    if args.dla:
        hdulist.append(hdu_dla)
    if args.balprob:
        hdulist.append(hdu_bal)
    hdulist.writeto(truth_filename, overwrite=True)
    hdulist.close()

    if args.zbest:
        log.info("Read fibermap")
        fibermap = read_fibermap(ofilename)
        log.info("Writing a zbest file {}".format(zbest_filename))
        columns = [
            ('CHI2', 'f8'),
            ('COEFF', 'f8', (4,)),
            ('Z', 'f8'),
            ('ZERR', 'f8'),
            ('ZWARN', 'i8'),
            ('SPECTYPE', (str, 96)),
            ('SUBTYPE', (str, 16)),
            ('TARGETID', 'i8'),
            ('DELTACHI2', 'f8'),
            ('BRICKNAME', (str, 8))]
        zbest = Table(np.zeros(nqso, dtype=columns))
        zbest['CHI2'][:] = 0.
        zbest['Z'][:] = metadata['Z']
        zbest['ZERR'][:] = 0.
        zbest['ZWARN'][:] = 0
        zbest['SPECTYPE'][:] = 'QSO'
        zbest['SUBTYPE'][:] = ''
        zbest['TARGETID'][:] = metadata['MOCKID']
        zbest['DELTACHI2'][:] = 25.
        hzbest = pyfits.convenience.table_to_hdu(zbest)
        hzbest.name = 'ZBEST'
        hfmap = pyfits.convenience.table_to_hdu(fibermap)
        hfmap.name = 'FIBERMAP'
        hdulist = pyfits.HDUList([pyfits.PrimaryHDU(), hzbest, hfmap])
        hdulist.writeto(zbest_filename, overwrite=True)
        hdulist.close()  # see if this helps with memory issue
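
# Hedged driver sketch (not part of the original source): simulate_one_healpix
# processes one per-HEALPix transmission file, so a caller typically loops over
# (or parallelizes over) the list of input files. All other inputs (parsed
# command-line args, the template model, observing conditions, filter curves,
# footprint weights) are assumed to have been built elsewhere; only the call
# pattern is illustrated, and the function name below is hypothetical.
def _run_all_pixels_example(transmission_files, args, model, obsconditions,
                            decam_and_wise_filters, bassmzls_and_wise_filters,
                            footprint_healpix_weight, footprint_healpix_nside,
                            bal=None, sfdmap=None, eboss=None):
    for ifilename in transmission_files:
        simulate_one_healpix(ifilename, args, model, obsconditions,
                             decam_and_wise_filters, bassmzls_and_wise_filters,
                             footprint_healpix_weight, footprint_healpix_nside,
                             bal=bal, sfdmap=sfdmap, eboss=eboss)
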
def dust_transmission(wave, ebv):
    # Variant taking a single E(B-V) value (or an array already matching `wave`):
    # the extinction curve is applied element-wise rather than broadcast over a
    # vector of per-object E(B-V) values as in the version above.
    Rv = 3.1
    extinction = ext_odonnell(wave, Rv=Rv)
    return 10**(-Rv * extinction * ebv / 2.5)