Example 1
def bgs_sim_spectra(sim,
                    ref_obsconditions,
                    simdir,
                    overwrite=False,
                    verbose=False):
    """Generate spectra for a given set of simulation parameters with 
    the option of overwriting files.

    """
    from desisim.scripts.quickspectra import sim_spectra

    rand = np.random.RandomState(sim['seed'])
    BGS_template_maker = BGStemplates(rand=rand, verbose=verbose)

    # Generate the observing conditions table.
    simdata = bgs_write_simdata(sim,
                                ref_obsconditions,
                                simdir,
                                rand,
                                overwrite=overwrite)
    randseeds = rand.randint(0, 2**14, len(simdata)).astype(int)
    for exp, expdata in enumerate(simdata):
        randseed = randseeds[exp]
        # Generate the observing conditions dictionary.
        obs = simdata2obsconditions(expdata)

        # Generate the rest-frame templates.  Currently not writing out the rest-frame
        # templates but we could.
        flux, wave, meta = bgs_make_templates(sim, rand, BGS_template_maker)
        redshifts = np.asarray(meta['REDSHIFT'])
        truefile = os.path.join(
            simdir, sim['suffix'],
            'bgs-{}-{:03}-true.fits'.format(sim['suffix'], exp))
        if overwrite or not os.path.isfile(truefile):
            write_templates(truefile, flux, wave, meta, overwrite=True)

        spectrafile = os.path.join(
            simdir, sim['suffix'],
            'bgs-{}-{:03}.fits'.format(sim['suffix'], exp))
        if overwrite or not os.path.isfile(spectrafile):
            sourcetypes = np.array(["bgs" for i in range(sim['nspec'])])
            sim_spectra(wave,
                        flux,
                        'bgs',
                        spectrafile,
                        redshift=redshifts,
                        obsconditions=obs,
                        sourcetype=sourcetypes,
                        seed=randseed,
                        expid=exp)
        else:
            print('File {} exists...skipping.'.format(spectrafile))
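A minimal driver sketch for the example above. The contents of `sim` and `ref_obsconditions` are assumptions inferred from the keys used in the function body ('suffix', 'seed', 'nspec' and a quickspectra-style observing-conditions dictionary); the real setup in the surrounding module may define more fields.

# Hypothetical call, for illustration only; `sim` likely carries additional
# per-exposure parameters consumed by bgs_write_simdata.
sim = dict(suffix='bright', seed=555, nspec=100)
ref_obsconditions = dict(AIRMASS=1.3, EXPTIME=300, SEEING=1.1,
                         MOONALT=-60, MOONFRAC=0.0, MOONSEP=180)
bgs_sim_spectra(sim, ref_obsconditions, simdir='/path/to/simdir', overwrite=True)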
Example 2
def sim_lenssource_spectra(BGSmags, fratios, seed=None, exptime=1000., nperchunk=500,
                           infofile='lenssource-truth.fits', debug=False):
    """Build the (noisy) lens+source spectra. No redshift-fitting.

    """
    from astropy.io import fits
    from desisim.templates import BGS, ELG
    from desisim.scripts.quickspectra import sim_spectra
    from desisim.io import read_basis_templates
    from desispec.io import read_spectra
    
    rand = np.random.RandomState(seed)
    nsim = len(BGSmags)
    assert(nsim == len(fratios))

    if nperchunk > 500:
        raise ValueError('nperchunk={} exceeds the maximum number allowed by redrock'.format(nperchunk))

    nchunk = np.ceil(nsim / nperchunk).astype(int)

    # [1] Build the noise-less lens (BGS) spectra.

    # Read one healpix of the Buzzard mocks for redshift distribution.
    mockfile = os.path.join(os.getenv('DESI_ROOT'), 'mocks', 'buzzard', 'buzzard_v1.6_desicut',
                            '8', '0', '0', 'Buzzard_v1.6_lensed-8-0.fits')
    print('Reading {}'.format(mockfile))
    mock_BGS = Table(fitsio.read(mockfile)) #columns='lmag z'.split()
    
    # From the BGS template library, select a reddish galaxy
    tflux, twave, tmeta_BGS = read_basis_templates('BGS')
    i = np.argmin(np.abs(2.0 - tmeta_BGS['D4000']))
    iredBGS = i
    redspecBGS = tflux[i, :]
    Itempl_BGS = np.array([iredBGS])

    # LMAG: observed mag, DECam grizY
    mock_mag_r = mock_BGS['LMAG'][:, 1] # r-band
    dm = 0.01
    zz_BGS = np.zeros_like(BGSmags)
    for ii, mag in enumerate(BGSmags):
        I = np.flatnonzero(np.abs(mock_mag_r - mag) <= dm)
        zz_BGS[ii] = mock_BGS['Z'][rand.choice(I, size=1, replace=False)]

    input_meta_BGS = Table()
    input_meta_BGS['TEMPLATEID'] = [Itempl_BGS]*nsim
    input_meta_BGS['SEED'] = np.arange(nsim)  # alternative: [seed]*nsim
    input_meta_BGS['REDSHIFT'] = zz_BGS
    input_meta_BGS['MAG'] = BGSmags
    input_meta_BGS['MAGFILTER'] = ['decam2014-r']*nsim

    BGSflux, BGSwave, BGSmeta, BGSobjmeta = BGS().make_templates(
        input_meta=input_meta_BGS, nocolorcuts=True, seed=seed)

    # [2] Build the noise-less source (ELG) spectra.
    #ELGmags = maggen(BGSmags[:, np.newaxis], fratios[np.newaxis, :])
    ELGmags = maggen(BGSmags, fratios)

    # Select a single ELG template.
    tflux, twave, tmeta_ELG = read_basis_templates('ELG')
    i = np.argmin(np.abs(1.0 - tmeta_ELG['D4000'])) # MIGHT NEED TO ADJUST THIS LINE 
    iblueELG = i
    bluespecELG = tflux[i, :]
    Itempl_ELG = np.array([iblueELG])

    # uncorrelated redshifts
    zmin_ELG, zmax_ELG = 0.8, 1.4
    zz_ELG = rand.uniform(zmin_ELG, zmax_ELG, nsim)
    
    input_meta_ELG = Table()
    input_meta_ELG['TEMPLATEID'] = [Itempl_ELG]*nsim
    input_meta_ELG['SEED'] = [3]*nsim  # hack: fixed seed (alternatives: [seed]*nsim, np.arange(nsim))
    input_meta_ELG['REDSHIFT'] = zz_ELG
    input_meta_ELG['MAG'] = ELGmags
    input_meta_ELG['MAGFILTER'] = ['decam2014-r']*nsim

    ELGflux, ELGwave, ELGmeta, ELGobjmeta = ELG().make_templates(
        input_meta=input_meta_ELG, nocolorcuts=True, seed=seed)
    assert(np.all(BGSwave == ELGwave))

    # Pack the simulation info into a table, for convenience.
    siminfo = Table()
    siminfo['TARGETID'] = np.arange(nsim, dtype=np.int64)
    siminfo['LENS_Z'] = input_meta_BGS['REDSHIFT'].astype('f4')
    siminfo['LENS_MAG'] = input_meta_BGS['MAG'].astype('f4')
    siminfo['SOURCE_Z'] = input_meta_ELG['REDSHIFT'].astype('f4')
    siminfo['SOURCE_MAG'] = input_meta_ELG['MAG'].astype('f4')
    siminfo['FRATIO'] = fratios.astype('f4')
    siminfo['CHUNK'] = np.zeros(nsim, dtype=np.int32)

    # Generate simulated DESI spectra given real spectra and observing
    # conditions. Divide the sample into chunks with a fixed number of
    # spectra per chunk (but no more than 500).
    obscond = {'AIRMASS': 1.3, 'EXPTIME': exptime, 'SEEING': 1.1,
               'MOONALT': -60, 'MOONFRAC': 0.0, 'MOONSEP': 180}

    simflux = BGSflux + ELGflux
    simwave = BGSwave

    for ichunk in np.arange(nchunk):
        specfile = 'lenssource-spectra-chunk{:03d}.fits'.format(ichunk)
        print('Writing chunk {}/{} to {}'.format(ichunk, nchunk-1, specfile))
        i1 = ichunk * nperchunk
        i2 = (ichunk+1) * nperchunk
        siminfo['CHUNK'][i1:i2] = ichunk
        sim_spectra(simwave, simflux[i1:i2, :], 'dark', specfile, obsconditions=obscond,
                    sourcetype='bgs', seed=seed, targetid=siminfo['TARGETID'][i1:i2],
                    redshift=siminfo['LENS_Z'][i1:i2])
        if debug:
            spectra = read_spectra(specfile)
            for igal in np.arange(spectra.num_targets()):
                qafile = 'lenssource-spectra-chunk{:03d}-{}.png'.format(ichunk, igal)
                fig, ax = plt.subplots()
                for band in spectra.bands:
                    ax.plot(spectra.wave[band], spectra.flux[band][igal, :])
                ax.plot(simwave, simflux[i1:i2, :][igal, :], color='k', lw=2)
                ax.set_ylim(np.median(simflux[i1:i2, :][igal, :]) + np.std(spectra.flux['r'][igal, :]) * np.array([-1.5, 3]))
                fig.savefig(qafile)
                plt.close()
                
    # write out and return
    hduflux = fits.PrimaryHDU(simflux)
    hduflux.header['EXTNAME'] = 'FLUX'
    hduflux.header['BUNIT'] = '10^(-17) erg/(s cm2 Angstrom)'

    hdubgs = fits.ImageHDU(BGSflux)
    hdubgs.header['EXTNAME'] = 'BGSFLUX'

    hduelg = fits.ImageHDU(ELGflux)
    hduelg.header['EXTNAME'] = 'ELGFLUX'

    hduwave = fits.ImageHDU(simwave)
    hduwave.header['EXTNAME'] = 'WAVE'
    hduwave.header['BUNIT'] = 'Angstrom'
    hduwave.header['AIRORVAC'] = ('vac', 'vacuum wavelengths')

    hdutable = fits.convenience.table_to_hdu(siminfo)
    hdutable.header['EXTNAME'] = 'METADATA'

    hx = fits.HDUList([hduflux, hdubgs, hduelg, hduwave, hdutable])

    print('Writing {}'.format(infofile))
    hx.writeto(infofile, overwrite=True)

    return siminfo
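A hedged usage sketch: the magnitude and flux-ratio arrays below are invented values, and the call assumes the DESI environment (DESI_ROOT, the desisim basis templates) is available.

# Hypothetical inputs, for illustration only.
import numpy as np

nsim = 50
rand = np.random.RandomState(1)
BGSmags = rand.uniform(18.0, 20.0, nsim)  # lens (BGS) r-band magnitudes
fratios = rand.uniform(0.05, 0.5, nsim)   # source-to-lens flux ratios
siminfo = sim_lenssource_spectra(BGSmags, fratios, seed=1, exptime=1000.,
                                 nperchunk=25, infofile='lenssource-truth.fits')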
Example 3
def main(args=None):
    
    log = get_logger()

    if isinstance(args, (list, tuple, type(None))):
        args = parse(args)

    thedate = datetime.now().strftime('%Y-%m-%d')

    # Generate transient model if one is specified.
    trans_model = None
    if args.transient is not None:
        trans_model = transients.get_model(args.transient)

    # Generate list of HEALPix pixels to randomly sample the mocks.
    rng = np.random.RandomState(args.seed)
    nside = args.nside
    healpixels = _get_healpixels_in_footprint(nside=args.nside)
    npix = np.minimum(10*args.nsim, len(healpixels))
    pixels = rng.choice(healpixels, size=npix, replace=False)
    ipix = iter(pixels)

    # Set up the template generator.
    fluxratio_range = 10**(-np.sort(args.magrange)[::-1] / 2.5)
    epoch_range = np.sort(args.epochrange)

    if args.host == 'bgs':
        maker = BGSMaker(seed=args.seed)
        tmpl_maker = BGS
    elif args.host == 'elg':
        maker = ELGMaker(seed=args.seed)
        tmpl_maker = ELG
    elif args.host == 'lrg':
        maker = LRGMaker(seed=args.seed)
        tmpl_maker = LRG
    else:
        raise ValueError('Unusable host type {}'.format(args.host))

    maker.template_maker = tmpl_maker(transient=trans_model,
                                      tr_fluxratio=fluxratio_range,
                                      tr_epoch=epoch_range)

    for j in range(args.nsim):
        # Loop until finding a non-empty healpixel with mock galaxies.
        tdata = []
        while len(tdata) == 0:
            pixel = next(ipix)
            tdata = maker.read(healpixels=pixel, nside=args.nside)

        # Generate spectral templates and write them to truth files.
        # Keep producing templates until we have enough to pass brightness cuts.
        wave = None
        flux, targ, truth, objtr = [], [], [], []
        
        ntosim = np.min([args.nspec, len(tdata['RA'])])
        ngood = 0

        while ngood < args.nspec:
            idx = rng.choice(len(tdata['RA']), ntosim)
            tflux, twave, ttarg, ttruth, tobj = maker.make_spectra(tdata, indx=idx)
            g, r, z, w1, w2 = [ttruth['FLUX_{}'.format(_)] for _ in ['G','R','Z','W1','W2']]
            rfib = ttarg['FIBERFLUX_R']
            # print(g, r, z, w1, w2, rfib)

            # Apply color cuts.
            is_bright = isBGS_colors(rfib, g, r, z, w1, w2, targtype='bright')
            is_faint  = isBGS_colors(rfib, g, r, z, w1, w2, targtype='faint')
            is_wise   = isBGS_colors(rfib, g, r, z, w1, w2, targtype='wise')

            keep = np.logical_or.reduce([is_bright, is_faint, is_wise])

            _ngood = np.count_nonzero(keep)
            if _ngood > 0:
                ngood += _ngood
                flux.append(tflux[keep, :])
                targ.append(ttarg[keep])
                truth.append(ttruth[keep])
                objtr.append(tobj[keep])

        wave = maker.wave
        flux = np.vstack(flux)[:args.nspec, :]
        targ = vstack(targ)[:args.nspec]
        truth = vstack(truth)[:args.nspec]
        objtr = vstack(objtr)[:args.nspec]

        # Set up and verify the TARGETID across all truth tables.
        n = len(truth)
        new_id = 10000*pixel + 100*j + np.arange(1, n+1)
        targ['OBJID'][:] = new_id
        truth['TARGETID'][:] = new_id
        objtr['TARGETID'][:] = new_id

        assert(len(truth) == args.nspec)
        assert(np.all(targ['OBJID'] == truth['TARGETID']))
        assert(len(targ) == len(np.unique(targ['OBJID'])))
        assert(len(truth) == len(np.unique(truth['TARGETID'])))
        assert(len(objtr) == len(np.unique(objtr['TARGETID'])))

        truthfile = os.path.join(args.outdir,
                     '{}_{}_{:04d}s_{:03d}_truth.fits'.format(args.host, thedate, int(args.exptime), j+1))
        write_templates(truthfile, flux, wave, targ, truth, objtr)

        # Get observing conditions and generate spectra.
        obs = dict(AIRMASS=args.airmass, EXPTIME=args.exptime,
                   MOONALT=args.moonalt, MOONFRAC=args.moonfrac,
                   MOONSEP=args.moonsep, SEEING=args.seeing)

        fcols = dict(BRICKID=targ['BRICKID'],
                     BRICK_OBJID=targ['OBJID'],
                     FLUX_G=targ['FLUX_G'],
                     FLUX_R=targ['FLUX_R'],
                     FLUX_Z=targ['FLUX_Z'],
                     FLUX_W1=targ['FLUX_W1'],
                     FLUX_W2=targ['FLUX_W2'],
                     FLUX_IVAR_G=targ['FLUX_IVAR_G'],
                     FLUX_IVAR_R=targ['FLUX_IVAR_R'],
                     FLUX_IVAR_Z=targ['FLUX_IVAR_Z'],
                     FLUX_IVAR_W1=targ['FLUX_IVAR_W1'],
                     FLUX_IVAR_W2=targ['FLUX_IVAR_W2'],
                     FIBERFLUX_G=targ['FIBERFLUX_G'],
                     FIBERFLUX_R=targ['FIBERFLUX_R'],
                     FIBERFLUX_Z=targ['FIBERFLUX_Z'],
                     FIBERTOTFLUX_G=targ['FIBERTOTFLUX_G'],
                     FIBERTOTFLUX_R=targ['FIBERTOTFLUX_R'],
                     FIBERTOTFLUX_Z=targ['FIBERTOTFLUX_Z'],
                     MW_TRANSMISSION_G=targ['MW_TRANSMISSION_G'],
                     MW_TRANSMISSION_R=targ['MW_TRANSMISSION_R'],
                     MW_TRANSMISSION_Z=targ['MW_TRANSMISSION_Z'],
                     EBV=targ['EBV'])

        specfile = os.path.join(args.outdir,
                    '{}_{}_{:04d}s_{:03d}_spect.fits'.format(args.host, thedate, int(args.exptime), j+1))

        redshifts = truth['TRUEZ'] if args.host=='bgs' else None

        sim_spectra(wave, flux, args.host, specfile,
                    sourcetype=args.host,
                    obsconditions=obs, meta=obs, fibermap_columns=fcols,
                    targetid=truth['TARGETID'], redshift=redshifts,
                    ra=targ['RA'], dec=targ['DEC'],
                    seed=args.seed, expid=j)
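A sketch of the argument namespace this main() consumes when called programmatically (passing a Namespace bypasses parse()). The attribute names are inferred from the function body above; the real parse() presumably defines defaults and further options.

# Hypothetical arguments, for illustration only.
from argparse import Namespace

args = Namespace(
    host='bgs', transient=None,
    seed=1, nside=64, nsim=2, nspec=100,
    magrange=[0.0, 2.5], epochrange=[-10.0, 10.0],
    exptime=300.0, airmass=1.3, seeing=1.1,
    moonalt=-60.0, moonfrac=0.0, moonsep=180.0,
    outdir='.')
main(args)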
Example 4
def simulate_one_healpix(ifilename,
                         args,
                         model,
                         obsconditions,
                         decam_and_wise_filters,
                         footprint_healpix_weight,
                         footprint_healpix_nside,
                         seed,
                         bal=None):
    log = get_logger()

    # set seed now
    # we need a seed per healpix because
    # the spectra simulator REQUIRES a seed
    np.random.seed(seed)

    # read the header of the transmission file to find the healpix pixel number, nside
    # and if we are lucky the scheme.
    # if this fails, try to guess it from the filename (for backward compatibility)
    healpix = -1
    nside = -1
    hpxnest = True

    hdulist = pyfits.open(ifilename)
    if "METADATA" in hdulist:
        head = hdulist["METADATA"].header
        for k in ["HPXPIXEL", "PIXNUM"]:
            if k in head:
                healpix = int(head[k])
                log.info("healpix={}={}".format(k, healpix))
                break
        for k in ["HPXNSIDE", "NSIDE"]:
            if k in head:
                nside = int(head[k])
                log.info("nside={}={}".format(k, nside))
                break
        for k in ["HPXNEST", "NESTED", "SCHEME"]:
            if k in head:
                if k == "SCHEME":
                    hpxnest = (head[k] == "NEST")
                else:
                    hpxnest = bool(head[k])
                log.info("hpxnest from {} = {}".format(k, hpxnest))
                break
    if healpix >= 0 and nside < 0:
        log.error("Read healpix in header but not nside.")
        raise ValueError("Read healpix in header but not nside.")

    if healpix < 0:
        vals = os.path.basename(ifilename).split(".")[0].split("-")
        if len(vals) < 3:
            log.error("Cannot guess nside and healpix from filename {}".format(
                ifilename))
            raise ValueError(
                "Cannot guess nside and healpix from filename {}".format(
                    ifilename))
        try:
            healpix = int(vals[-1])
            nside = int(vals[-2])
        except ValueError:
            raise ValueError(
                "Cannot guess nside and healpix from filename {}".format(
                    ifilename))
        log.warning(
            "Guessed healpix and nside from filename, assuming the healpix scheme is 'NESTED'"
        )

    zbest_filename = None
    if args.outfile:
        ofilename = args.outfile
    else:
        ofilename = os.path.join(
            args.outdir,
            "{}/{}/spectra-{}-{}.fits".format(healpix // 100, healpix, nside,
                                              healpix))
    pixdir = os.path.dirname(ofilename)

    if args.zbest:
        zbest_filename = os.path.join(
            pixdir, "zbest-{}-{}.fits".format(nside, healpix))

    if not args.overwrite:
        # check whether output exists or not
        if args.zbest:
            if os.path.isfile(ofilename) and os.path.isfile(zbest_filename):
                log.info("skip existing {} and {}".format(
                    ofilename, zbest_filename))
                return
        else:  # only test spectra file
            if os.path.isfile(ofilename):
                log.info("skip existing {}".format(ofilename))
                return

    log.info("Read skewers in {}, random seed = {}".format(ifilename, seed))

    ## ALMA: Read only the skewers (no DLA info) if there are no DLAs or if they are added randomly.
    if (not args.dla or args.dla == 'random'):
        trans_wave, transmission, metadata = read_lya_skewers(ifilename)
        ok = np.where((metadata['Z'] >= args.zmin)
                      & (metadata['Z'] <= args.zmax))[0]
        transmission = transmission[ok]
        metadata = metadata[:][ok]
    ## ALMA: Added to read dla_info

    elif (args.dla == 'file'):
        log.info("Read DLA information in {}".format(ifilename))
        trans_wave, transmission, metadata, dla_info = read_lya_skewers(
            ifilename, dla_='TRUE')
        ok = np.where((metadata['Z'] >= args.zmin)
                      & (metadata['Z'] <= args.zmax))[0]
        transmission = transmission[ok]
        metadata = metadata[:][ok]
    else:
        log.error(
            'Not a valid option to add DLAs. Valid options are "random" or "file"'
        )
        sys.exit(1)

    if args.dla:
        dla_NHI, dla_z, dla_id = [], [], []
        dla_filename = os.path.join(pixdir,
                                    "dla-{}-{}.fits".format(nside, healpix))

    if args.desi_footprint:
        footprint_healpix = footprint.radec2pix(footprint_healpix_nside,
                                                metadata["RA"],
                                                metadata["DEC"])
        selection = np.where(
            footprint_healpix_weight[footprint_healpix] > 0.99)[0]
        log.info("Select QSOs in DESI footprint {} -> {}".format(
            transmission.shape[0], selection.size))
        if selection.size == 0:
            log.warning("No intersection with DESI footprint")
            return
        transmission = transmission[selection]
        metadata = metadata[:][selection]

    nqso = transmission.shape[0]
    if args.downsampling is not None:
        if args.downsampling <= 0 or args.downsampling > 1:
            log.error(
                "Down sampling fraction={} must be between 0 and 1".format(
                    args.downsampling))
            raise ValueError(
                "Down sampling fraction={} must be between 0 and 1".format(
                    args.downsampling))
        indices = np.where(np.random.uniform(size=nqso) < args.downsampling)[0]
        if indices.size == 0:
            log.warning(
                "Down sampling from {} to 0 (by chance I presume)".format(
                    nqso))
            return
        transmission = transmission[indices]
        metadata = metadata[:][indices]
        nqso = transmission.shape[0]


    ## ALMA: Added to set transmission to 1 for z > zqso; this can be removed once the transmission files are corrected.
    for ii in range(len(metadata)):
        transmission[ii][trans_wave > 1215.67 * (metadata[ii]['Z'] + 1)] = 1.0

    if (args.dla == 'file'):
        log.info('Adding DLAs from transmission file')
        min_trans_wave = np.min(trans_wave / 1215.67 - 1)
        for ii in range(len(metadata)):
            if min_trans_wave < metadata[ii]['Z']:
                idd = metadata['MOCKID'][ii]
                dlas = dla_info[dla_info['MOCKID'] == idd]
                dlass = []
                for i in range(len(dlas)):
                    ## Adding only DLAs between zqso and 1.95; check again for the next version of the London mocks.
                    if (dlas[i]['Z_DLA'] <
                            metadata[ii]['Z']) and (dlas[i]['Z_DLA'] > 1.95):
                        dlass.append(
                            dict(z=dlas[i]['Z_DLA'] + dlas[i]['DZ_DLA'],
                                 N=dlas[i]['N_HI_DLA']))
                if len(dlass) > 0:
                    dla_model = dla_spec(trans_wave, dlass)
                    transmission[ii] = dla_model * transmission[ii]
                    dla_z += [idla['z'] for idla in dlass]
                    dla_NHI += [idla['N'] for idla in dlass]
                    dla_id += [idd] * len(dlass)

    elif (args.dla == 'random'):
        log.info('Adding DLAs randomly')
        min_trans_wave = np.min(trans_wave / 1215.67 - 1)
        for ii in range(len(metadata)):
            if min_trans_wave < metadata[ii]['Z']:
                idd = metadata['MOCKID'][ii]
                dlass, dla_model = insert_dlas(trans_wave, metadata[ii]['Z'])
                if len(dlass) > 0:
                    transmission[ii] = dla_model * transmission[ii]
                    dla_z += [idla['z'] for idla in dlass]
                    dla_NHI += [idla['N'] for idla in dlass]
                    dla_id += [idd] * len(dlass)

    if args.dla:
        if len(dla_id) > 0:
            dla_meta = Table()
            dla_meta['NHI'] = dla_NHI
            dla_meta['z'] = dla_z
            dla_meta['ID'] = dla_id

    if args.nmax is not None:
        if args.nmax < nqso:
            log.info(
                "Limit number of QSOs from {} to nmax={} (random subsample)".
                format(nqso, args.nmax))
            # take a random subsample
            indices = (np.random.uniform(size=args.nmax) * nqso).astype(int)
            transmission = transmission[indices]
            metadata = metadata[:][indices]
            nqso = args.nmax

            if args.dla:
                dla_meta = dla_meta[:][dla_meta['ID'] == metadata['MOCKID']]

    if args.target_selection or args.mags:
        wanted_min_wave = 3329.  # needed to compute magnitudes for decam2014-r (one could have trimmed the transmission file ...)
        wanted_max_wave = 55501.  # needed to compute magnitudes for wise2010-W2

        if trans_wave[0] > wanted_min_wave:
            log.info(
                "Increase wavelength range from {}:{} to {}:{} to compute magnitudes"
                .format(int(trans_wave[0]), int(trans_wave[-1]),
                        int(wanted_min_wave), int(trans_wave[-1])))
            # pad with zeros at short wavelength because we assume transmission = 0
            # and we don't need any wavelength resolution here
            new_trans_wave = np.append([wanted_min_wave, trans_wave[0] - 0.01],
                                       trans_wave)
            new_transmission = np.zeros(
                (transmission.shape[0], new_trans_wave.size))
            new_transmission[:, 2:] = transmission
            trans_wave = new_trans_wave
            transmission = new_transmission

        if trans_wave[-1] < wanted_max_wave:
            log.info(
                "Increase wavelength range from {}:{} to {}:{} to compute magnitudes"
                .format(int(trans_wave[0]), int(trans_wave[-1]),
                        int(trans_wave[0]), int(wanted_max_wave)))
            # pad with ones at long wavelength because we assume transmission = 1
            coarse_dwave = 2.  # we don't care about resolution, we just need a decent QSO spectrum, there is no IGM transmission in this range
            n = int((wanted_max_wave - trans_wave[-1]) / coarse_dwave) + 1
            new_trans_wave = np.append(
                trans_wave,
                np.linspace(trans_wave[-1] + coarse_dwave,
                            trans_wave[-1] + coarse_dwave * (n + 1), n))
            new_transmission = np.ones(
                (transmission.shape[0], new_trans_wave.size))
            new_transmission[:, :trans_wave.size] = transmission
            trans_wave = new_trans_wave
            transmission = new_transmission

    log.info("Simulate {} QSOs".format(nqso))
    tmp_qso_flux, tmp_qso_wave, meta = model.make_templates(
        nmodel=nqso,
        redshift=metadata['Z'],
        lyaforest=False,
        nocolorcuts=True,
        noresample=True,
        seed=seed)

    log.info("Resample to transmission wavelength grid")
    # because we don't want to alter the transmission field with resampling here
    qso_flux = np.zeros((tmp_qso_flux.shape[0], trans_wave.size))
    for q in range(tmp_qso_flux.shape[0]):
        qso_flux[q] = np.interp(trans_wave, tmp_qso_wave, tmp_qso_flux[q])
    tmp_qso_flux = qso_flux
    tmp_qso_wave = trans_wave

    ## Add BALs (to be checked by Luz and Jaime)
    if (args.balprob):
        if (args.balprob <= 1. and args.balprob > 0):
            log.info("Adding BALs with probability {}".format(args.balprob))
            tmp_qso_flux, meta_bal = bal.insert_bals(tmp_qso_wave,
                                                     tmp_qso_flux,
                                                     metadata['Z'],
                                                     balprob=args.balprob,
                                                     seed=seed)
        else:
            log.error("Probability to add BALs is not between 0 and 1")
            sys.exit(1)

    log.info("Apply lya")
    tmp_qso_flux = apply_lya_transmission(tmp_qso_wave, tmp_qso_flux,
                                          trans_wave, transmission)

    if args.metals is not None:
        lstMetals = ''
        for m in args.metals:
            lstMetals += m + ', '
        log.info("Apply metals: {}".format(lstMetals[:-2]))
        tmp_qso_flux = apply_metals_transmission(tmp_qso_wave, tmp_qso_flux,
                                                 trans_wave, transmission,
                                                 args.metals)

    bbflux = None
    if args.target_selection or args.mags:
        bands = ['FLUX_G', 'FLUX_R', 'FLUX_Z', 'FLUX_W1', 'FLUX_W2']
        bbflux = dict()
        # need to recompute the magnitudes to account for lya transmission
        log.info("Compute QSO magnitudes")
        maggies = decam_and_wise_filters.get_ab_maggies(
            1e-17 * tmp_qso_flux, tmp_qso_wave)
        for band, filt in zip(bands, [
                'decam2014-g', 'decam2014-r', 'decam2014-z', 'wise2010-W1',
                'wise2010-W2'
        ]):

            bbflux[band] = np.ma.getdata(1e9 * maggies[filt])  # nanomaggies

    if args.target_selection:
        log.info("Apply target selection")
        isqso = isQSO_colors(gflux=bbflux['FLUX_G'],
                             rflux=bbflux['FLUX_R'],
                             zflux=bbflux['FLUX_Z'],
                             w1flux=bbflux['FLUX_W1'],
                             w2flux=bbflux['FLUX_W2'])
        log.info("Target selection: {}/{} QSOs selected".format(
            np.sum(isqso), nqso))
        selection = np.where(isqso)[0]
        if selection.size == 0: return
        tmp_qso_flux = tmp_qso_flux[selection]
        metadata = metadata[:][selection]
        meta = meta[:][selection]
        for band in bands:
            bbflux[band] = bbflux[band][selection]
        nqso = selection.size

    log.info("Resample to a linear wavelength grid (needed by DESI sim.)")
    # we need a linear grid. for this resampling we take care of integrating in bins
    # we do not do a simple interpolation
    qso_wave = np.linspace(args.wmin, args.wmax,
                           int((args.wmax - args.wmin) / args.dwave) + 1)
    qso_flux = np.zeros((tmp_qso_flux.shape[0], qso_wave.size))
    for q in range(tmp_qso_flux.shape[0]):
        qso_flux[q] = resample_flux(qso_wave, tmp_qso_wave, tmp_qso_flux[q])

    log.info("Simulate DESI observation and write output file")
    pixdir = os.path.dirname(ofilename)
    if len(pixdir) > 0:
        if not os.path.isdir(pixdir):
            log.info("Creating dir {}".format(pixdir))
            os.makedirs(pixdir)

    if "MOCKID" in metadata.dtype.names:
        #log.warning("Using MOCKID as TARGETID")
        targetid = np.array(metadata["MOCKID"]).astype(int)
    elif "ID" in metadata.dtype.names:
        log.warning("Using ID as TARGETID")
        targetid = np.array(metadata["ID"]).astype(int)
    else:
        log.warning("No TARGETID")
        targetid = None

    meta = {"HPXNSIDE": nside, "HPXPIXEL": healpix, "HPXNEST": hpxnest}

    if args.target_selection or args.mags:
        # for now we write mags because that's what is in the fibermap
        mags = np.zeros((qso_flux.shape[0], 5))
        for i, band in enumerate(bands):
            jj = (bbflux[band] > 0)
            mags[jj,
                 i] = 22.5 - 2.5 * np.log10(bbflux[band][jj])  # AB magnitudes
        fibermap_columns = {"MAG": mags}
    else:
        fibermap_columns = None

    sim_spectra(qso_wave,
                qso_flux,
                args.program,
                obsconditions=obsconditions,
                spectra_filename=ofilename,
                sourcetype="qso",
                skyerr=args.skyerr,
                ra=metadata["RA"],
                dec=metadata["DEC"],
                targetid=targetid,
                meta=meta,
                seed=seed,
                fibermap_columns=fibermap_columns)

    if args.zbest:
        log.info("Read fibermap")
        fibermap = read_fibermap(ofilename)

        log.info("Writing a zbest file {}".format(zbest_filename))
        columns = [('CHI2', 'f8'), ('COEFF', 'f8', (4, )), ('Z', 'f8'),
                   ('ZERR', 'f8'), ('ZWARN', 'i8'), ('SPECTYPE', (str, 96)),
                   ('SUBTYPE', (str, 16)), ('TARGETID', 'i8'),
                   ('DELTACHI2', 'f8'), ('BRICKNAME', (str, 8))]
        zbest = Table(np.zeros(nqso, dtype=columns))
        zbest["CHI2"][:] = 0.
        zbest["Z"] = metadata['Z']
        zbest["ZERR"][:] = 0.
        zbest["ZWARN"][:] = 0
        zbest["SPECTYPE"][:] = "QSO"
        zbest["SUBTYPE"][:] = ""
        zbest["TARGETID"] = fibermap["TARGETID"]
        zbest["DELTACHI2"][:] = 25.

        hzbest = pyfits.convenience.table_to_hdu(zbest)
        hzbest.name = "ZBEST"
        hfmap = pyfits.convenience.table_to_hdu(fibermap)
        hfmap.name = "FIBERMAP"

        hdulist = pyfits.HDUList([pyfits.PrimaryHDU(), hzbest, hfmap])
        hdulist.writeto(zbest_filename, clobber=True)
        hdulist.close()  # see if this helps with memory issue

        if args.dla:
            #This will change according to discussion
            log.info("Updating the spectra file to add DLA metadata {}".format(
                ofilename))
            hdudla = pyfits.table_to_hdu(dla_meta)
            hdudla.name = "DLA_META"
            hdul = pyfits.open(ofilename, mode='update')
            hdul.append(hdudla)
            hdul.flush()
            hdul.close()
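A small standalone check of the nanomaggies-to-AB-magnitude relation used when filling the MAG fibermap column above, m_AB = 22.5 - 2.5 log10(flux in nanomaggies); this snippet is not part of the original example.

import numpy as np

flux_nmgy = np.array([1.0, 10.0, 100.0])   # fluxes in nanomaggies
mags = 22.5 - 2.5 * np.log10(flux_nmgy)    # AB magnitudes
print(mags)                                # [22.5 20.  17.5]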
Example 5
def simulate_one_healpix(ifilename, args, model, obsconditions,
                         decam_and_wise_filters, footprint_healpix_weight,
                         footprint_healpix_nside, seed):

    log = get_logger()

    # set seed now
    # we need a seed per healpix because
    # the spectra simulator REQUIRES a seed
    np.random.seed(seed)

    healpix = 0
    nside = 0
    vals = os.path.basename(ifilename).split(".")[0].split("-")
    if len(vals) < 3:
        log.error("Cannot guess nside and healpix from filename {}".format(
            ifilename))
        raise ValueError(
            "Cannot guess nside and healpix from filename {}".format(
                ifilename))
    try:
        healpix = int(vals[-1])
        nside = int(vals[-2])
    except ValueError:
        raise ValueError(
            "Cannot guess nside and healpix from filename {}".format(
                ifilename))

    zbest_filename = None
    if args.outfile:
        ofilename = args.outfile
    else:
        ofilename = os.path.join(
            args.outdir,
            "{}/{}/spectra-{}-{}.fits".format(healpix // 100, healpix, nside,
                                              healpix))
    pixdir = os.path.dirname(ofilename)

    if args.zbest:
        zbest_filename = os.path.join(
            pixdir, "zbest-{}-{}.fits".format(nside, healpix))

    if not args.overwrite:
        # check whether output exists or not
        if args.zbest:
            if os.path.isfile(ofilename) and os.path.isfile(zbest_filename):
                log.info("skip existing {} and {}".format(
                    ofilename, zbest_filename))
                return
        else:  # only test spectra file
            if os.path.isfile(ofilename):
                log.info("skip existing {}".format(ofilename))
                return

    log.info("Read skewers in {}, random seed = {}".format(ifilename, seed))
    trans_wave, transmission, metadata = read_lya_skewers(ifilename)
    ok = np.where((metadata['Z'] >= args.zmin)
                  & (metadata['Z'] <= args.zmax))[0]
    transmission = transmission[ok]
    metadata = metadata[:][ok]

    # create quasars

    if args.desi_footprint:
        footprint_healpix = footprint.radec2pix(footprint_healpix_nside,
                                                metadata["RA"],
                                                metadata["DEC"])
        selection = np.where(
            footprint_healpix_weight[footprint_healpix] > 0.99)[0]
        log.info("Select QSOs in DESI footprint {} -> {}".format(
            transmission.shape[0], selection.size))
        if selection.size == 0:
            log.warning("No intersection with DESI footprint")
            return
        transmission = transmission[selection]
        metadata = metadata[:][selection]

    nqso = transmission.shape[0]
    if args.downsampling is not None:
        if args.downsampling <= 0 or args.downsampling > 1:
            log.error(
                "Down sampling fraction={} must be between 0 and 1".format(
                    args.downsampling))
            raise ValueError(
                "Down sampling fraction={} must be between 0 and 1".format(
                    args.downsampling))
        indices = np.where(np.random.uniform(size=nqso) < args.downsampling)[0]
        if indices.size == 0:
            log.warning(
                "Down sampling from {} to 0 (by chance I presume)".format(
                    nqso))
            return
        transmission = transmission[indices]
        metadata = metadata[:][indices]
        nqso = transmission.shape[0]

    if args.nmax is not None:
        if args.nmax < nqso:
            log.info(
                "Limit number of QSOs from {} to nmax={} (random subsample)".
                format(nqso, args.nmax))
            # take a random subsample
            indices = (np.random.uniform(size=args.nmax) * nqso).astype(int)
            transmission = transmission[indices]
            metadata = metadata[:][indices]
            nqso = args.nmax

    if args.target_selection or args.mags:
        wanted_min_wave = 3329.  # needed to compute magnitudes for decam2014-r (one could have trimmed the transmission file ...)
        wanted_max_wave = 55501.  # needed to compute magnitudes for wise2010-W2

        if trans_wave[0] > wanted_min_wave:
            log.info(
                "Increase wavelength range from {}:{} to {}:{} to compute magnitudes"
                .format(int(trans_wave[0]), int(trans_wave[-1]),
                        int(wanted_min_wave), int(trans_wave[-1])))
            # pad with zeros at short wavelength because we assume transmission = 0
            # and we don't need any wavelength resolution here
            new_trans_wave = np.append([wanted_min_wave, trans_wave[0] - 0.01],
                                       trans_wave)
            new_transmission = np.zeros(
                (transmission.shape[0], new_trans_wave.size))
            new_transmission[:, 2:] = transmission
            trans_wave = new_trans_wave
            transmission = new_transmission

        if trans_wave[-1] < wanted_max_wave:
            log.info(
                "Increase wavelength range from {}:{} to {}:{} to compute magnitudes"
                .format(int(trans_wave[0]), int(trans_wave[-1]),
                        int(trans_wave[0]), int(wanted_max_wave)))
            # pad with ones at long wavelength because we assume transmission = 1
            coarse_dwave = 2.  # we don't care about resolution, we just need a decent QSO spectrum, there is no IGM transmission in this range
            n = int((wanted_max_wave - trans_wave[-1]) / coarse_dwave) + 1
            new_trans_wave = np.append(
                trans_wave,
                np.linspace(trans_wave[-1] + coarse_dwave,
                            trans_wave[-1] + coarse_dwave * (n + 1), n))
            new_transmission = np.ones(
                (transmission.shape[0], new_trans_wave.size))
            new_transmission[:, :trans_wave.size] = transmission
            trans_wave = new_trans_wave
            transmission = new_transmission

    log.info("Simulate {} QSOs".format(nqso))
    tmp_qso_flux, tmp_qso_wave, meta = model.make_templates(
        nmodel=nqso,
        redshift=metadata['Z'],
        lyaforest=False,
        nocolorcuts=True,
        noresample=True,
        seed=seed)

    log.info("Resample to transmission wavelength grid")
    # because we don't want to alter the transmission field with resampling here
    qso_flux = np.zeros((tmp_qso_flux.shape[0], trans_wave.size))
    for q in range(tmp_qso_flux.shape[0]):
        qso_flux[q] = np.interp(trans_wave, tmp_qso_wave, tmp_qso_flux[q])
    tmp_qso_flux = qso_flux
    tmp_qso_wave = trans_wave

    log.info("Apply lya")
    tmp_qso_flux = apply_lya_transmission(tmp_qso_wave, tmp_qso_flux,
                                          trans_wave, transmission)

    bbflux = None
    if args.target_selection or args.mags:
        bands = ['FLUX_G', 'FLUX_R', 'FLUX_Z', 'FLUX_W1', 'FLUX_W2']
        bbflux = dict()
        # need to recompute the magnitudes to account for lya transmission
        log.info("Compute QSO magnitudes")
        maggies = decam_and_wise_filters.get_ab_maggies(
            1e-17 * tmp_qso_flux, tmp_qso_wave)
        for band, filt in zip(bands, [
                'decam2014-g', 'decam2014-r', 'decam2014-z', 'wise2010-W1',
                'wise2010-W2'
        ]):

            bbflux[band] = np.ma.getdata(1e9 * maggies[filt])  # nanomaggies

    if args.target_selection:
        log.info("Apply target selection")
        isqso = isQSO_colors(gflux=bbflux['FLUX_G'],
                             rflux=bbflux['FLUX_R'],
                             zflux=bbflux['FLUX_Z'],
                             w1flux=bbflux['FLUX_W1'],
                             w2flux=bbflux['FLUX_W2'])
        log.info("Target selection: {}/{} QSOs selected".format(
            np.sum(isqso), nqso))
        selection = np.where(isqso)[0]
        if selection.size == 0: return
        tmp_qso_flux = tmp_qso_flux[selection]
        metadata = metadata[:][selection]
        meta = meta[:][selection]
        for band in bands:
            bbflux[band] = bbflux[band][selection]
        nqso = selection.size

    log.info("Resample to a linear wavelength grid (needed by DESI sim.)")
    # we need a linear grid. for this resampling we take care of integrating in bins
    # we do not do a simple interpolation
    qso_wave = np.linspace(args.wmin, args.wmax,
                           int((args.wmax - args.wmin) / args.dwave) + 1)
    qso_flux = np.zeros((tmp_qso_flux.shape[0], qso_wave.size))
    for q in range(tmp_qso_flux.shape[0]):
        qso_flux[q] = resample_flux(qso_wave, tmp_qso_wave, tmp_qso_flux[q])

    log.info("Simulate DESI observation and write output file")
    pixdir = os.path.dirname(ofilename)
    if len(pixdir) > 0:
        if not os.path.isdir(pixdir):
            log.info("Creating dir {}".format(pixdir))
            os.makedirs(pixdir)

    if "MOCKID" in metadata.dtype.names:
        #log.warning("Using MOCKID as TARGETID")
        targetid = np.array(metadata["MOCKID"]).astype(int)
    elif "ID" in metadata.dtype.names:
        log.warning("Using ID as TARGETID")
        targetid = np.array(metadata["ID"]).astype(int)
    else:
        log.warning("No TARGETID")
        targetid = None

    log.warning("Assuming the healpix scheme is 'NESTED'")
    meta = {"HPXNSIDE": nside, "HPXPIXEL": healpix, "HPXNEST": True}

    if args.target_selection or args.mags:
        # for now we write mags because that's what is in the fibermap
        mags = np.zeros((qso_flux.shape[0], 5))
        for i, band in enumerate(bands):
            jj = (bbflux[band] > 0)
            mags[jj,
                 i] = 22.5 - 2.5 * np.log10(bbflux[band][jj])  # AB magnitudes
        fibermap_columns = {"MAG": mags}
    else:
        fibermap_columns = None

    sim_spectra(qso_wave,
                qso_flux,
                args.program,
                obsconditions=obsconditions,
                spectra_filename=ofilename,
                sourcetype="qso",
                skyerr=args.skyerr,
                ra=metadata["RA"],
                dec=metadata["DEC"],
                targetid=targetid,
                meta=meta,
                seed=seed,
                fibermap_columns=fibermap_columns)

    if args.zbest:
        log.info("Read fibermap")
        fibermap = read_fibermap(ofilename)

        log.info("Writing a zbest file {}".format(zbest_filename))
        columns = [('CHI2', 'f8'), ('COEFF', 'f8', (4, )), ('Z', 'f8'),
                   ('ZERR', 'f8'), ('ZWARN', 'i8'), ('SPECTYPE', (str, 96)),
                   ('SUBTYPE', (str, 16)), ('TARGETID', 'i8'),
                   ('DELTACHI2', 'f8'), ('BRICKNAME', (str, 8))]
        zbest = Table(np.zeros(nqso, dtype=columns))
        zbest["CHI2"][:] = 0.
        zbest["Z"] = metadata['Z']
        zbest["ZERR"][:] = 0.
        zbest["ZWARN"][:] = 0
        zbest["SPECTYPE"][:] = "QSO"
        zbest["SUBTYPE"][:] = ""
        zbest["TARGETID"] = fibermap["TARGETID"]
        zbest["DELTACHI2"][:] = 25.

        hzbest = pyfits.convenience.table_to_hdu(zbest)
        hzbest.name = "ZBEST"
        hfmap = pyfits.convenience.table_to_hdu(fibermap)
        hfmap.name = "FIBERMAP"

        hdulist = pyfits.HDUList([pyfits.PrimaryHDU(), hzbest, hfmap])
        hdulist.writeto(zbest_filename, clobber=True)
        hdulist.close()  # see if this helps with memory issue
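A standalone illustration of the filename parsing this version relies on to recover nside and healpix (split the basename on '.' and then on '-'); the path below is made up.

import os

ifilename = '/path/to/mocks/transmission-16-123.fits'   # hypothetical filename
vals = os.path.basename(ifilename).split('.')[0].split('-')
healpix, nside = int(vals[-1]), int(vals[-2])
print(nside, healpix)   # 16 123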
Example 6
def simulate_one_healpix(ifilename,args,model,obsconditions,decam_and_wise_filters,
                         bassmzls_and_wise_filters,footprint_healpix_weight,
                         footprint_healpix_nside,
                         bal=None,sfdmap=None,eboss=None) :
    log = get_logger()

    # open filename and extract basic HEALPix information
    pixel, nside, hpxnest = get_healpix_info(ifilename)

    # using global seed (could be None) get seed for this particular pixel
    global_seed = args.seed
    seed = get_pixel_seed(pixel, nside, global_seed)
    # use this seed to generate future random numbers
    np.random.seed(seed)

    # get output file (we will write there spectra for this HEALPix pixel)
    ofilename = get_spectra_filename(args,nside,pixel)
    # get directory name (we will also write there zbest file)
    pixdir = os.path.dirname(ofilename)

    # get filename for truth file
    truth_filename = get_truth_filename(args,pixdir,nside,pixel)

    # get filename for zbest file
    zbest_filename = get_zbest_filename(args,pixdir,nside,pixel)

    if not args.overwrite :
        # check whether output exists or not
        if args.zbest :
            if os.path.isfile(ofilename) and os.path.isfile(zbest_filename) :
                log.info("skip existing {} and {}".format(ofilename,zbest_filename))
                return
        else : # only test spectra file
            if os.path.isfile(ofilename) :
                log.info("skip existing {}".format(ofilename))
                return

    # create sub-directories if required
    if len(pixdir)>0 :
        if not os.path.isdir(pixdir) :
            log.info("Creating dir {}".format(pixdir))
            os.makedirs(pixdir)

    log.info("Read skewers in {}, random seed = {}".format(ifilename,seed))

    # Read transmission from files. It might include DLA information, and it
    # might add metal transmission as well (from the HDU file).
    log.info("Read transmission file {}".format(ifilename))

    trans_wave, transmission, metadata, dla_info = read_lya_skewers(ifilename,read_dlas=(args.dla=='file'),add_metals=args.metals_from_file,add_lyb=args.add_LYB)

    ### Add Finger-of-God velocities before generating the continua
    log.info("Add FOG with sigma {} km/s to the quasar redshifts".format(args.sigma_kms_fog))
    DZ_FOG = args.sigma_kms_fog/c*(1.+metadata['Z'])*np.random.normal(0,1,metadata['Z'].size)
    metadata['Z'] += DZ_FOG

    ### Select quasars within the given redshift range
    w = (metadata['Z']>=args.zmin) & (metadata['Z']<=args.zmax)
    transmission = transmission[w]
    metadata = metadata[:][w]
    DZ_FOG = DZ_FOG[w]

    # optional mode to mimic the BOSS+eBOSS sample
    if eboss is not None:
        if args.downsampling or args.desi_footprint:
            raise ValueError("eboss option can not be run with "
                    +"desi_footprint or downsampling")

        # Get the redshift distribution from SDSS
        selection = sdss_subsample_redshift(metadata["RA"],metadata["DEC"],metadata['Z'],eboss['redshift'])
        log.info("Select QSOs in BOSS+eBOSS redshift distribution {} -> {}".format(metadata['Z'].size,selection.sum()))
        if selection.sum()==0:
            log.warning("No intersection with BOSS+eBOSS redshift distribution")
            return
        transmission = transmission[selection]
        metadata = metadata[:][selection]
        DZ_FOG = DZ_FOG[selection]

        # figure out the density of all quasars
        N_highz = metadata['Z'].size
        # area of the healpix pixel, in square degrees
        area_deg2 = healpy.pixelfunc.nside2pixarea(nside,degrees=True)
        input_highz_dens_deg2 = N_highz/area_deg2
        selection = sdss_subsample(metadata["RA"], metadata["DEC"],
                        input_highz_dens_deg2,eboss['footprint'])
        log.info("Select QSOs in BOSS+eBOSS footprint {} -> {}".format(transmission.shape[0],selection.size))
        if selection.size == 0 :
            log.warning("No intersection with BOSS+eBOSS footprint")
            return
        transmission = transmission[selection]
        metadata = metadata[:][selection]
        DZ_FOG = DZ_FOG[selection]

    if args.desi_footprint :
        footprint_healpix = footprint.radec2pix(footprint_healpix_nside, metadata["RA"], metadata["DEC"])
        selection = np.where(footprint_healpix_weight[footprint_healpix]>0.99)[0]
        log.info("Select QSOs in DESI footprint {} -> {}".format(transmission.shape[0],selection.size))
        if selection.size == 0 :
            log.warning("No intersection with DESI footprint")
            return
        transmission = transmission[selection]
        metadata = metadata[:][selection]
        DZ_FOG = DZ_FOG[selection]



    nqso=transmission.shape[0]
    if args.downsampling is not None :
        if args.downsampling <= 0 or  args.downsampling > 1 :
           log.error("Down sampling fraction={} must be between 0 and 1".format(args.downsampling))
           raise ValueError("Down sampling fraction={} must be between 0 and 1".format(args.downsampling))
        indices = np.where(np.random.uniform(size=nqso)<args.downsampling)[0]
        if indices.size == 0 :
            log.warning("Down sampling from {} to 0 (by chance I presume)".format(nqso))
            return
        transmission = transmission[indices]
        metadata = metadata[:][indices]
        DZ_FOG = DZ_FOG[indices]
        nqso = transmission.shape[0]

    if args.nmax is not None :
        if args.nmax < nqso :
            log.info("Limit number of QSOs from {} to nmax={} (random subsample)".format(nqso,args.nmax))
            # take a random subsample
            indices = np.random.choice(np.arange(nqso), args.nmax, replace=False)  ## Use random.choice instead of random.uniform, which (rarely) duplicates QSOs
            transmission = transmission[indices]
            metadata = metadata[:][indices]
            DZ_FOG = DZ_FOG[indices]
            nqso = args.nmax

    # In previous versions of the London mocks we needed to enforce F=1 for
    # z > z_qso here, but this is not needed anymore. Moreover, now we also
    # have metal absorption that implies F < 1 for z > z_qso
    #for ii in range(len(metadata)):
    #    transmission[ii][trans_wave>lambda_RF_LYA*(metadata[ii]['Z']+1)]=1.0

    # if requested, add DLA to the transmission skewers
    if args.dla is not None :

        # if adding random DLAs, we will need a new random generator
        if args.dla=='random':
            log.info('Adding DLAs randomly')
            random_state_just_for_dlas = np.random.RandomState(seed)
        elif args.dla=='file':
            log.info('Adding DLAs from transmission file')
        else:
            log.error("Wrong option for args.dla: "+args.dla)
            sys.exit(1)

        # if adding DLAs, the information will be printed here
        dla_filename=os.path.join(pixdir,"dla-{}-{}.fits".format(nside,pixel))
        dla_NHI, dla_z, dla_qid,dla_id = [], [], [],[]

        # identify minimum Lya redshift in transmission files
        min_lya_z = np.min(trans_wave/lambda_RF_LYA - 1)

        # loop over quasars in pixel

        for ii in range(len(metadata)):

            # quasars with z < min_z will not have any DLA in spectrum
            if min_lya_z>metadata['Z'][ii]: continue

            # quasar ID
            idd=metadata['MOCKID'][ii]
            dlas=[]

            if args.dla=='file':
                for dla in dla_info[dla_info['MOCKID']==idd]:

                    # Adding only DLAs with z < zqso
                    if dla['Z_DLA_RSD']>=metadata['Z'][ii]: continue
                    dlas.append(dict(z=dla['Z_DLA_RSD'],N=dla['N_HI_DLA'],dlaid=dla['DLAID']))
                transmission_dla = dla_spec(trans_wave,dlas)

            elif args.dla=='random':
                dlas, transmission_dla = insert_dlas(trans_wave, metadata['Z'][ii], rstate=random_state_just_for_dlas)
                for idla in dlas:
                    idla['dlaid'] += idd*1000  # Added to have unique DLA ids; same format as DLAs from the file.

            # multiply transmissions and store information for the DLA file
            if len(dlas)>0:
                transmission[ii] = transmission_dla * transmission[ii]
                dla_z += [idla['z'] for idla in dlas]
                dla_NHI += [idla['N'] for idla in dlas]
                dla_id += [idla['dlaid'] for idla in dlas]
                dla_qid += [idd]*len(dlas)
        log.info('Added {} DLAs'.format(len(dla_id)))
        # write file with DLA information
        if len(dla_id)>0:
            dla_meta=Table()
            dla_meta['NHI'] = dla_NHI
            dla_meta['Z_DLA'] = dla_z  # This is Z_DLA_RSD in the transmission file.
            dla_meta['TARGETID']=dla_qid
            dla_meta['DLAID'] = dla_id
            hdu_dla = pyfits.convenience.table_to_hdu(dla_meta)
            hdu_dla.name="DLA_META"
            del(dla_meta)
            log.info("DLA metadata to be saved in {}".format(truth_filename))
        else:
            hdu_dla=pyfits.PrimaryHDU()
            hdu_dla.name="DLA_META"

    # if requested, extend transmission skewers to cover full spectrum
    if args.target_selection or args.bbflux :
        wanted_min_wave = 3329. # needed to compute magnitudes for decam2014-r (one could have trimmed the transmission file ...)
        wanted_max_wave = 55501. # needed to compute magnitudes for wise2010-W2

        if trans_wave[0]>wanted_min_wave :
            log.info("Increase wavelength range from {}:{} to {}:{} to compute magnitudes".format(int(trans_wave[0]),int(trans_wave[-1]),int(wanted_min_wave),int(trans_wave[-1])))
            # pad with ones at short wavelength, we assume F = 1 for z <~ 1.7
            # we don't need any wavelength resolution here
            new_trans_wave = np.append([wanted_min_wave,trans_wave[0]-0.01],trans_wave)
            new_transmission = np.ones((transmission.shape[0],new_trans_wave.size))
            new_transmission[:,2:] = transmission
            trans_wave   = new_trans_wave
            transmission = new_transmission

        if trans_wave[-1]<wanted_max_wave :
            log.info("Increase wavelength range from {}:{} to {}:{} to compute magnitudes".format(int(trans_wave[0]),int(trans_wave[-1]),int(trans_wave[0]),int(wanted_max_wave)))
            # pad with ones at long wavelength because we assume F = 1
            coarse_dwave = 2. # we don't care about resolution, we just need a decent QSO spectrum, there is no IGM transmission in this range
            n = int((wanted_max_wave-trans_wave[-1])/coarse_dwave)+1
            new_trans_wave = np.append(trans_wave,np.linspace(trans_wave[-1]+coarse_dwave,trans_wave[-1]+coarse_dwave*(n+1),n))
            new_transmission = np.ones((transmission.shape[0],new_trans_wave.size))
            new_transmission[:,:trans_wave.size] = transmission
            trans_wave   = new_trans_wave
            transmission = new_transmission

    # whether to use QSO or SIMQSO to generate quasar continua.  Simulate
    # spectra in the north vs south separately because they're on different
    # photometric systems.
    south = np.where( is_south(metadata['DEC']) )[0]
    north = np.where( ~is_south(metadata['DEC']) )[0]
    meta, qsometa = empty_metatable(nqso, objtype='QSO', simqso=not args.no_simqso)
    if args.no_simqso:
        log.info("Simulate {} QSOs with QSO templates".format(nqso))
        tmp_qso_flux = np.zeros([nqso, len(model.eigenwave)], dtype='f4')
        tmp_qso_wave = np.zeros_like(tmp_qso_flux)
    else:
        log.info("Simulate {} QSOs with SIMQSO templates".format(nqso))
        tmp_qso_flux = np.zeros([nqso, len(model.basewave)], dtype='f4')
        tmp_qso_wave = model.basewave

    for these, issouth in zip( (north, south), (False, True) ):

        # number of quasars in these
        nt = len(these)
        if nt<=0: continue

        if eboss is not None:
            # for eBOSS, generate only quasars with r<22
            magrange = (17.0, 21.3)
            _tmp_qso_flux, _tmp_qso_wave, _meta, _qsometa \
                = model.make_templates(nmodel=nt,
                    redshift=metadata['Z'][these], magrange=magrange,
                    lyaforest=False, nocolorcuts=True,
                    noresample=True, seed=seed, south=issouth)
        else:
            _tmp_qso_flux, _tmp_qso_wave, _meta, _qsometa \
                = model.make_templates(nmodel=nt,
                    redshift=metadata['Z'][these],
                    lyaforest=False, nocolorcuts=True,
                    noresample=True, seed=seed, south=issouth)

        _meta['TARGETID'] = metadata['MOCKID'][these]
        _qsometa['TARGETID'] = metadata['MOCKID'][these]
        meta[these] = _meta
        qsometa[these] = _qsometa
        tmp_qso_flux[these, :] = _tmp_qso_flux

        if args.no_simqso:
            tmp_qso_wave[these, :] = _tmp_qso_wave

    log.info("Resample to transmission wavelength grid")
    qso_flux=np.zeros((tmp_qso_flux.shape[0],trans_wave.size))
    if args.no_simqso:
        for q in range(tmp_qso_flux.shape[0]) :
            qso_flux[q]=np.interp(trans_wave,tmp_qso_wave[q],tmp_qso_flux[q])
    else:
        for q in range(tmp_qso_flux.shape[0]) :
            qso_flux[q]=np.interp(trans_wave,tmp_qso_wave,tmp_qso_flux[q])

    tmp_qso_flux = qso_flux
    tmp_qso_wave = trans_wave

    # if requested, add BAL features to the quasar continua
    if args.balprob:
        if args.balprob<=1. and args.balprob >0:
            log.info("Adding BALs with probability {}".format(args.balprob))
            # save current random state
            rnd_state = np.random.get_state()
            tmp_qso_flux,meta_bal=bal.insert_bals(tmp_qso_wave,tmp_qso_flux, metadata['Z'],
                                                  balprob=args.balprob,seed=seed)
            # restore random state to get the same random numbers later
            # as when we don't insert BALs
            np.random.set_state(rnd_state)
            meta_bal['TARGETID'] = metadata['MOCKID']
            w = meta_bal['TEMPLATEID']!=-1
            meta_bal = meta_bal[:][w]
            hdu_bal=pyfits.convenience.table_to_hdu(meta_bal); hdu_bal.name="BAL_META"
            del meta_bal
        else:
            balstr=str(args.balprob)
            log.error("BAL probability is not between 0 and 1 : "+balstr)
            sys.exit(1)

    # Multiply quasar continua by transmitted flux fraction
    # (at this point transmission file might include Ly-beta, metals and DLAs)
    log.info("Apply transmitted flux fraction")
    if not args.no_transmission:
        tmp_qso_flux = apply_lya_transmission(tmp_qso_wave,tmp_qso_flux,
                            trans_wave,transmission)

    # if requested, compute metal transmission on the fly
    # (if not included already from the transmission file)
    if args.metals is not None:
        if args.metals_from_file :
            log.error('you cannot add metals twice')
            raise ValueError('you cannot add metals twice')
        if args.no_transmission:
            log.error('you cannot add metals if asking for no-transmission')
            raise ValueError('can not add metals if using no-transmission')
        log.info("Apply metals: {}".format(', '.join(args.metals)))

        tmp_qso_flux = apply_metals_transmission(tmp_qso_wave,tmp_qso_flux,
                            trans_wave,transmission,args.metals)

    # if requested, compute magnitudes and apply target selection.  Need to do
    # this calculation separately for QSOs in the north vs south.
    bbflux=None
    if args.target_selection or args.bbflux :
        bands=['FLUX_G','FLUX_R','FLUX_Z', 'FLUX_W1', 'FLUX_W2']
        bbflux=dict()
        bbflux['SOUTH'] = is_south(metadata['DEC'])
        for band in bands:
            bbflux[band] = np.zeros(nqso)
        # need to recompute the magnitudes to account for lya transmission
        log.info("Compute QSO magnitudes")

        for these, filters in zip( (~bbflux['SOUTH'], bbflux['SOUTH']),
                                   (bassmzls_and_wise_filters, decam_and_wise_filters) ):
            if np.count_nonzero(these) > 0:
                maggies = filters.get_ab_maggies(1e-17 * tmp_qso_flux[these, :], tmp_qso_wave)
                for band, filt in zip( bands, maggies.colnames ):
                    bbflux[band][these] = np.ma.getdata(1e9 * maggies[filt]) # nanomaggies

    if args.target_selection :
        log.info("Apply target selection")
        isqso = np.ones(nqso, dtype=bool)
        for these, issouth in zip( (~bbflux['SOUTH'], bbflux['SOUTH']), (False, True) ):
            if np.count_nonzero(these) > 0:
                # apply optical-only color cuts when using QSO templates (args.no_simqso); SIMQSO uses the full cuts
                isqso[these] &= isQSO_colors(gflux=bbflux['FLUX_G'][these],
                                             rflux=bbflux['FLUX_R'][these],
                                             zflux=bbflux['FLUX_Z'][these],
                                             w1flux=bbflux['FLUX_W1'][these],
                                             w2flux=bbflux['FLUX_W2'][these],
                                             south=issouth, optical=args.no_simqso)

        log.info("Target selection: {}/{} QSOs selected".format(np.sum(isqso),nqso))
        selection=np.where(isqso)[0]
        if selection.size==0 : return
        tmp_qso_flux = tmp_qso_flux[selection]
        metadata     = metadata[:][selection]
        meta         = meta[:][selection]
        qsometa      = qsometa[:][selection]
        DZ_FOG       = DZ_FOG[selection]
        for band in bands :
            bbflux[band] = bbflux[band][selection]
        bbflux['SOUTH'] = bbflux['SOUTH'][selection]
        nqso         = selection.size

    log.info("Resample to a linear wavelength grid (needed by DESI sim.)")
    # careful integration of bins, not just a simple interpolation
    qso_wave=np.linspace(args.wmin,args.wmax,int((args.wmax-args.wmin)/args.dwave)+1)
    qso_flux=np.zeros((tmp_qso_flux.shape[0],qso_wave.size))
    for q in range(tmp_qso_flux.shape[0]) :
        qso_flux[q]=resample_flux(qso_wave,tmp_qso_wave,tmp_qso_flux[q])

    log.info("Simulate DESI observation and write output file")
    if "MOCKID" in metadata.dtype.names :
        #log.warning("Using MOCKID as TARGETID")
        targetid=np.array(metadata["MOCKID"]).astype(int)
    elif "ID" in metadata.dtype.names :
        log.warning("Using ID as TARGETID")
        targetid=np.array(metadata["ID"]).astype(int)
    else :
        log.warning("No TARGETID")
        targetid=None

    specmeta={"HPXNSIDE":nside,"HPXPIXEL":pixel, "HPXNEST":hpxnest}

    if args.target_selection or args.bbflux :
        fibermap_columns = dict(
            FLUX_G = bbflux['FLUX_G'],
            FLUX_R = bbflux['FLUX_R'],
            FLUX_Z = bbflux['FLUX_Z'],
            FLUX_W1 = bbflux['FLUX_W1'],
            FLUX_W2 = bbflux['FLUX_W2'],
            )
        photsys = np.full(len(bbflux['FLUX_G']), 'N', dtype='S1')
        photsys[bbflux['SOUTH']] = b'S'
        fibermap_columns['PHOTSYS'] = photsys
    else :
        fibermap_columns=None

    # Attenuate the spectra for extinction
    if sfdmap is not None:
        Rv = 3.1  # standard Milky Way value
        extinction = Rv * ext_odonnell(qso_wave)
        EBV = sfdmap.ebv(metadata['RA'], metadata['DEC'], scaling=1.0)
        qso_flux *= 10**(-0.4 * EBV[:, np.newaxis] * extinction)
        if fibermap_columns is not None:
            fibermap_columns['EBV'] = EBV
        # lengthen the exposure time to compensate for the flux lost to the
        # median g-band extinction A_g of the pixel
        EBV0 = 0.0
        EBV_med = np.median(EBV)
        Ag = 3.303 * (EBV_med - EBV0)
        exptime_fact = np.power(10.0, (2.0 * Ag / 2.5))
        obsconditions['EXPTIME'] *= exptime_fact
        log.info("Dust extinction added")
        log.info('exposure time adjusted to {}'.format(obsconditions['EXPTIME']))

    sim_spectra(qso_wave,qso_flux, args.program, obsconditions=obsconditions,spectra_filename=ofilename,
                sourcetype="qso", skyerr=args.skyerr,ra=metadata["RA"],dec=metadata["DEC"],targetid=targetid,
                meta=specmeta,seed=seed,fibermap_columns=fibermap_columns,use_poisson=False) # use Poisson = False to get reproducible results.

    ### Keep input redshift
    Z_spec = metadata['Z'].copy()
    Z_input = metadata['Z'].copy()-DZ_FOG

    ### Add a shift to the redshift, simulating the systematic imprecision of redrock
    DZ_sys_shift = args.shift_kms_los/c*(1.+Z_input)
    log.info('Added a shift of {} km/s to the redshift'.format(args.shift_kms_los))
    meta['REDSHIFT'] += DZ_sys_shift
    metadata['Z'] += DZ_sys_shift

    ### Add a shift to the redshift, simulating the statistical imprecision of redrock
    if args.gamma_kms_zfit:
        log.info("Added zfit error with gamma {} to zbest".format(args.gamma_kms_zfit))
        DZ_stat_shift = mod_cauchy(loc=0,scale=args.gamma_kms_zfit,size=nqso,cut=3000)/c*(1.+Z_input)
        meta['REDSHIFT'] += DZ_stat_shift
        metadata['Z'] += DZ_stat_shift

    ## Write the truth file, including metadata for DLAs and BALs
    log.info('Writing a truth file {}'.format(truth_filename))
    meta.rename_column('REDSHIFT','Z')
    meta.add_column(Column(Z_spec,name='TRUEZ'))
    meta.add_column(Column(Z_input,name='Z_INPUT'))
    meta.add_column(Column(DZ_FOG,name='DZ_FOG'))
    meta.add_column(Column(DZ_sys_shift,name='DZ_SYS'))
    if args.gamma_kms_zfit:
        meta.add_column(Column(DZ_stat_shift,name='DZ_STAT'))
    if 'Z_noRSD' in metadata.dtype.names:
        meta.add_column(Column(metadata['Z_noRSD'],name='Z_NORSD'))
    else:
        log.info('Z_noRSD field not present in transmission file. Z_NORSD not saved to truth file')

    #Save global seed and pixel seed to primary header
    hdr=pyfits.Header()
    hdr['GSEED']=global_seed
    hdr['PIXSEED']=seed
    hdu = pyfits.convenience.table_to_hdu(meta)
    hdu.header['EXTNAME'] = 'TRUTH'
    hduqso=pyfits.convenience.table_to_hdu(qsometa)
    hduqso.header['EXTNAME'] = 'QSO_META'
    hdulist=pyfits.HDUList([pyfits.PrimaryHDU(header=hdr),hdu,hduqso])
    if args.dla:
        hdulist.append(hdu_dla)
    if args.balprob:
        hdulist.append(hdu_bal)
    hdulist.writeto(truth_filename, overwrite=True)
    hdulist.close()

    if args.zbest :
        log.info("Read fibermap")
        fibermap = read_fibermap(ofilename)
        log.info("Writing a zbest file {}".format(zbest_filename))
        columns = [
            ('CHI2', 'f8'),
            ('COEFF', 'f8' , (4,)),
            ('Z', 'f8'),
            ('ZERR', 'f8'),
            ('ZWARN', 'i8'),
            ('SPECTYPE', (str,96)),
            ('SUBTYPE', (str,16)),
            ('TARGETID', 'i8'),
            ('DELTACHI2', 'f8'),
            ('BRICKNAME', (str,8))]
        zbest = Table(np.zeros(nqso, dtype=columns))
        zbest['CHI2'][:] = 0.
        zbest['Z'][:] = metadata['Z']
        zbest['ZERR'][:] = 0.
        zbest['ZWARN'][:] = 0
        zbest['SPECTYPE'][:] = 'QSO'
        zbest['SUBTYPE'][:] = ''
        zbest['TARGETID'][:] = metadata['MOCKID']
        zbest['DELTACHI2'][:] = 25.
        hzbest = pyfits.convenience.table_to_hdu(zbest); hzbest.name='ZBEST'
        hfmap  = pyfits.convenience.table_to_hdu(fibermap);  hfmap.name='FIBERMAP'
        hdulist =pyfits.HDUList([pyfits.PrimaryHDU(),hzbest,hfmap])
        hdulist.writeto(zbest_filename, overwrite=True)
        hdulist.close() # see if this helps with memory issue
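
As a reference for the layout assembled above (not part of desisim itself): the truth file is a primary HDU whose header records the global and per-pixel seeds, followed by TRUTH and QSO_META table extensions, with DLA_META / BAL_META appended when requested. A minimal, self-contained sketch with toy tables, toy seed values, and an example filename:

import numpy as np
import astropy.io.fits as pyfits
from astropy.table import Table

# toy stand-ins for the meta / qsometa tables built in the function above
meta = Table({'TARGETID': np.arange(3), 'Z': [2.1, 2.5, 3.0]})
qsometa = Table({'TARGETID': np.arange(3)})

hdr = pyfits.Header()
hdr['GSEED'] = 42        # global seed
hdr['PIXSEED'] = 7       # seed derived for this HEALPix pixel
hdu = pyfits.table_to_hdu(meta)
hdu.header['EXTNAME'] = 'TRUTH'
hduqso = pyfits.table_to_hdu(qsometa)
hduqso.header['EXTNAME'] = 'QSO_META'
hdulist = pyfits.HDUList([pyfits.PrimaryHDU(header=hdr), hdu, hduqso])
hdulist.writeto('truth-example.fits', overwrite=True)
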
Example No. 7
0
def simulate_one_healpix(ifilename,args,model,obsconditions,decam_and_wise_filters,
                         bassmzls_and_wise_filters,footprint_healpix_weight,
                         footprint_healpix_nside,
                         bal=None,sfdmap=None,eboss=None) :
    log = get_logger()

    # open filename and extract basic HEALPix information
    pixel, nside, hpxnest = get_healpix_info(ifilename)

    # using global seed (could be None) get seed for this particular pixel
    global_seed = args.seed
    seed = get_pixel_seed(pixel, nside, global_seed)
    # use this seed to generate future random numbers
    np.random.seed(seed)

    # get output file (we will write there spectra for this HEALPix pixel)
    ofilename = get_spectra_filename(args,nside,pixel)
    # get directory name (we will also write there zbest file)
    pixdir = os.path.dirname(ofilename)

    # get filename for truth file
    truth_filename = get_truth_filename(args,pixdir,nside,pixel)

    # get filename for zbest file
    zbest_filename = get_zbest_filename(args,pixdir,nside,pixel)

    if not args.overwrite :
        # check whether output exists or not
        if args.zbest :
            if os.path.isfile(ofilename) and os.path.isfile(zbest_filename) :
                log.info("skip existing {} and {}".format(ofilename,zbest_filename))
                return
        else : # only test spectra file
            if os.path.isfile(ofilename) :
                log.info("skip existing {}".format(ofilename))
                return

    # create sub-directories if required
    if len(pixdir)>0 :
        if not os.path.isdir(pixdir) :
            log.info("Creating dir {}".format(pixdir))
            os.makedirs(pixdir)

    log.info("Read skewers in {}, random seed = {}".format(ifilename,seed))

    # Read transmission from files. It might include DLA information, and it
    # might add metal transmission as well (from the HDU file).
    log.info("Read transmission file {}".format(ifilename))
    trans_wave, transmission, metadata, dla_info = read_lya_skewers(ifilename,read_dlas=(args.dla=='file'),add_metals=args.metals_from_file)

    ### Add Finger-of-God velocity dispersion before generating the continua
    log.info("Add FOG with sigma {} km/s to the quasar redshifts".format(args.sigma_kms_fog))
    DZ_FOG = args.sigma_kms_fog/c*(1.+metadata['Z'])*np.random.normal(0,1,metadata['Z'].size)
    metadata['Z'] += DZ_FOG

    ### Select quasars within the requested redshift range
    w = (metadata['Z']>=args.zmin) & (metadata['Z']<=args.zmax)
    transmission = transmission[w]
    metadata = metadata[:][w]
    DZ_FOG = DZ_FOG[w]

    # optional mode to reproduce the BOSS+eBOSS sample
    if eboss is not None:
        if args.downsampling or args.desi_footprint:
            raise ValueError("eboss option cannot be run with "
                    +"desi_footprint or downsampling")

        # Get the redshift distribution from SDSS
        selection = sdss_subsample_redshift(metadata["RA"],metadata["DEC"],metadata['Z'],eboss['redshift'])
        log.info("Select QSOs in BOSS+eBOSS redshift distribution {} -> {}".format(metadata['Z'].size,selection.sum()))
        if selection.sum()==0:
            log.warning("No intersection with BOSS+eBOSS redshift distribution")
            return
        transmission = transmission[selection]
        metadata = metadata[:][selection]
        DZ_FOG = DZ_FOG[selection]

        # figure out the density of all quasars
        N_highz = metadata['Z'].size
        # area of the healpix pixel, in square degrees
        area_deg2 = healpy.pixelfunc.nside2pixarea(nside,degrees=True)
        input_highz_dens_deg2 = N_highz/area_deg2
        selection = sdss_subsample(metadata["RA"], metadata["DEC"],
                        input_highz_dens_deg2,eboss['footprint'])
        log.info("Select QSOs in BOSS+eBOSS footprint {} -> {}".format(transmission.shape[0],selection.size))
        if selection.size == 0 :
            log.warning("No intersection with BOSS+eBOSS footprint")
            return
        transmission = transmission[selection]
        metadata = metadata[:][selection]
        DZ_FOG = DZ_FOG[selection]

    if args.desi_footprint :
        footprint_healpix = footprint.radec2pix(footprint_healpix_nside, metadata["RA"], metadata["DEC"])
        selection = np.where(footprint_healpix_weight[footprint_healpix]>0.99)[0]
        log.info("Select QSOs in DESI footprint {} -> {}".format(transmission.shape[0],selection.size))
        if selection.size == 0 :
            log.warning("No intersection with DESI footprint")
            return
        transmission = transmission[selection]
        metadata = metadata[:][selection]
        DZ_FOG = DZ_FOG[selection]



    nqso=transmission.shape[0]
    if args.downsampling is not None :
        if args.downsampling <= 0 or args.downsampling > 1 :
            log.error("Down sampling fraction={} must be between 0 and 1".format(args.downsampling))
            raise ValueError("Down sampling fraction={} must be between 0 and 1".format(args.downsampling))
        indices = np.where(np.random.uniform(size=nqso)<args.downsampling)[0]
        if indices.size == 0 :
            log.warning("Down sampling from {} to 0 (by chance I presume)".format(nqso))
            return
        transmission = transmission[indices]
        metadata = metadata[:][indices]
        DZ_FOG = DZ_FOG[indices]
        nqso = transmission.shape[0]

    if args.nmax is not None :
        if args.nmax < nqso :
            log.info("Limit number of QSOs from {} to nmax={} (random subsample)".format(nqso,args.nmax))
            # take a random subsample (indices are drawn with replacement, so repeats are possible)
            indices = (np.random.uniform(size=args.nmax)*nqso).astype(int)
            transmission = transmission[indices]
            metadata = metadata[:][indices]
            DZ_FOG = DZ_FOG[indices]
            nqso = args.nmax

    # In previous versions of the London mocks we needed to enforce F=1 for
    # z > z_qso here, but this is not needed anymore. Moreover, now we also
    # have metal absorption that implies F < 1 for z > z_qso
    #for ii in range(len(metadata)):
    #    transmission[ii][trans_wave>lambda_RF_LYA*(metadata[ii]['Z']+1)]=1.0

    # if requested, add DLA to the transmission skewers
    if args.dla is not None :

        # if adding random DLAs, we will need a new random generator
        if args.dla=='random':
            log.info('Adding DLAs randomly')
            random_state_just_for_dlas = np.random.RandomState(seed)
        elif args.dla=='file':
            log.info('Adding DLAs from transmission file')
        else:
            log.error("Wrong option for args.dla: "+args.dla)
            sys.exit(1)

        # if adding DLAs, the information will be printed here
        dla_filename=os.path.join(pixdir,"dla-{}-{}.fits".format(nside,pixel))
        dla_NHI, dla_z, dla_qid,dla_id = [], [], [],[]

        # identify minimum Lya redshift in transmission files
        min_lya_z = np.min(trans_wave/lambda_RF_LYA - 1)

        # loop over quasars in pixel

        for ii in range(len(metadata)):

            # quasars with z < min_z will not have any DLA in spectrum
            if min_lya_z>metadata['Z'][ii]: continue

            # quasar ID
            idd=metadata['MOCKID'][ii]
            dlas=[]

            if args.dla=='file':
                for dla in dla_info[dla_info['MOCKID']==idd]:

                    # Adding only DLAs with z < zqso
                    if dla['Z_DLA_RSD']>=metadata['Z'][ii]: continue
                    dlas.append(dict(z=dla['Z_DLA_RSD'],N=dla['N_HI_DLA'],dlaid=dla['DLAID']))
                transmission_dla = dla_spec(trans_wave,dlas)

            elif args.dla=='random':
                dlas, transmission_dla = insert_dlas(trans_wave, metadata['Z'][ii], rstate=random_state_just_for_dlas)
                for idla in dlas:
                    idla['dlaid'] += idd*1000  # Added to have unique DLA ids. Same format as DLAs from file.

            # multiply transmissions and store information for the DLA file
            if len(dlas)>0:
                transmission[ii] = transmission_dla * transmission[ii]
                dla_z += [idla['z'] for idla in dlas]
                dla_NHI += [idla['N'] for idla in dlas]
                dla_id += [idla['dlaid'] for idla in dlas]
                dla_qid += [idd]*len(dlas)
        log.info('Added {} DLAs'.format(len(dla_id)))
        # write file with DLA information
        if len(dla_id)>0:
            dla_meta=Table()
            dla_meta['NHI'] = dla_NHI
            dla_meta['Z_DLA'] = dla_z  # this is Z_DLA_RSD in the transmission file
            dla_meta['TARGETID']=dla_qid
            dla_meta['DLAID'] = dla_id
            hdu_dla = pyfits.convenience.table_to_hdu(dla_meta)
            hdu_dla.name="DLA_META"
            del dla_meta
            log.info("DLA metadata to be saved in {}".format(truth_filename))
        else:
            hdu_dla=pyfits.PrimaryHDU()
            hdu_dla.name="DLA_META"

    # if requested, extend transmission skewers to cover full spectrum
    if args.target_selection or args.bbflux :
        wanted_min_wave = 3329. # needed to compute magnitudes for decam2014-r (one could have trimmed the transmission file ...)
        wanted_max_wave = 55501. # needed to compute magnitudes for wise2010-W2

        if trans_wave[0]>wanted_min_wave :
            log.info("Increase wavelength range from {}:{} to {}:{} to compute magnitudes".format(int(trans_wave[0]),int(trans_wave[-1]),int(wanted_min_wave),int(trans_wave[-1])))
            # pad with ones at short wavelengths; we assume F = 1 for z <~ 1.7
            # we don't need any wavelength resolution here
            new_trans_wave = np.append([wanted_min_wave,trans_wave[0]-0.01],trans_wave)
            new_transmission = np.ones((transmission.shape[0],new_trans_wave.size))
            new_transmission[:,2:] = transmission
            trans_wave   = new_trans_wave
            transmission = new_transmission

        if trans_wave[-1]<wanted_max_wave :
            log.info("Increase wavelength range from {}:{} to {}:{} to compute magnitudes".format(int(trans_wave[0]),int(trans_wave[-1]),int(trans_wave[0]),int(wanted_max_wave)))
            # pad with ones at long wavelength because we assume F = 1
            coarse_dwave = 2. # we don't care about resolution, we just need a decent QSO spectrum, there is no IGM transmission in this range
            n = int((wanted_max_wave-trans_wave[-1])/coarse_dwave)+1
            new_trans_wave = np.append(trans_wave,np.linspace(trans_wave[-1]+coarse_dwave,trans_wave[-1]+coarse_dwave*(n+1),n))
            new_transmission = np.ones((transmission.shape[0],new_trans_wave.size))
            new_transmission[:,:trans_wave.size] = transmission
            trans_wave   = new_trans_wave
            transmission = new_transmission

    # whether to use QSO or SIMQSO to generate quasar continua.  Simulate
    # spectra in the north vs south separately because they're on different
    # photometric systems.
    south = np.where( is_south(metadata['DEC']) )[0]
    north = np.where( ~is_south(metadata['DEC']) )[0]
    meta, qsometa = empty_metatable(nqso, objtype='QSO', simqso=not args.no_simqso)
    if args.no_simqso:
        log.info("Simulate {} QSOs with QSO templates".format(nqso))
        tmp_qso_flux = np.zeros([nqso, len(model.eigenwave)], dtype='f4')
        tmp_qso_wave = np.zeros_like(tmp_qso_flux)
    else:
        log.info("Simulate {} QSOs with SIMQSO templates".format(nqso))
        tmp_qso_flux = np.zeros([nqso, len(model.basewave)], dtype='f4')
        tmp_qso_wave = model.basewave

    for these, issouth in zip( (north, south), (False, True) ):

        # number of quasars in these
        nt = len(these)
        if nt<=0: continue

        if eboss is not None:
            # for eBOSS, restrict to bright quasars (normalization magnitude between 17.0 and 21.3)
            magrange = (17.0, 21.3)
            _tmp_qso_flux, _tmp_qso_wave, _meta, _qsometa \
                = model.make_templates(nmodel=nt,
                    redshift=metadata['Z'][these], magrange=magrange,
                    lyaforest=False, nocolorcuts=True,
                    noresample=True, seed=seed, south=issouth)
        else:
            _tmp_qso_flux, _tmp_qso_wave, _meta, _qsometa \
                = model.make_templates(nmodel=nt,
                    redshift=metadata['Z'][these],
                    lyaforest=False, nocolorcuts=True,
                    noresample=True, seed=seed, south=issouth)

        _meta['TARGETID'] = metadata['MOCKID'][these]
        _qsometa['TARGETID'] = metadata['MOCKID'][these]
        meta[these] = _meta
        qsometa[these] = _qsometa
        tmp_qso_flux[these, :] = _tmp_qso_flux

        if args.no_simqso:
            tmp_qso_wave[these, :] = _tmp_qso_wave

    log.info("Resample to transmission wavelength grid")
    qso_flux=np.zeros((tmp_qso_flux.shape[0],trans_wave.size))
    if args.no_simqso:
        for q in range(tmp_qso_flux.shape[0]) :
            qso_flux[q]=np.interp(trans_wave,tmp_qso_wave[q],tmp_qso_flux[q])
    else:
        for q in range(tmp_qso_flux.shape[0]) :
            qso_flux[q]=np.interp(trans_wave,tmp_qso_wave,tmp_qso_flux[q])

    tmp_qso_flux = qso_flux
    tmp_qso_wave = trans_wave

    # if requested, add BAL features to the quasar continua
    if args.balprob:
        if args.balprob<=1. and args.balprob >0:
            log.info("Adding BALs with probability {}".format(args.balprob))
            # save current random state
            rnd_state = np.random.get_state()
            tmp_qso_flux,meta_bal=bal.insert_bals(tmp_qso_wave,tmp_qso_flux, metadata['Z'],
                                                  balprob=args.balprob,seed=seed)
            # restore random state to get the same random numbers later
            # as when we don't insert BALs
            np.random.set_state(rnd_state)
            meta_bal['TARGETID'] = metadata['MOCKID']
            w = meta_bal['TEMPLATEID']!=-1
            meta_bal = meta_bal[:][w]
            hdu_bal=pyfits.convenience.table_to_hdu(meta_bal); hdu_bal.name="BAL_META"
            del meta_bal
        else:
            balstr=str(args.balprob)
            log.error("BAL probability is not between 0 and 1 : "+balstr)
            sys.exit(1)

    # Multiply quasar continua by transmitted flux fraction
    # (at this point transmission file might include Ly-beta, metals and DLAs)
    log.info("Apply transmitted flux fraction")
    if not args.no_transmission:
        tmp_qso_flux = apply_lya_transmission(tmp_qso_wave,tmp_qso_flux,
                            trans_wave,transmission)

    # if requested, compute metal transmission on the fly
    # (if not included already from the transmission file)
    if args.metals is not None:
        if args.metals_from_file:
            log.error('you cannot add metals twice')
            raise ValueError('you cannot add metals twice')
        if args.no_transmission:
            log.error('you cannot add metals if asking for no-transmission')
            raise ValueError('can not add metals if using no-transmission')
        log.info("Apply metals: {}".format(', '.join(args.metals)))

        tmp_qso_flux = apply_metals_transmission(tmp_qso_wave,tmp_qso_flux,
                            trans_wave,transmission,args.metals)

    # if requested, compute magnitudes and apply target selection.  Need to do
    # this calculation separately for QSOs in the north vs south.
    bbflux=None
    if args.target_selection or args.bbflux :
        bands=['FLUX_G','FLUX_R','FLUX_Z', 'FLUX_W1', 'FLUX_W2']
        bbflux=dict()
        bbflux['SOUTH'] = is_south(metadata['DEC'])
        for band in bands:
            bbflux[band] = np.zeros(nqso)
        # need to recompute the magnitudes to account for lya transmission
        log.info("Compute QSO magnitudes")

        for these, filters in zip( (~bbflux['SOUTH'], bbflux['SOUTH']),
                                   (bassmzls_and_wise_filters, decam_and_wise_filters) ):
            if np.count_nonzero(these) > 0:
                maggies = filters.get_ab_maggies(1e-17 * tmp_qso_flux[these, :], tmp_qso_wave)
                for band, filt in zip( bands, maggies.colnames ):
                    bbflux[band][these] = np.ma.getdata(1e9 * maggies[filt]) # nanomaggies

    if args.target_selection :
        log.info("Apply target selection")
        isqso = np.ones(nqso, dtype=bool)
        for these, issouth in zip( (~bbflux['SOUTH'], bbflux['SOUTH']), (False, True) ):
            if np.count_nonzero(these) > 0:
                # apply optical-only color cuts when using QSO templates (args.no_simqso); SIMQSO uses the full cuts
                isqso[these] &= isQSO_colors(gflux=bbflux['FLUX_G'][these],
                                             rflux=bbflux['FLUX_R'][these],
                                             zflux=bbflux['FLUX_Z'][these],
                                             w1flux=bbflux['FLUX_W1'][these],
                                             w2flux=bbflux['FLUX_W2'][these],
                                             south=issouth, optical=args.no_simqso)

        log.info("Target selection: {}/{} QSOs selected".format(np.sum(isqso),nqso))
        selection=np.where(isqso)[0]
        if selection.size==0 : return
        tmp_qso_flux = tmp_qso_flux[selection]
        metadata     = metadata[:][selection]
        meta         = meta[:][selection]
        qsometa      = qsometa[:][selection]
        DZ_FOG       = DZ_FOG[selection]

        for band in bands :
            bbflux[band] = bbflux[band][selection]
        bbflux['SOUTH'] = bbflux['SOUTH'][selection]
        nqso         = selection.size

    log.info("Resample to a linear wavelength grid (needed by DESI sim.)")
    # careful integration of bins, not just a simple interpolation
    qso_wave=np.linspace(args.wmin,args.wmax,int((args.wmax-args.wmin)/args.dwave)+1)
    qso_flux=np.zeros((tmp_qso_flux.shape[0],qso_wave.size))
    for q in range(tmp_qso_flux.shape[0]) :
        qso_flux[q]=resample_flux(qso_wave,tmp_qso_wave,tmp_qso_flux[q])

    log.info("Simulate DESI observation and write output file")
    if "MOCKID" in metadata.dtype.names :
        #log.warning("Using MOCKID as TARGETID")
        targetid=np.array(metadata["MOCKID"]).astype(int)
    elif "ID" in metadata.dtype.names :
        log.warning("Using ID as TARGETID")
        targetid=np.array(metadata["ID"]).astype(int)
    else :
        log.warning("No TARGETID")
        targetid=None

    specmeta={"HPXNSIDE":nside,"HPXPIXEL":pixel, "HPXNEST":hpxnest}

    if args.target_selection or args.bbflux :
        fibermap_columns = dict(
            FLUX_G = bbflux['FLUX_G'],
            FLUX_R = bbflux['FLUX_R'],
            FLUX_Z = bbflux['FLUX_Z'],
            FLUX_W1 = bbflux['FLUX_W1'],
            FLUX_W2 = bbflux['FLUX_W2'],
            )
        photsys = np.full(len(bbflux['FLUX_G']), 'N', dtype='S1')
        photsys[bbflux['SOUTH']] = b'S'
        fibermap_columns['PHOTSYS'] = photsys
    else :
        fibermap_columns=None

    # Attenuate the spectra for extinction
    if sfdmap is not None:
        Rv = 3.1  # standard Milky Way value
        extinction = Rv * ext_odonnell(qso_wave)
        EBV = sfdmap.ebv(metadata['RA'], metadata['DEC'], scaling=1.0)
        qso_flux *= 10**(-0.4 * EBV[:, np.newaxis] * extinction)
        if fibermap_columns is not None:
            fibermap_columns['EBV'] = EBV
        # lengthen the exposure time to compensate for the flux lost to the
        # median g-band extinction A_g of the pixel
        EBV0 = 0.0
        EBV_med = np.median(EBV)
        Ag = 3.303 * (EBV_med - EBV0)
        exptime_fact = np.power(10.0, (2.0 * Ag / 2.5))
        obsconditions['EXPTIME'] *= exptime_fact
        log.info("Dust extinction added")
        log.info('exposure time adjusted to {}'.format(obsconditions['EXPTIME']))

    sim_spectra(qso_wave,qso_flux, args.program, obsconditions=obsconditions,spectra_filename=ofilename,
                sourcetype="qso", skyerr=args.skyerr,ra=metadata["RA"],dec=metadata["DEC"],targetid=targetid,
                meta=specmeta,seed=seed,fibermap_columns=fibermap_columns,use_poisson=False) # use Poisson = False to get reproducible results.

    ### Keep input redshift
    Z_spec = metadata['Z'].copy()
    Z_input = metadata['Z'].copy()-DZ_FOG

    ### Add a shift to the redshift, simulating the systematic imprecision of redrock
    DZ_sys_shift = args.shift_kms_los/c*(1.+Z_input)
    log.info('Added a shift of {} km/s to the redshift'.format(args.shift_kms_los))
    meta['REDSHIFT'] += DZ_sys_shift
    metadata['Z'] += DZ_sys_shift

    ### Add a shift to the redshift, simulating the statistical imprecision of redrock
    if args.gamma_kms_zfit:
        log.info("Added zfit error with gamma {} to zbest".format(args.gamma_kms_zfit))
        DZ_stat_shift = mod_cauchy(loc=0,scale=args.gamma_kms_zfit,size=nqso,cut=3000)/c*(1.+Z_input)
        meta['REDSHIFT'] += DZ_stat_shift
        metadata['Z'] += DZ_stat_shift

    ## Write the truth file, including metadata for DLAs and BALs
    log.info('Writing a truth file {}'.format(truth_filename))
    meta.rename_column('REDSHIFT','Z')
    meta.add_column(Column(Z_spec,name='TRUEZ'))
    meta.add_column(Column(Z_input,name='Z_INPUT'))
    meta.add_column(Column(DZ_FOG,name='DZ_FOG'))
    meta.add_column(Column(DZ_sys_shift,name='DZ_SYS'))
    if args.gamma_kms_zfit:
        meta.add_column(Column(DZ_stat_shift,name='DZ_STAT'))
    if 'Z_noRSD' in metadata.dtype.names:
        meta.add_column(Column(metadata['Z_noRSD'],name='Z_NORSD'))
    else:
        log.info('Z_noRSD field not present in transmission file. Z_NORSD not saved to truth file')

    hdu = pyfits.convenience.table_to_hdu(meta)
    hdu.header['EXTNAME'] = 'TRUTH'
    hduqso=pyfits.convenience.table_to_hdu(qsometa)
    hduqso.header['EXTNAME'] = 'QSO_META'
    hdulist=pyfits.HDUList([pyfits.PrimaryHDU(),hdu,hduqso])
    if args.dla:
        hdulist.append(hdu_dla)
    if args.balprob:
        hdulist.append(hdu_bal)
    hdulist.writeto(truth_filename, overwrite=True)
    hdulist.close()

    if args.zbest :
        log.info("Read fibermap")
        fibermap = read_fibermap(ofilename)
        log.info("Writing a zbest file {}".format(zbest_filename))
        columns = [
            ('CHI2', 'f8'),
            ('COEFF', 'f8' , (4,)),
            ('Z', 'f8'),
            ('ZERR', 'f8'),
            ('ZWARN', 'i8'),
            ('SPECTYPE', (str,96)),
            ('SUBTYPE', (str,16)),
            ('TARGETID', 'i8'),
            ('DELTACHI2', 'f8'),
            ('BRICKNAME', (str,8))]
        zbest = Table(np.zeros(nqso, dtype=columns))
        zbest['CHI2'][:] = 0.
        zbest['Z'][:] = metadata['Z']
        zbest['ZERR'][:] = 0.
        zbest['ZWARN'][:] = 0
        zbest['SPECTYPE'][:] = 'QSO'
        zbest['SUBTYPE'][:] = ''
        zbest['TARGETID'][:] = metadata['MOCKID']
        zbest['DELTACHI2'][:] = 25.
        hzbest = pyfits.convenience.table_to_hdu(zbest); hzbest.name='ZBEST'
        hfmap  = pyfits.convenience.table_to_hdu(fibermap);  hfmap.name='FIBERMAP'
        hdulist =pyfits.HDUList([pyfits.PrimaryHDU(),hzbest,hfmap])
        hdulist.writeto(zbest_filename, overwrite=True)
        hdulist.close() # see if this helps with memory issue
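
The BAL step above wraps bal.insert_bals() in a save/restore of the global numpy random state, so that the draws made afterwards are identical whether or not BALs were inserted. A standalone sketch of that pattern; the helper name is only illustrative:

import numpy as np

def run_without_advancing_rng(func, *args, **kwargs):
    """Call func (which may consume numpy random numbers) and then restore
    the global generator state, leaving later draws unaffected."""
    state = np.random.get_state()
    try:
        return func(*args, **kwargs)
    finally:
        np.random.set_state(state)

np.random.seed(0)
run_without_advancing_rng(np.random.normal, size=100)   # consumes 100 draws
# the next draw is the same as if the call above had never happened
assert np.random.normal() == np.random.RandomState(0).normal()
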
Example No. 8
0
def main(args=None):

    log = get_logger()

    if isinstance(args, (list, tuple, type(None))):
        args = parse(args)

    # Set up the random state and the simulation / observing conditions.
    rng = np.random.RandomState(args.seed)
    simdata = bgs_write_simdata(args)
    obs = simdata2obsconditions(args)

    # Generate list of HEALPix pixels to randomly sample from the mocks.
    healpixels = _get_healpixels_in_footprint(nside=args.nside)
    npix = np.minimum(10 * args.nsim, len(healpixels))
    pixels = rng.choice(healpixels, size=npix, replace=False)
    ipix = iter(pixels)

    # Set up the template generator.
    maker = BGSMaker(seed=args.seed)
    maker.template_maker = BGS(add_SNeIa=args.addsnia,
                               add_SNeIIp=args.addsniip,
                               wave=_default_wave())

    for j in range(args.nsim):

        # Loop until finding a non-empty healpixel (one with mock galaxies).
        tdata = []
        while len(tdata) == 0:
            pixel = next(ipix)
            tdata = maker.read(healpixels=pixel, nside=args.nside)

        # Add SN generation options.
        if args.addsnia or args.addsniip:
            tdata['SNE_FLUXRATIORANGE'] = (args.snrmin, args.snrmax)
            tdata['SNE_FILTER'] = 'decam2014-r'

        # Generate nspec spectral templates and write them to "truth" files.
        wave = None
        flux, targ, truth, obj = [], [], [], []

        # Generate templates until we have enough to pass brightness cuts.
        ntosim = np.min((args.nspec, len(tdata['RA'])))
        ngood = 0
        while ngood < args.nspec:
            idx = rng.choice(len(tdata['RA']), ntosim)
            tflux, twave, ttarg, ttruth, tobj = \
                maker.make_spectra(tdata, indx=idx)

            # Apply color cuts.
            is_bright = isBGS_colors(gflux=ttruth['FLUX_G'],
                                     rflux=ttruth['FLUX_R'],
                                     zflux=ttruth['FLUX_Z'],
                                     w1flux=ttruth['FLUX_W1'],
                                     w2flux=ttruth['FLUX_W2'],
                                     targtype='bright')

            is_faint = isBGS_colors(gflux=ttruth['FLUX_G'],
                                    rflux=ttruth['FLUX_R'],
                                    zflux=ttruth['FLUX_Z'],
                                    w1flux=ttruth['FLUX_W1'],
                                    w2flux=ttruth['FLUX_W2'],
                                    targtype='faint')

            is_wise = isBGS_colors(gflux=ttruth['FLUX_G'],
                                   rflux=ttruth['FLUX_R'],
                                   zflux=ttruth['FLUX_Z'],
                                   w1flux=ttruth['FLUX_W1'],
                                   w2flux=ttruth['FLUX_W2'],
                                   targtype='wise')

            keep = np.logical_or(np.logical_or(is_bright, is_faint), is_wise)

            _ngood = np.count_nonzero(keep)
            if _ngood > 0:
                ngood += _ngood
                flux.append(tflux[keep, :])
                targ.append(ttarg[keep])
                truth.append(ttruth[keep])
                obj.append(tobj[keep])

        wave = maker.wave
        flux = np.vstack(flux)[:args.nspec, :]
        targ = vstack(targ)[:args.nspec]
        truth = vstack(truth)[:args.nspec]
        obj = vstack(obj)[:args.nspec]

        if args.addsnia or args.addsniip:
            # TARGETID in truth table is split in two; deal with it here.
            truth['TARGETID'] = truth['TARGETID_1']

        # Set up and verify the TARGETID across all truth tables.
        n = len(truth)
        new_id = 10000000 * pixel + 100000 * j + np.arange(1, n + 1)

        truth['TARGETID'][:] = new_id
        targ['TARGETID'][:] = new_id
        obj['TARGETID'][:] = new_id

        assert (len(truth) == args.nspec)
        assert (np.all(targ['TARGETID'] == truth['TARGETID']))
        assert (len(truth) == len(np.unique(truth['TARGETID'])))
        assert (len(targ) == len(np.unique(targ['TARGETID'])))
        assert (len(obj) == len(np.unique(obj['TARGETID'])))

        truthfile = os.path.join(
            args.simdir, 'bgs_{}_{:03}_truth.fits'.format(args.simid, j))
        write_templates(truthfile, flux, wave, targ, truth, obj)

        # Generate simulated spectra, given observing conditions.
        specfile = os.path.join(
            args.simdir, 'bgs_{}_{:03}_spectra.fits'.format(args.simid, j))
        sim_spectra(wave,
                    flux,
                    'bgs',
                    specfile,
                    obsconditions=obs,
                    sourcetype='bgs',
                    targetid=truth['TARGETID'],
                    redshift=truth['TRUEZ'],
                    seed=args.seed,
                    expid=j)
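
The generation loop above keeps drawing template batches and retains only those passing at least one of the isBGS_colors() cuts, until args.nspec good spectra have been accumulated. A generic sketch of that accumulate-until-enough pattern; make_batch() and passes_cut() are placeholders, not desisim APIs:

import numpy as np

rng = np.random.RandomState(1)

def make_batch(n):
    """Stand-in for maker.make_spectra(): n random 'spectra'."""
    return rng.uniform(size=(n, 100))

def passes_cut(flux):
    """Stand-in for the isBGS_colors() selections: keeps roughly half."""
    return flux.mean(axis=1) > 0.5

nspec, ntosim = 20, 8
kept, ngood = [], 0
while ngood < nspec:
    batch = make_batch(ntosim)
    good = passes_cut(batch)
    if np.count_nonzero(good) > 0:
        kept.append(batch[good])
        ngood += np.count_nonzero(good)
flux = np.vstack(kept)[:nspec]   # trim the overshoot from the last batch
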
Example No. 9
0
def simulate_one_healpix(ifilename, args, model, obsconditions,
                         decam_and_wise_filters, footprint_healpix_weight,
                         footprint_healpix_nside):

    log = get_logger()

    healpix = 0
    nside = 0
    vals = os.path.basename(ifilename).split(".")[0].split("-")
    if len(vals) < 3:
        log.error("Cannot guess nside and healpix from filename {}".format(
            ifilename))
        raise ValueError(
            "Cannot guess nside and healpix from filename {}".format(
                ifilename))
    try:
        healpix = int(vals[-1])
        nside = int(vals[-2])
    except ValueError:
        raise ValueError(
            "Cannot guess nside and healpix from filename {}".format(
                ifilename))

    zbest_filename = None
    if args.outfile:
        ofilename = args.outfile
    else:
        ofilename = os.path.join(
            args.outdir,
            "{}/{}/spectra-{}-{}.fits".format(healpix // 100, healpix, nside,
                                              healpix))
    pixdir = os.path.dirname(ofilename)

    if not args.overwrite:
        # check whether output exists or not

        if args.zbest:
            zbest_filename = os.path.join(
                pixdir, "zbest-{}-{}.fits".format(nside, healpix))
            if os.path.isfile(ofilename) and os.path.isfile(zbest_filename):
                log.info("skip existing {} and {}".format(
                    ofilename, zbest_filename))
                return
        else:  # only test spectra file
            if os.path.isfile(ofilename):
                log.info("skip existing {}".format(ofilename))
                return

    log.info("Read skewers in {}".format(ifilename))
    trans_wave, transmission, metadata = read_lya_skewers(ifilename)
    ok = np.where((metadata['Z'] >= args.zmin)
                  & (metadata['Z'] <= args.zmax))[0]
    transmission = transmission[ok]
    metadata = metadata[:][ok]

    # set seed now in case we are downsampling
    np.random.seed(args.seed)

    # create quasars

    if args.desi_footprint:
        footprint_healpix = footprint.radec2pix(footprint_healpix_nside,
                                                metadata["RA"],
                                                metadata["DEC"])
        selection = np.where(
            footprint_healpix_weight[footprint_healpix] > 0.99)[0]
        log.info("Select QSOs in DESI footprint {} -> {}".format(
            transmission.shape[0], selection.size))
        if selection.size == 0:
            log.warning("No intersection with DESI footprint")
            return
        transmission = transmission[selection]
        metadata = metadata[:][selection]

    nqso = transmission.shape[0]
    if args.downsampling is not None:
        if args.downsampling <= 0 or args.downsampling > 1:
            log.error(
                "Down sampling fraction={} must be between 0 and 1".format(
                    args.downsampling))
            raise ValueError(
                "Down sampling fraction={} must be between 0 and 1".format(
                    args.downsampling))
        indices = np.where(np.random.uniform(size=nqso) < args.downsampling)[0]
        if indices.size == 0:
            log.warning(
                "Down sampling from {} to 0 (by chance I presume)".format(
                    nqso))
            return
        transmission = transmission[indices]
        metadata = metadata[:][indices]
        nqso = transmission.shape[0]

    if args.nmax is not None:
        if args.nmax < nqso:
            log.info(
                "Limit number of QSOs from {} to nmax={} (random subsample)".
                format(nqso, args.nmax))
            # take a random subsample (indices are drawn with replacement, so repeats are possible)
            indices = (np.random.uniform(size=args.nmax) * nqso).astype(int)
            transmission = transmission[indices]
            metadata = metadata[:][indices]
            nqso = args.nmax

    log.info("Simulate {} QSOs".format(nqso))
    tmp_qso_flux, tmp_qso_wave, meta = model.make_templates(
        nmodel=nqso,
        redshift=metadata['Z'],
        seed=args.seed,
        lyaforest=False,
        nocolorcuts=True,
        noresample=True)

    log.info("Resample to transmission wavelength grid")
    # because we don't want to alter the transmission field with resampling here
    qso_flux = np.zeros((tmp_qso_flux.shape[0], trans_wave.size))
    for q in range(tmp_qso_flux.shape[0]):
        qso_flux[q] = np.interp(trans_wave, tmp_qso_wave, tmp_qso_flux[q])
    tmp_qso_flux = qso_flux
    tmp_qso_wave = trans_wave

    log.info("Apply lya")
    tmp_qso_flux = apply_lya_transmission(tmp_qso_wave, tmp_qso_flux,
                                          trans_wave, transmission)

    if args.target_selection:
        log.info("Compute QSO magnitudes for target selection")
        maggies = decam_and_wise_filters.get_ab_maggies(1e-17 * tmp_qso_flux,
                                                        tmp_qso_wave.copy(),
                                                        mask_invalid=True)
        for band, filt in zip(
            ('FLUX_G', 'FLUX_R', 'FLUX_Z', 'FLUX_W1', 'FLUX_W2'),
            ('decam2014-g', 'decam2014-r', 'decam2014-z', 'wise2010-W1',
             'wise2010-W2')):
            meta[band] = np.ma.getdata(1e9 * maggies[filt])  # nanomaggies
        isqso = isQSO_colors(gflux=meta['FLUX_G'],
                             rflux=meta['FLUX_R'],
                             zflux=meta['FLUX_Z'],
                             w1flux=meta['FLUX_W1'],
                             w2flux=meta['FLUX_W2'])
        log.info("Target selection: {}/{} QSOs selected".format(
            np.sum(isqso), nqso))
        selection = np.where(isqso)[0]
        if selection.size == 0: return
        tmp_qso_flux = tmp_qso_flux[selection]
        metadata = metadata[:][selection]
        meta = meta[:][selection]
        nqso = selection.size

    log.info("Resample to a linear wavelength grid (needed by DESI sim.)")
    # we need a linear grid; for this resampling we integrate within bins
    # rather than doing a simple interpolation
    qso_wave = np.linspace(args.wmin, args.wmax,
                           int((args.wmax - args.wmin) / args.dwave) + 1)
    qso_flux = np.zeros((tmp_qso_flux.shape[0], qso_wave.size))
    for q in range(tmp_qso_flux.shape[0]):
        qso_flux[q] = resample_flux(qso_wave, tmp_qso_wave, tmp_qso_flux[q])

    log.info("Simulate DESI observation and write output file")
    pixdir = os.path.dirname(ofilename)
    if not os.path.isdir(pixdir):
        log.info("Creating dir {}".format(pixdir))
        os.makedirs(pixdir)

    if "MOCKID" in metadata.dtype.names:
        #log.warning("Using MOCKID as TARGETID")
        targetid = np.array(metadata["MOCKID"]).astype(int)
    elif "ID" in metadata.dtype.names:
        log.warning("Using ID as TARGETID")
        targetid = np.array(metadata["ID"]).astype(int)
    else:
        log.warning("No TARGETID")
        targetid = None

    sim_spectra(qso_wave,
                qso_flux,
                args.program,
                obsconditions=obsconditions,
                spectra_filename=ofilename,
                seed=args.seed,
                sourcetype="qso",
                skyerr=args.skyerr,
                ra=metadata["RA"],
                dec=metadata["DEC"],
                targetid=targetid)

    if args.zbest:
        log.info("Read fibermap")
        fibermap = read_fibermap(ofilename)

        log.info("Writing a zbest file {}".format(zbest_filename))
        columns = [('CHI2', 'f8'), ('COEFF', 'f8', (4, )), ('Z', 'f8'),
                   ('ZERR', 'f8'), ('ZWARN', 'i8'), ('SPECTYPE', (str, 96)),
                   ('SUBTYPE', (str, 16)), ('TARGETID', 'i8'),
                   ('DELTACHI2', 'f8'), ('BRICKNAME', (str, 8))]
        zbest = Table(np.zeros(nqso, dtype=columns))
        zbest["CHI2"][:] = 0.
        zbest["Z"] = metadata['Z']
        zbest["ZERR"][:] = 0.
        zbest["ZWARN"][:] = 0
        zbest["SPECTYPE"][:] = "QSO"
        zbest["SUBTYPE"][:] = ""
        zbest["TARGETID"] = fibermap["TARGETID"]
        zbest["DELTACHI2"][:] = 25.

        hzbest = pyfits.convenience.table_to_hdu(zbest)
        hzbest.name = "ZBEST"
        hfmap = pyfits.convenience.table_to_hdu(fibermap)
        hfmap.name = "FIBERMAP"

        hdulist = pyfits.HDUList([pyfits.PrimaryHDU(), hzbest, hfmap])
        hdulist.writeto(zbest_filename, overwrite=True)  # 'overwrite' replaces the deprecated 'clobber' argument
        hdulist.close()  # see if this helps with memory issue
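
For reference, a standalone sketch of the "perfect redshift" zbest catalog written above: same column layout, toy values, and an example output name:

import numpy as np
import astropy.io.fits as pyfits
from astropy.table import Table

nqso = 4
columns = [('CHI2', 'f8'), ('COEFF', 'f8', (4, )), ('Z', 'f8'), ('ZERR', 'f8'),
           ('ZWARN', 'i8'), ('SPECTYPE', (str, 96)), ('SUBTYPE', (str, 16)),
           ('TARGETID', 'i8'), ('DELTACHI2', 'f8'), ('BRICKNAME', (str, 8))]
zbest = Table(np.zeros(nqso, dtype=columns))
zbest['Z'] = [2.1, 2.4, 2.7, 3.0]    # truth redshifts stand in for a real fit
zbest['SPECTYPE'] = 'QSO'
zbest['TARGETID'] = np.arange(nqso)
zbest['DELTACHI2'] = 25.

hzbest = pyfits.table_to_hdu(zbest)
hzbest.name = 'ZBEST'
pyfits.HDUList([pyfits.PrimaryHDU(), hzbest]).writeto('zbest-example.fits',
                                                      overwrite=True)
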