Code example #1
File: quicklook.py  Project: rstaten/desispec
def setup_pipeline(config):
    """
       Given a configuration from QLF, this sets up a pipeline [pa,qa] and also returns a
       conversion dictionary from the configuration dictionary so that Pipeline steps (PA) can
       take them. This is required for runpipeline.
    """
    import astropy.io.fits as fits
    import desispec.io.fibermap as fibIO
    import desispec.io.sky as skyIO
    import desispec.io.fiberflat as ffIO
    import desispec.fiberflat as ff
    import desispec.io.image as imIO
    import desispec.image as im
    import desispec.io.frame as frIO
    import desispec.frame as dframe
    from desispec.quicklook import procalgs
    from desispec.boxcar import do_boxcar
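
    # Note: sys, qllogger, QLHB (the heartbeat helper) and getobject are imported or
    # defined at module level in the original quicklook.py and are assumed available here.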

    qlog=qllogger.QLLogger("QuickLook",20)
    log=qlog.getlog()
    if config is None:
        return None
    log.info("Reading Configuration")
    if "RawImage" not in config:
        log.critical("Config is missing \"RawImage\" key.")
        sys.exit("Missing \"RawImage\" key.")
    inpname=config["RawImage"]
    if "FiberMap" not in config:
        log.critical("Config is missing \"FiberMap\" key.")
        sys.exit("Missing \"FiberMap\" key.")
    fibname=config["FiberMap"]
    proctype="Exposure"
    if "Camera" in config:
        camera=config["Camera"]
    if "DataType" in config:
        proctype=config["DataType"]
    debuglevel=20
    if "DebugLevel" in config:
        debuglevel=config["DebugLevel"]
        log.setLevel(debuglevel)
    hbeat=QLHB.QLHeartbeat(log,config["Period"],config["Timeout"])
    if config["Timeout"]> 200.0:
        log.warning("Heartbeat timeout exceeding 200.0 seconds")
    dumpintermediates=False
    if "DumpIntermediates" in config:
        dumpintermediates=config["DumpIntermediates"]

    biasimage=None #- This will be the converted dictionary key
    biasfile=None
    if "BiasImage" in config:
        biasfile=config["BiasImage"]

    darkimage=None
    darkfile=None
    if "DarkImage" in config:
        darkfile=config["DarkImage"]

    pixelflatfile=None
    pixflatimage=None
    if "PixelFlat" in config:
        pixelflatfile=config["PixelFlat"]

    fiberflatimagefile=None
    fiberflatimage=None
    if "FiberFlatImage" in config:
        fiberflatimagefile=config["FiberFlatImage"]

    arclampimagefile=None
    arclampimage=None
    if "ArcLampImage" in config:
        arclampimagefile=config["ArcLampImage"]

    fiberflatfile=None
    fiberflat=None
    if "FiberFlatFile" in config:
        fiberflatfile=config["FiberFlatFile"]

    skyfile=None
    skyimage=None
    if "SkyFile" in config:
        skyfile=config["SkyFile"]

    psf=None
    if "PSFFile" in config:
        #from specter.psf import load_psf
        import desispec.psf
        psf=desispec.psf.PSF(config["PSFFile"])
        #psf=load_psf(config["PSFFile"])

    if "basePath" in config:
        basePath=config["basePath"]

    hbeat.start("Reading input file %s"%inpname)
    inp=fits.open(inpname) #- reading raw image directly from astropy.io.fits
    hbeat.start("Reading fiberMap file %s"%fibname)
    fibfile,fibhdr=fibIO.read_fibermap(fibname,header=True)

    convdict={"FiberMap":fibfile}

    if psf is not None:
        convdict["PSFFile"]=psf

    if biasfile is not None:
        hbeat.start("Reading Bias Image %s"%biasfile)
        biasimage=imIO.read_image(biasfile)
        convdict["BiasImage"]=biasimage

    if darkfile is not None:
        hbeat.start("Reading Dark Image %s"%darkfile)
        darkimage=imIO.read_image(darkfile)
        convdict["DarkImage"]=darkimage

    if pixelflatfile:
        hbeat.start("Reading PixelFlat Image %s"%pixelflatfile)
        pixelflatimage=imIO.read_image(pixelflatfile)
        convdict["PixelFlat"]=pixelflatimage

    if fiberflatimagefile:
        hbeat.start("Reading FiberFlat Image %s"%fiberflatimagefile)
        fiberflatimage=imIO.read_image(fiberflatimagefile)
        convdict["FiberFlatImage"]=fiberflatimage

    if arclampimagefile:
        hbeat.start("Reading ArcLampImage %s"%arclampimagefile)
        arclampimage=imIO.read_image(arclampimagefile)
        convdict["ArcLampImage"]=arclampimage

    if fiberflatfile:
        hbeat.start("Reading FiberFlat %s"%fiberflatfile)
        fiberflat=ffIO.read_fiberflat(fiberflatfile)
        convdict["FiberFlatFile"]=fiberflat

    if skyfile:
        hbeat.start("Reading SkyModel file %s"%skyfile)
        skymodel=skyIO.read_sky(skyfile)
        convdict["SkyFile"]=skymodel

    if dumpintermediates:
        convdict["DumpIntermediates"]=dumpintermediates
   
    hbeat.stop("Finished reading all static files")

    img=inp
    convdict["rawimage"]=img
    pipeline=[]
    for step in config["PipeLine"]:
        pa=getobject(step["PA"],log)
        if len(pipeline) == 0:
            if not pa.is_compatible(type(img)):
                log.critical("Pipeline configuration is incorrect! check configuration %s %s"%(img,pa.is_compatible(img)))
                sys.exit("Wrong pipeline configuration")
        else:
            if not pa.is_compatible(pipeline[-1][0].get_output_type()):
                log.critical("Pipeline configuration is incorrect! check configuration")
                log.critical("Can't connect input of %s to output of %s. Incompatible types"%(pa.name,pipeline[-1][0].name))
                sys.exit("Wrong pipeline configuration")
        qas=[]
        for q in step["QAs"]:
            qa=getobject(q,log)
            if not qa.is_compatible(pa.get_output_type()):
                log.warning("QA %s can not be used for output of %s. Skipping expecting %s got %s %s"%(qa.name,pa.name,qa.__inpType__,pa.get_output_type(),qa.is_compatible(pa.get_output_type())))
            else:
                qas.append(qa)
        pipeline.append([pa,qas])
    return pipeline,convdict
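
For orientation, a minimal usage sketch (not part of the source project): the config keys mirror the checks in setup_pipeline above, while the file paths and the empty PipeLine list are purely hypothetical placeholders.

# Hypothetical illustration only: keys follow what setup_pipeline() reads above;
# the paths are placeholders and the PipeLine list is left empty for brevity.
config = {
    "RawImage": "raw-00000000.fits",       # required, otherwise sys.exit
    "FiberMap": "fibermap-00000000.fits",  # required, otherwise sys.exit
    "Camera": "r0",
    "DataType": "Exposure",
    "DebugLevel": 20,
    "Period": 5.0,        # heartbeat period in seconds
    "Timeout": 120.0,     # heartbeat timeout; warned about above 200.0
    "DumpIntermediates": False,
    "PipeLine": [],       # list of steps, each {"PA": ..., "QAs": [...]}
}
pipeline, convdict = setup_pipeline(config)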
Code example #2
def simulate_one_healpix(ifilename, args, model, obsconditions,
                         decam_and_wise_filters, footprint_healpix_weight,
                         footprint_healpix_nside, seed):

    log = get_logger()

    # set seed now
    # we need a seed per healpix because
    # the spectra simulator REQUIRES a seed
    np.random.seed(seed)

    healpix = 0
    nside = 0
    vals = os.path.basename(ifilename).split(".")[0].split("-")
    if len(vals) < 3:
        log.error("Cannot guess nside and healpix from filename {}".format(
            ifilename))
        raise ValueError(
            "Cannot guess nside and healpix from filename {}".format(
                ifilename))
    try:
        healpix = int(vals[-1])
        nside = int(vals[-2])
    except ValueError:
        raise ValueError(
            "Cannot guess nside and healpix from filename {}".format(
                ifilename))

    zbest_filename = None
    if args.outfile:
        ofilename = args.outfile
    else:
        ofilename = os.path.join(
            args.outdir,
            "{}/{}/spectra-{}-{}.fits".format(healpix // 100, healpix, nside,
                                              healpix))
    pixdir = os.path.dirname(ofilename)

    if args.zbest:
        zbest_filename = os.path.join(
            pixdir, "zbest-{}-{}.fits".format(nside, healpix))

    if not args.overwrite:
        # check whether output exists or not
        if args.zbest:
            if os.path.isfile(ofilename) and os.path.isfile(zbest_filename):
                log.info("skip existing {} and {}".format(
                    ofilename, zbest_filename))
                return
        else:  # only test spectra file
            if os.path.isfile(ofilename):
                log.info("skip existing {}".format(ofilename))
                return

    log.info("Read skewers in {}, random seed = {}".format(ifilename, seed))
    trans_wave, transmission, metadata = read_lya_skewers(ifilename)
    ok = np.where((metadata['Z'] >= args.zmin)
                  & (metadata['Z'] <= args.zmax))[0]
    transmission = transmission[ok]
    metadata = metadata[:][ok]

    # create quasars

    if args.desi_footprint:
        footprint_healpix = footprint.radec2pix(footprint_healpix_nside,
                                                metadata["RA"],
                                                metadata["DEC"])
        selection = np.where(
            footprint_healpix_weight[footprint_healpix] > 0.99)[0]
        log.info("Select QSOs in DESI footprint {} -> {}".format(
            transmission.shape[0], selection.size))
        if selection.size == 0:
            log.warning("No intersection with DESI footprint")
            return
        transmission = transmission[selection]
        metadata = metadata[:][selection]

    nqso = transmission.shape[0]
    if args.downsampling is not None:
        if args.downsampling <= 0 or args.downsampling > 1:
            log.error(
                "Down sampling fraction={} must be between 0 and 1".format(
                    args.downsampling))
            raise ValueError(
                "Down sampling fraction={} must be between 0 and 1".format(
                    args.downsampling))
        indices = np.where(np.random.uniform(size=nqso) < args.downsampling)[0]
        if indices.size == 0:
            log.warning(
                "Down sampling from {} to 0 (by chance I presume)".format(
                    nqso))
            return
        transmission = transmission[indices]
        metadata = metadata[:][indices]
        nqso = transmission.shape[0]

    if args.nmax is not None:
        if args.nmax < nqso:
            log.info(
                "Limit number of QSOs from {} to nmax={} (random subsample)".
                format(nqso, args.nmax))
            # take a random subsample
            indices = (np.random.uniform(size=args.nmax) * nqso).astype(int)
            transmission = transmission[indices]
            metadata = metadata[:][indices]
            nqso = args.nmax

    if args.target_selection or args.mags:
        wanted_min_wave = 3329.  # needed to compute magnitudes for decam2014-r (one could have trimmed the transmission file ...)
        wanted_max_wave = 55501.  # needed to compute magnitudes for wise2010-W2

        if trans_wave[0] > wanted_min_wave:
            log.info(
                "Increase wavelength range from {}:{} to {}:{} to compute magnitudes"
                .format(int(trans_wave[0]), int(trans_wave[-1]),
                        int(wanted_min_wave), int(trans_wave[-1])))
            # pad with zeros at short wavelength because we assume transmission = 0
            # and we don't need any wavelength resolution here
            new_trans_wave = np.append([wanted_min_wave, trans_wave[0] - 0.01],
                                       trans_wave)
            new_transmission = np.zeros(
                (transmission.shape[0], new_trans_wave.size))
            new_transmission[:, 2:] = transmission
            trans_wave = new_trans_wave
            transmission = new_transmission

        if trans_wave[-1] < wanted_max_wave:
            log.info(
                "Increase wavelength range from {}:{} to {}:{} to compute magnitudes"
                .format(int(trans_wave[0]), int(trans_wave[-1]),
                        int(trans_wave[0]), int(wanted_max_wave)))
            # pad with ones at long wavelength because we assume transmission = 1
            coarse_dwave = 2.  # we don't care about resolution, we just need a decent QSO spectrum, there is no IGM transmission in this range
            n = int((wanted_max_wave - trans_wave[-1]) / coarse_dwave) + 1
            new_trans_wave = np.append(
                trans_wave,
                np.linspace(trans_wave[-1] + coarse_dwave,
                            trans_wave[-1] + coarse_dwave * (n + 1), n))
            new_transmission = np.ones(
                (transmission.shape[0], new_trans_wave.size))
            new_transmission[:, :trans_wave.size] = transmission
            trans_wave = new_trans_wave
            transmission = new_transmission

    log.info("Simulate {} QSOs".format(nqso))
    tmp_qso_flux, tmp_qso_wave, meta = model.make_templates(
        nmodel=nqso,
        redshift=metadata['Z'],
        lyaforest=False,
        nocolorcuts=True,
        noresample=True,
        seed=seed)

    log.info("Resample to transmission wavelength grid")
    # because we don't want to alter the transmission field with resampling here
    qso_flux = np.zeros((tmp_qso_flux.shape[0], trans_wave.size))
    for q in range(tmp_qso_flux.shape[0]):
        qso_flux[q] = np.interp(trans_wave, tmp_qso_wave, tmp_qso_flux[q])
    tmp_qso_flux = qso_flux
    tmp_qso_wave = trans_wave

    log.info("Apply lya")
    tmp_qso_flux = apply_lya_transmission(tmp_qso_wave, tmp_qso_flux,
                                          trans_wave, transmission)

    bbflux = None
    if args.target_selection or args.mags:
        bands = ['FLUX_G', 'FLUX_R', 'FLUX_Z', 'FLUX_W1', 'FLUX_W2']
        bbflux = dict()
        # need to recompute the magnitudes to account for lya transmission
        log.info("Compute QSO magnitudes")
        maggies = decam_and_wise_filters.get_ab_maggies(
            1e-17 * tmp_qso_flux, tmp_qso_wave)
        for band, filt in zip(bands, [
                'decam2014-g', 'decam2014-r', 'decam2014-z', 'wise2010-W1',
                'wise2010-W2'
        ]):

            bbflux[band] = np.ma.getdata(1e9 * maggies[filt])  # nanomaggies

    if args.target_selection:
        log.info("Apply target selection")
        isqso = isQSO_colors(gflux=bbflux['FLUX_G'],
                             rflux=bbflux['FLUX_R'],
                             zflux=bbflux['FLUX_Z'],
                             w1flux=bbflux['FLUX_W1'],
                             w2flux=bbflux['FLUX_W2'])
        log.info("Target selection: {}/{} QSOs selected".format(
            np.sum(isqso), nqso))
        selection = np.where(isqso)[0]
        if selection.size == 0: return
        tmp_qso_flux = tmp_qso_flux[selection]
        metadata = metadata[:][selection]
        meta = meta[:][selection]
        for band in bands:
            bbflux[band] = bbflux[band][selection]
        nqso = selection.size

    log.info("Resample to a linear wavelength grid (needed by DESI sim.)")
    # we need a linear grid. for this resampling we take care of integrating in bins
    # we do not do a simple interpolation
    qso_wave = np.linspace(args.wmin, args.wmax,
                           int((args.wmax - args.wmin) / args.dwave) + 1)
    qso_flux = np.zeros((tmp_qso_flux.shape[0], qso_wave.size))
    for q in range(tmp_qso_flux.shape[0]):
        qso_flux[q] = resample_flux(qso_wave, tmp_qso_wave, tmp_qso_flux[q])

    log.info("Simulate DESI observation and write output file")
    pixdir = os.path.dirname(ofilename)
    if len(pixdir) > 0:
        if not os.path.isdir(pixdir):
            log.info("Creating dir {}".format(pixdir))
            os.makedirs(pixdir)

    if "MOCKID" in metadata.dtype.names:
        #log.warning("Using MOCKID as TARGETID")
        targetid = np.array(metadata["MOCKID"]).astype(int)
    elif "ID" in metadata.dtype.names:
        log.warning("Using ID as TARGETID")
        targetid = np.array(metadata["ID"]).astype(int)
    else:
        log.warning("No TARGETID")
        targetid = None

    log.warning("Assuming the healpix scheme is 'NESTED'")
    meta = {"HPXNSIDE": nside, "HPXPIXEL": healpix, "HPXNEST": True}

    if args.target_selection or args.mags:
        # today we write mags because that's what is in the fibermap
        mags = np.zeros((qso_flux.shape[0], 5))
        for i, band in enumerate(bands):
            jj = (bbflux[band] > 0)
            mags[jj,
                 i] = 22.5 - 2.5 * np.log10(bbflux[band][jj])  # AB magnitudes
        fibermap_columns = {"MAG": mags}
    else:
        fibermap_columns = None

    sim_spectra(qso_wave,
                qso_flux,
                args.program,
                obsconditions=obsconditions,
                spectra_filename=ofilename,
                sourcetype="qso",
                skyerr=args.skyerr,
                ra=metadata["RA"],
                dec=metadata["DEC"],
                targetid=targetid,
                meta=meta,
                seed=seed,
                fibermap_columns=fibermap_columns)

    if args.zbest:
        log.info("Read fibermap")
        fibermap = read_fibermap(ofilename)

        log.info("Writing a zbest file {}".format(zbest_filename))
        columns = [('CHI2', 'f8'), ('COEFF', 'f8', (4, )), ('Z', 'f8'),
                   ('ZERR', 'f8'), ('ZWARN', 'i8'), ('SPECTYPE', (str, 96)),
                   ('SUBTYPE', (str, 16)), ('TARGETID', 'i8'),
                   ('DELTACHI2', 'f8'), ('BRICKNAME', (str, 8))]
        zbest = Table(np.zeros(nqso, dtype=columns))
        zbest["CHI2"][:] = 0.
        zbest["Z"] = metadata['Z']
        zbest["ZERR"][:] = 0.
        zbest["ZWARN"][:] = 0
        zbest["SPECTYPE"][:] = "QSO"
        zbest["SUBTYPE"][:] = ""
        zbest["TARGETID"] = fibermap["TARGETID"]
        zbest["DELTACHI2"][:] = 25.

        hzbest = pyfits.convenience.table_to_hdu(zbest)
        hzbest.name = "ZBEST"
        hfmap = pyfits.convenience.table_to_hdu(fibermap)
        hfmap.name = "FIBERMAP"

        hdulist = pyfits.HDUList([pyfits.PrimaryHDU(), hzbest, hfmap])
        hdulist.writeto(zbest_filename, clobber=True)  # 'clobber' was renamed 'overwrite' in newer astropy
        hdulist.close()  # see if this helps with memory issue
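
The nside/healpix guess at the top of this function assumes a {prefix}-{nside}-{healpix}.fits naming convention; below is a small self-contained sketch of that parsing, using an illustrative filename rather than a real mock path.

import os

# Illustrative filename only; real transmission files may be named differently.
ifilename = "/path/to/transmission-16-3456.fits"
vals = os.path.basename(ifilename).split(".")[0].split("-")
healpix = int(vals[-1])   # last dash-separated field -> healpix pixel
nside = int(vals[-2])     # second-to-last field -> nside
print(nside, healpix)     # 16 3456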
Code example #3
File: quickquasars.py  Project: LuzGarciaP/desisim
def simulate_one_healpix(ifilename,
                         args,
                         model,
                         obsconditions,
                         decam_and_wise_filters,
                         footprint_healpix_weight,
                         footprint_healpix_nside,
                         seed,
                         bal=None):
    log = get_logger()

    # set seed now
    # we need a seed per healpix because
    # the spectra simulator REQUIRES a seed
    np.random.seed(seed)

    # read the header of the transmission file to find the healpix pixel number, nside
    # and if we are lucky the scheme.
    # if this fails, try to guess it from the filename (for backward compatibility)
    healpix = -1
    nside = -1
    hpxnest = True

    hdulist = pyfits.open(ifilename)
    if "METADATA" in hdulist:
        head = hdulist["METADATA"].header
        for k in ["HPXPIXEL", "PIXNUM"]:
            if k in head:
                healpix = int(head[k])
                log.info("healpix={}={}".format(k, healpix))
                break
        for k in ["HPXNSIDE", "NSIDE"]:
            if k in head:
                nside = int(head[k])
                log.info("nside={}={}".format(k, nside))
                break
        for k in ["HPXNEST", "NESTED", "SCHEME"]:
            if k in head:
                if k == "SCHEME":
                    hpxnest = (head[k] == "NEST")
                else:
                    hpxnest = bool(head[k])
                log.info("hpxnest from {} = {}".format(k, hpxnest))
                break
    if healpix >= 0 and nside < 0:
        log.error("Read healpix in header but not nside.")
        raise ValueError("Read healpix in header but not nside.")

    if healpix < 0:
        vals = os.path.basename(ifilename).split(".")[0].split("-")
        if len(vals) < 3:
            log.error("Cannot guess nside and healpix from filename {}".format(
                ifilename))
            raise ValueError(
                "Cannot guess nside and healpix from filename {}".format(
                    ifilename))
        try:
            healpix = int(vals[-1])
            nside = int(vals[-2])
        except ValueError:
            raise ValueError(
                "Cannot guess nside and healpix from filename {}".format(
                    ifilename))
        log.warning(
            "Guessed healpix and nside from filename, assuming the healpix scheme is 'NESTED'"
        )

    zbest_filename = None
    if args.outfile:
        ofilename = args.outfile
    else:
        ofilename = os.path.join(
            args.outdir,
            "{}/{}/spectra-{}-{}.fits".format(healpix // 100, healpix, nside,
                                              healpix))
    pixdir = os.path.dirname(ofilename)

    if args.zbest:
        zbest_filename = os.path.join(
            pixdir, "zbest-{}-{}.fits".format(nside, healpix))

    if not args.overwrite:
        # check whether output exists or not
        if args.zbest:
            if os.path.isfile(ofilename) and os.path.isfile(zbest_filename):
                log.info("skip existing {} and {}".format(
                    ofilename, zbest_filename))
                return
        else:  # only test spectra file
            if os.path.isfile(ofilename):
                log.info("skip existing {}".format(ofilename))
                return

    log.info("Read skewers in {}, random seed = {}".format(ifilename, seed))

    ##ALMA: It reads the skewers only if there are no DLAs or if they are added randomly.
    if (not args.dla or args.dla == 'random'):
        trans_wave, transmission, metadata = read_lya_skewers(ifilename)
        ok = np.where((metadata['Z'] >= args.zmin)
                      & (metadata['Z'] <= args.zmax))[0]
        transmission = transmission[ok]
        metadata = metadata[:][ok]
##ALMA: Added to read dla_info

    elif (args.dla == 'file'):
        log.info("Read DLA information in {}".format(ifilename))
        trans_wave, transmission, metadata, dla_info = read_lya_skewers(
            ifilename, dla_='TRUE')
        ok = np.where((metadata['Z'] >= args.zmin)
                      & (metadata['Z'] <= args.zmax))[0]
        transmission = transmission[ok]
        metadata = metadata[:][ok]
    else:
        log.error(
            'Not a valid option to add DLAs. Valid options are "random" or "file"'
        )
        sys.exit(1)

    if args.dla:
        dla_NHI, dla_z, dla_id = [], [], []
        dla_filename = os.path.join(pixdir,
                                    "dla-{}-{}.fits".format(nside, healpix))

    if args.desi_footprint:
        footprint_healpix = footprint.radec2pix(footprint_healpix_nside,
                                                metadata["RA"],
                                                metadata["DEC"])
        selection = np.where(
            footprint_healpix_weight[footprint_healpix] > 0.99)[0]
        log.info("Select QSOs in DESI footprint {} -> {}".format(
            transmission.shape[0], selection.size))
        if selection.size == 0:
            log.warning("No intersection with DESI footprint")
            return
        transmission = transmission[selection]
        metadata = metadata[:][selection]

    nqso = transmission.shape[0]
    if args.downsampling is not None:
        if args.downsampling <= 0 or args.downsampling > 1:
            log.error(
                "Down sampling fraction={} must be between 0 and 1".format(
                    args.downsampling))
            raise ValueError(
                "Down sampling fraction={} must be between 0 and 1".format(
                    args.downsampling))
        indices = np.where(np.random.uniform(size=nqso) < args.downsampling)[0]
        if indices.size == 0:
            log.warning(
                "Down sampling from {} to 0 (by chance I presume)".format(
                    nqso))
            return
        transmission = transmission[indices]
        metadata = metadata[:][indices]
        nqso = transmission.shape[0]


##ALMA: Added to set transmission to 1 for z > zqso; this can be removed when the transmission is corrected.
    for ii in range(len(metadata)):
        transmission[ii][trans_wave > 1215.67 * (metadata[ii]['Z'] + 1)] = 1.0

    if (args.dla == 'file'):
        log.info('Adding DLAs from transmission file')
        min_trans_wave = np.min(trans_wave / 1215.67 - 1)
        for ii in range(len(metadata)):
            if min_trans_wave < metadata[ii]['Z']:
                idd = metadata['MOCKID'][ii]
                dlas = dla_info[dla_info['MOCKID'] == idd]
                dlass = []
                for i in range(len(dlas)):
                    ## Adding only DLAs between zqso and 1.95; check again for the next version of the London mocks...
                    if (dlas[i]['Z_DLA'] <
                            metadata[ii]['Z']) and (dlas[i]['Z_DLA'] > 1.95):
                        dlass.append(
                            dict(z=dlas[i]['Z_DLA'] + dlas[i]['DZ_DLA'],
                                 N=dlas[i]['N_HI_DLA']))
                if len(dlass) > 0:
                    dla_model = dla_spec(trans_wave, dlass)
                    transmission[ii] = dla_model * transmission[ii]
                    dla_z += [idla['z'] for idla in dlass]
                    dla_NHI += [idla['N'] for idla in dlass]
                    dla_id += [idd] * len(dlass)

    elif (args.dla == 'random'):
        log.info('Adding DLAs randomly')
        min_trans_wave = np.min(trans_wave / 1215.67 - 1)
        for ii in range(len(metadata)):
            if min_trans_wave < metadata[ii]['Z']:
                idd = metadata['MOCKID'][ii]
                dlass, dla_model = insert_dlas(trans_wave, metadata[ii]['Z'])
                if len(dlass) > 0:
                    transmission[ii] = dla_model * transmission[ii]
                    dla_z += [idla['z'] for idla in dlass]
                    dla_NHI += [idla['N'] for idla in dlass]
                    dla_id += [idd] * len(dlass)

    if args.dla:
        if len(dla_id) > 0:
            dla_meta = Table()
            dla_meta['NHI'] = dla_NHI
            dla_meta['z'] = dla_z
            dla_meta['ID'] = dla_id

    if args.nmax is not None:
        if args.nmax < nqso:
            log.info(
                "Limit number of QSOs from {} to nmax={} (random subsample)".
                format(nqso, args.nmax))
            # take a random subsample
            indices = (np.random.uniform(size=args.nmax) * nqso).astype(int)
            transmission = transmission[indices]
            metadata = metadata[:][indices]
            nqso = args.nmax

            if args.dla:
                dla_meta = dla_meta[:][dla_meta['ID'] == metadata['MOCKID']]

    if args.target_selection or args.mags:
        wanted_min_wave = 3329.  # needed to compute magnitudes for decam2014-r (one could have trimmed the transmission file ...)
        wanted_max_wave = 55501.  # needed to compute magnitudes for wise2010-W2

        if trans_wave[0] > wanted_min_wave:
            log.info(
                "Increase wavelength range from {}:{} to {}:{} to compute magnitudes"
                .format(int(trans_wave[0]), int(trans_wave[-1]),
                        int(wanted_min_wave), int(trans_wave[-1])))
            # pad with zeros at short wavelength because we assume transmission = 0
            # and we don't need any wavelength resolution here
            new_trans_wave = np.append([wanted_min_wave, trans_wave[0] - 0.01],
                                       trans_wave)
            new_transmission = np.zeros(
                (transmission.shape[0], new_trans_wave.size))
            new_transmission[:, 2:] = transmission
            trans_wave = new_trans_wave
            transmission = new_transmission

        if trans_wave[-1] < wanted_max_wave:
            log.info(
                "Increase wavelength range from {}:{} to {}:{} to compute magnitudes"
                .format(int(trans_wave[0]), int(trans_wave[-1]),
                        int(trans_wave[0]), int(wanted_max_wave)))
            # pad with ones at long wavelength because we assume transmission = 1
            coarse_dwave = 2.  # we don't care about resolution, we just need a decent QSO spectrum, there is no IGM transmission in this range
            n = int((wanted_max_wave - trans_wave[-1]) / coarse_dwave) + 1
            new_trans_wave = np.append(
                trans_wave,
                np.linspace(trans_wave[-1] + coarse_dwave,
                            trans_wave[-1] + coarse_dwave * (n + 1), n))
            new_transmission = np.ones(
                (transmission.shape[0], new_trans_wave.size))
            new_transmission[:, :trans_wave.size] = transmission
            trans_wave = new_trans_wave
            transmission = new_transmission

    log.info("Simulate {} QSOs".format(nqso))
    tmp_qso_flux, tmp_qso_wave, meta = model.make_templates(
        nmodel=nqso,
        redshift=metadata['Z'],
        lyaforest=False,
        nocolorcuts=True,
        noresample=True,
        seed=seed)

    log.info("Resample to transmission wavelength grid")
    # because we don't want to alter the transmission field with resampling here
    qso_flux = np.zeros((tmp_qso_flux.shape[0], trans_wave.size))
    for q in range(tmp_qso_flux.shape[0]):
        qso_flux[q] = np.interp(trans_wave, tmp_qso_wave, tmp_qso_flux[q])
    tmp_qso_flux = qso_flux
    tmp_qso_wave = trans_wave

    ##To add BALs to be checked by Luz and Jaime
    if (args.balprob):
        if (args.balprob <= 1. and args.balprob > 0):
            log.info("Adding BALs with probability {}".format(args.balprob))
            tmp_qso_flux, meta_bal = bal.insert_bals(tmp_qso_wave,
                                                     tmp_qso_flux,
                                                     metadata['Z'],
                                                     balprob=args.balprob,
                                                     seed=seed)
        else:
            log.error("Probability to add BALs is not between 0 and 1")
            sys.exit(1)

    log.info("Apply lya")
    tmp_qso_flux = apply_lya_transmission(tmp_qso_wave, tmp_qso_flux,
                                          trans_wave, transmission)

    if args.metals is not None:
        lstMetals = ''
        for m in args.metals:
            lstMetals += m + ', '
        log.info("Apply metals: {}".format(lstMetals[:-2]))
        tmp_qso_flux = apply_metals_transmission(tmp_qso_wave, tmp_qso_flux,
                                                 trans_wave, transmission,
                                                 args.metals)

    bbflux = None
    if args.target_selection or args.mags:
        bands = ['FLUX_G', 'FLUX_R', 'FLUX_Z', 'FLUX_W1', 'FLUX_W2']
        bbflux = dict()
        # need to recompute the magnitudes to account for lya transmission
        log.info("Compute QSO magnitudes")
        maggies = decam_and_wise_filters.get_ab_maggies(
            1e-17 * tmp_qso_flux, tmp_qso_wave)
        for band, filt in zip(bands, [
                'decam2014-g', 'decam2014-r', 'decam2014-z', 'wise2010-W1',
                'wise2010-W2'
        ]):

            bbflux[band] = np.ma.getdata(1e9 * maggies[filt])  # nanomaggies

    if args.target_selection:
        log.info("Apply target selection")
        isqso = isQSO_colors(gflux=bbflux['FLUX_G'],
                             rflux=bbflux['FLUX_R'],
                             zflux=bbflux['FLUX_Z'],
                             w1flux=bbflux['FLUX_W1'],
                             w2flux=bbflux['FLUX_W2'])
        log.info("Target selection: {}/{} QSOs selected".format(
            np.sum(isqso), nqso))
        selection = np.where(isqso)[0]
        if selection.size == 0: return
        tmp_qso_flux = tmp_qso_flux[selection]
        metadata = metadata[:][selection]
        meta = meta[:][selection]
        for band in bands:
            bbflux[band] = bbflux[band][selection]
        nqso = selection.size

    log.info("Resample to a linear wavelength grid (needed by DESI sim.)")
    # we need a linear grid. for this resampling we take care of integrating in bins
    # we do not do a simple interpolation
    qso_wave = np.linspace(args.wmin, args.wmax,
                           int((args.wmax - args.wmin) / args.dwave) + 1)
    qso_flux = np.zeros((tmp_qso_flux.shape[0], qso_wave.size))
    for q in range(tmp_qso_flux.shape[0]):
        qso_flux[q] = resample_flux(qso_wave, tmp_qso_wave, tmp_qso_flux[q])

    log.info("Simulate DESI observation and write output file")
    pixdir = os.path.dirname(ofilename)
    if len(pixdir) > 0:
        if not os.path.isdir(pixdir):
            log.info("Creating dir {}".format(pixdir))
            os.makedirs(pixdir)

    if "MOCKID" in metadata.dtype.names:
        #log.warning("Using MOCKID as TARGETID")
        targetid = np.array(metadata["MOCKID"]).astype(int)
    elif "ID" in metadata.dtype.names:
        log.warning("Using ID as TARGETID")
        targetid = np.array(metadata["ID"]).astype(int)
    else:
        log.warning("No TARGETID")
        targetid = None

    meta = {"HPXNSIDE": nside, "HPXPIXEL": healpix, "HPXNEST": hpxnest}

    if args.target_selection or args.mags:
        # today we write mags because that's what is in the fibermap
        mags = np.zeros((qso_flux.shape[0], 5))
        for i, band in enumerate(bands):
            jj = (bbflux[band] > 0)
            mags[jj,
                 i] = 22.5 - 2.5 * np.log10(bbflux[band][jj])  # AB magnitudes
        fibermap_columns = {"MAG": mags}
    else:
        fibermap_columns = None

    sim_spectra(qso_wave,
                qso_flux,
                args.program,
                obsconditions=obsconditions,
                spectra_filename=ofilename,
                sourcetype="qso",
                skyerr=args.skyerr,
                ra=metadata["RA"],
                dec=metadata["DEC"],
                targetid=targetid,
                meta=meta,
                seed=seed,
                fibermap_columns=fibermap_columns)

    if args.zbest:
        log.info("Read fibermap")
        fibermap = read_fibermap(ofilename)

        log.info("Writing a zbest file {}".format(zbest_filename))
        columns = [('CHI2', 'f8'), ('COEFF', 'f8', (4, )), ('Z', 'f8'),
                   ('ZERR', 'f8'), ('ZWARN', 'i8'), ('SPECTYPE', (str, 96)),
                   ('SUBTYPE', (str, 16)), ('TARGETID', 'i8'),
                   ('DELTACHI2', 'f8'), ('BRICKNAME', (str, 8))]
        zbest = Table(np.zeros(nqso, dtype=columns))
        zbest["CHI2"][:] = 0.
        zbest["Z"] = metadata['Z']
        zbest["ZERR"][:] = 0.
        zbest["ZWARN"][:] = 0
        zbest["SPECTYPE"][:] = "QSO"
        zbest["SUBTYPE"][:] = ""
        zbest["TARGETID"] = fibermap["TARGETID"]
        zbest["DELTACHI2"][:] = 25.

        hzbest = pyfits.convenience.table_to_hdu(zbest)
        hzbest.name = "ZBEST"
        hfmap = pyfits.convenience.table_to_hdu(fibermap)
        hfmap.name = "FIBERMAP"

        hdulist = pyfits.HDUList([pyfits.PrimaryHDU(), hzbest, hfmap])
        hdulist.writeto(zbest_filename, clobber=True)  # 'clobber' was renamed 'overwrite' in newer astropy
        hdulist.close()  # see if this helps with memory issue

        if args.dla:
            #This will change according to discussion
            log.info("Updating the spectra file to add DLA metadata {}".format(
                ofilename))
            hdudla = pyfits.table_to_hdu(dla_meta)
            hdudla.name = "DLA_META"
            hdul = pyfits.open(ofilename, mode='update')
            hdul.append(hdudla)
            hdul.flush()
            hdul.close()
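
Both this and the previous example convert broadband fluxes in nanomaggies to AB magnitudes before filling the fibermap MAG column; here is a minimal numpy sketch of that relation, with made-up flux values.

import numpy as np

# AB magnitude from a flux in nanomaggies: m = 22.5 - 2.5*log10(flux),
# applied only where the flux is positive, as in the loops above.
flux_nmgy = np.array([10.0, 100.0, 0.0])
mags = np.zeros_like(flux_nmgy)
positive = flux_nmgy > 0
mags[positive] = 22.5 - 2.5 * np.log10(flux_nmgy[positive])
print(mags)  # [20.  17.5  0. ]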
Code example #4
def simulate_one_healpix(ifilename,args,model,obsconditions,decam_and_wise_filters,
                         bassmzls_and_wise_filters,footprint_healpix_weight,
                         footprint_healpix_nside,
                         bal=None,sfdmap=None,eboss=None) :
    log = get_logger()

    # open filename and extract basic HEALPix information
    pixel, nside, hpxnest = get_healpix_info(ifilename)

    # using global seed (could be None) get seed for this particular pixel
    global_seed = args.seed
    seed = get_pixel_seed(pixel, nside, global_seed)
    # use this seed to generate future random numbers
    np.random.seed(seed)

    # get output file (we will write there spectra for this HEALPix pixel)
    ofilename = get_spectra_filename(args,nside,pixel)
    # get directory name (we will also write there zbest file)
    pixdir = os.path.dirname(ofilename)

    # get filename for truth file
    truth_filename = get_truth_filename(args,pixdir,nside,pixel)

    # get filename for zbest file
    zbest_filename = get_zbest_filename(args,pixdir,nside,pixel)

    if not args.overwrite :
        # check whether output exists or not
        if args.zbest :
            if os.path.isfile(ofilename) and os.path.isfile(zbest_filename) :
                log.info("skip existing {} and {}".format(ofilename,zbest_filename))
                return
        else : # only test spectra file
            if os.path.isfile(ofilename) :
                log.info("skip existing {}".format(ofilename))
                return

    # create sub-directories if required
    if len(pixdir)>0 :
        if not os.path.isdir(pixdir) :
            log.info("Creating dir {}".format(pixdir))
            os.makedirs(pixdir)

    log.info("Read skewers in {}, random seed = {}".format(ifilename,seed))

    # Read transmission from files. It might include DLA information, and it
    # might add metal transmission as well (from the HDU file).
    log.info("Read transmission file {}".format(ifilename))

    trans_wave, transmission, metadata, dla_info = read_lya_skewers(ifilename,read_dlas=(args.dla=='file'),add_metals=args.metals_from_file,add_lyb=args.add_LYB)

    ### Add Finger-of-God velocities before generating the continua
    log.info("Add FOG with sigma {} km/s to the quasar redshifts".format(args.sigma_kms_fog))
    DZ_FOG = args.sigma_kms_fog/c*(1.+metadata['Z'])*np.random.normal(0,1,metadata['Z'].size)
    metadata['Z'] += DZ_FOG

    ### Select quasar within a given redshift range
    w = (metadata['Z']>=args.zmin) & (metadata['Z']<=args.zmax)
    transmission = transmission[w]
    metadata = metadata[:][w]
    DZ_FOG = DZ_FOG[w]

    # option to make mocks for BOSS+eBOSS
    if not eboss is None:
        if args.downsampling or args.desi_footprint:
            raise ValueError("eboss option can not be run with "
                    +"desi_footprint or downsampling")

        # Get the redshift distribution from SDSS
        selection = sdss_subsample_redshift(metadata["RA"],metadata["DEC"],metadata['Z'],eboss['redshift'])
        log.info("Select QSOs in BOSS+eBOSS redshift distribution {} -> {}".format(metadata['Z'].size,selection.sum()))
        if selection.sum()==0:
            log.warning("No intersection with BOSS+eBOSS redshift distribution")
            return
        transmission = transmission[selection]
        metadata = metadata[:][selection]
        DZ_FOG = DZ_FOG[selection]

        # figure out the density of all quasars
        N_highz = metadata['Z'].size
        # area of healpix pixel, in degrees
        area_deg2 = healpy.pixelfunc.nside2pixarea(nside,degrees=True)
        input_highz_dens_deg2 = N_highz/area_deg2
        selection = sdss_subsample(metadata["RA"], metadata["DEC"],
                        input_highz_dens_deg2,eboss['footprint'])
        log.info("Select QSOs in BOSS+eBOSS footprint {} -> {}".format(transmission.shape[0],selection.size))
        if selection.size == 0 :
            log.warning("No intersection with BOSS+eBOSS footprint")
            return
        transmission = transmission[selection]
        metadata = metadata[:][selection]
        DZ_FOG = DZ_FOG[selection]

    if args.desi_footprint :
        footprint_healpix = footprint.radec2pix(footprint_healpix_nside, metadata["RA"], metadata["DEC"])
        selection = np.where(footprint_healpix_weight[footprint_healpix]>0.99)[0]
        log.info("Select QSOs in DESI footprint {} -> {}".format(transmission.shape[0],selection.size))
        if selection.size == 0 :
            log.warning("No intersection with DESI footprint")
            return
        transmission = transmission[selection]
        metadata = metadata[:][selection]
        DZ_FOG = DZ_FOG[selection]



    nqso=transmission.shape[0]
    if args.downsampling is not None :
        if args.downsampling <= 0 or  args.downsampling > 1 :
           log.error("Down sampling fraction={} must be between 0 and 1".format(args.downsampling))
           raise ValueError("Down sampling fraction={} must be between 0 and 1".format(args.downsampling))
        indices = np.where(np.random.uniform(size=nqso)<args.downsampling)[0]
        if indices.size == 0 :
            log.warning("Down sampling from {} to 0 (by chance I presume)".format(nqso))
            return
        transmission = transmission[indices]
        metadata = metadata[:][indices]
        DZ_FOG = DZ_FOG[indices]
        nqso = transmission.shape[0]

    if args.nmax is not None :
        if args.nmax < nqso :
            log.info("Limit number of QSOs from {} to nmax={} (random subsample)".format(nqso,args.nmax))
            # take a random subsample
            indices = np.random.choice(np.arange(nqso),args.nmax,replace=False)  ## Use random.choice instead of random.uniform (the latter can, rarely, duplicate QSOs)
            transmission = transmission[indices]
            metadata = metadata[:][indices]
            DZ_FOG = DZ_FOG[indices]
            nqso = args.nmax

    # In previous versions of the London mocks we needed to enforce F=1 for
    # z > z_qso here, but this is not needed anymore. Moreover, now we also
    # have metal absorption that implies F < 1 for z > z_qso
    #for ii in range(len(metadata)):
    #    transmission[ii][trans_wave>lambda_RF_LYA*(metadata[ii]['Z']+1)]=1.0

    # if requested, add DLA to the transmission skewers
    if args.dla is not None :

        # if adding random DLAs, we will need a new random generator
        if args.dla=='random':
            log.info('Adding DLAs randomly')
            random_state_just_for_dlas = np.random.RandomState(seed)
        elif args.dla=='file':
            log.info('Adding DLAs from transmission file')
        else:
            log.error("Wrong option for args.dla: "+args.dla)
            sys.exit(1)

        # if adding DLAs, the information will be printed here
        dla_filename=os.path.join(pixdir,"dla-{}-{}.fits".format(nside,pixel))
        dla_NHI, dla_z, dla_qid,dla_id = [], [], [],[]

        # identify minimum Lya redshift in transmission files
        min_lya_z = np.min(trans_wave/lambda_RF_LYA - 1)

        # loop over quasars in pixel

        for ii in range(len(metadata)):

            # quasars with z < min_z will not have any DLA in spectrum
            if min_lya_z>metadata['Z'][ii]: continue

            # quasar ID
            idd=metadata['MOCKID'][ii]
            dlas=[]

            if args.dla=='file':
                for dla in dla_info[dla_info['MOCKID']==idd]:

                    # Adding only DLAs with z < zqso
                    if dla['Z_DLA_RSD']>=metadata['Z'][ii]: continue
                    dlas.append(dict(z=dla['Z_DLA_RSD'],N=dla['N_HI_DLA'],dlaid=dla['DLAID']))
                transmission_dla = dla_spec(trans_wave,dlas)

            elif args.dla=='random':
                dlas, transmission_dla = insert_dlas(trans_wave, metadata['Z'][ii], rstate=random_state_just_for_dlas)
                for idla in dlas:
                   idla['dlaid']+=idd*1000      #Added to have unique DLA ids. Same format as DLAs from file.

            # multiply transmissions and store information for the DLA file
            if len(dlas)>0:
                transmission[ii] = transmission_dla * transmission[ii]
                dla_z += [idla['z'] for idla in dlas]
                dla_NHI += [idla['N'] for idla in dlas]
                dla_id += [idla['dlaid'] for idla in dlas]
                dla_qid += [idd]*len(dlas)
        log.info('Added {} DLAs'.format(len(dla_id)))
        # write file with DLA information
        if len(dla_id)>0:
            dla_meta=Table()
            dla_meta['NHI'] = dla_NHI
            dla_meta['Z_DLA'] = dla_z  #This is Z_DLA_RSD in the transmission file.
            dla_meta['TARGETID']=dla_qid
            dla_meta['DLAID'] = dla_id
            hdu_dla = pyfits.convenience.table_to_hdu(dla_meta)
            hdu_dla.name="DLA_META"
            del(dla_meta)
            log.info("DLA metadata to be saved in {}".format(truth_filename))
        else:
            hdu_dla=pyfits.PrimaryHDU()
            hdu_dla.name="DLA_META"

    # if requested, extend transmission skewers to cover full spectrum
    if args.target_selection or args.bbflux :
        wanted_min_wave = 3329. # needed to compute magnitudes for decam2014-r (one could have trimmed the transmission file ...)
        wanted_max_wave = 55501. # needed to compute magnitudes for wise2010-W2

        if trans_wave[0]>wanted_min_wave :
            log.info("Increase wavelength range from {}:{} to {}:{} to compute magnitudes".format(int(trans_wave[0]),int(trans_wave[-1]),int(wanted_min_wave),int(trans_wave[-1])))
            # pad with ones at short wavelength, we assume F = 1 for z <~ 1.7
            # we don't need any wavelength resolution here
            new_trans_wave = np.append([wanted_min_wave,trans_wave[0]-0.01],trans_wave)
            new_transmission = np.ones((transmission.shape[0],new_trans_wave.size))
            new_transmission[:,2:] = transmission
            trans_wave   = new_trans_wave
            transmission = new_transmission

        if trans_wave[-1]<wanted_max_wave :
            log.info("Increase wavelength range from {}:{} to {}:{} to compute magnitudes".format(int(trans_wave[0]),int(trans_wave[-1]),int(trans_wave[0]),int(wanted_max_wave)))
            # pad with ones at long wavelength because we assume F = 1
            coarse_dwave = 2. # we don't care about resolution, we just need a decent QSO spectrum, there is no IGM transmission in this range
            n = int((wanted_max_wave-trans_wave[-1])/coarse_dwave)+1
            new_trans_wave = np.append(trans_wave,np.linspace(trans_wave[-1]+coarse_dwave,trans_wave[-1]+coarse_dwave*(n+1),n))
            new_transmission = np.ones((transmission.shape[0],new_trans_wave.size))
            new_transmission[:,:trans_wave.size] = transmission
            trans_wave   = new_trans_wave
            transmission = new_transmission

    # whether to use QSO or SIMQSO to generate quasar continua.  Simulate
    # spectra in the north vs south separately because they're on different
    # photometric systems.
    south = np.where( is_south(metadata['DEC']) )[0]
    north = np.where( ~is_south(metadata['DEC']) )[0]
    meta, qsometa = empty_metatable(nqso, objtype='QSO', simqso=not args.no_simqso)
    if args.no_simqso:
        log.info("Simulate {} QSOs with QSO templates".format(nqso))
        tmp_qso_flux = np.zeros([nqso, len(model.eigenwave)], dtype='f4')
        tmp_qso_wave = np.zeros_like(tmp_qso_flux)
    else:
        log.info("Simulate {} QSOs with SIMQSO templates".format(nqso))
        tmp_qso_flux = np.zeros([nqso, len(model.basewave)], dtype='f4')
        tmp_qso_wave = model.basewave

    for these, issouth in zip( (north, south), (False, True) ):

        # number of quasars in these
        nt = len(these)
        if nt<=0: continue

        if not eboss is None:
            # for eBOSS, generate only quasars with r<22
            magrange = (17.0, 21.3)
            _tmp_qso_flux, _tmp_qso_wave, _meta, _qsometa \
                = model.make_templates(nmodel=nt,
                    redshift=metadata['Z'][these], magrange=magrange,
                    lyaforest=False, nocolorcuts=True,
                    noresample=True, seed=seed, south=issouth)
        else:
            _tmp_qso_flux, _tmp_qso_wave, _meta, _qsometa \
                = model.make_templates(nmodel=nt,
                    redshift=metadata['Z'][these],
                    lyaforest=False, nocolorcuts=True,
                    noresample=True, seed=seed, south=issouth)

        _meta['TARGETID'] = metadata['MOCKID'][these]
        _qsometa['TARGETID'] = metadata['MOCKID'][these]
        meta[these] = _meta
        qsometa[these] = _qsometa
        tmp_qso_flux[these, :] = _tmp_qso_flux

        if args.no_simqso:
            tmp_qso_wave[these, :] = _tmp_qso_wave

    log.info("Resample to transmission wavelength grid")
    qso_flux=np.zeros((tmp_qso_flux.shape[0],trans_wave.size))
    if args.no_simqso:
        for q in range(tmp_qso_flux.shape[0]) :
            qso_flux[q]=np.interp(trans_wave,tmp_qso_wave[q],tmp_qso_flux[q])
    else:
        for q in range(tmp_qso_flux.shape[0]) :
            qso_flux[q]=np.interp(trans_wave,tmp_qso_wave,tmp_qso_flux[q])

    tmp_qso_flux = qso_flux
    tmp_qso_wave = trans_wave

    # if requested, add BAL features to the quasar continua
    if args.balprob:
        if args.balprob<=1. and args.balprob >0:
            log.info("Adding BALs with probability {}".format(args.balprob))
            # save current random state
            rnd_state = np.random.get_state()
            tmp_qso_flux,meta_bal=bal.insert_bals(tmp_qso_wave,tmp_qso_flux, metadata['Z'],
                                                  balprob=args.balprob,seed=seed)
            # restore random state to get the same random numbers later
            # as when we don't insert BALs
            np.random.set_state(rnd_state)
            meta_bal['TARGETID'] = metadata['MOCKID']
            w = meta_bal['TEMPLATEID']!=-1
            meta_bal = meta_bal[:][w]
            hdu_bal=pyfits.convenience.table_to_hdu(meta_bal); hdu_bal.name="BAL_META"
            del meta_bal
        else:
            balstr=str(args.balprob)
            log.error("BAL probability is not between 0 and 1 : "+balstr)
            sys.exit(1)

    # Multiply quasar continua by transmitted flux fraction
    # (at this point transmission file might include Ly-beta, metals and DLAs)
    log.info("Apply transmitted flux fraction")
    if not args.no_transmission:
        tmp_qso_flux = apply_lya_transmission(tmp_qso_wave,tmp_qso_flux,
                            trans_wave,transmission)

    # if requested, compute metal transmission on the fly
    # (if not included already from the transmission file)
    if args.metals is not None:
        if args.metals_from_file :
            log.error('you cannot add metals twice')
            raise ValueError('you cannot add metals twice')
        if args.no_transmission:
            log.error('you cannot add metals if asking for no-transmission')
            raise ValueError('can not add metals if using no-transmission')
        lstMetals = ''
        for m in args.metals: lstMetals += m+', '
        log.info("Apply metals: {}".format(lstMetals[:-2]))

        tmp_qso_flux = apply_metals_transmission(tmp_qso_wave,tmp_qso_flux,
                            trans_wave,transmission,args.metals)

    # if requested, compute magnitudes and apply target selection.  Need to do
    # this calculation separately for QSOs in the north vs south.
    bbflux=None
    if args.target_selection or args.bbflux :
        bands=['FLUX_G','FLUX_R','FLUX_Z', 'FLUX_W1', 'FLUX_W2']
        bbflux=dict()
        bbflux['SOUTH'] = is_south(metadata['DEC'])
        for band in bands:
            bbflux[band] = np.zeros(nqso)
        # need to recompute the magnitudes to account for lya transmission
        log.info("Compute QSO magnitudes")

        for these, filters in zip( (~bbflux['SOUTH'], bbflux['SOUTH']),
                                   (bassmzls_and_wise_filters, decam_and_wise_filters) ):
            if np.count_nonzero(these) > 0:
                maggies = filters.get_ab_maggies(1e-17 * tmp_qso_flux[these, :], tmp_qso_wave)
                for band, filt in zip( bands, maggies.colnames ):
                    bbflux[band][these] = np.ma.getdata(1e9 * maggies[filt]) # nanomaggies

    if args.target_selection :
        log.info("Apply target selection")
        isqso = np.ones(nqso, dtype=bool)
        for these, issouth in zip( (~bbflux['SOUTH'], bbflux['SOUTH']), (False, True) ):
            if np.count_nonzero(these) > 0:
                # optical cuts only if using QSO vs SIMQSO
                isqso[these] &= isQSO_colors(gflux=bbflux['FLUX_G'][these],
                                             rflux=bbflux['FLUX_R'][these],
                                             zflux=bbflux['FLUX_Z'][these],
                                             w1flux=bbflux['FLUX_W1'][these],
                                             w2flux=bbflux['FLUX_W2'][these],
                                             south=issouth, optical=args.no_simqso)

        log.info("Target selection: {}/{} QSOs selected".format(np.sum(isqso),nqso))
        selection=np.where(isqso)[0]
        if selection.size==0 : return
        tmp_qso_flux = tmp_qso_flux[selection]
        metadata     = metadata[:][selection]
        meta         = meta[:][selection]
        qsometa      = qsometa[:][selection]
        DZ_FOG      = DZ_FOG[selection]
        for band in bands :
            bbflux[band] = bbflux[band][selection]
        bbflux['SOUTH'] = bbflux['SOUTH'][selection]

        nqso         = selection.size

    log.info("Resample to a linear wavelength grid (needed by DESI sim.)")
    # careful integration of bins, not just a simple interpolation
    qso_wave=np.linspace(args.wmin,args.wmax,int((args.wmax-args.wmin)/args.dwave)+1)
    qso_flux=np.zeros((tmp_qso_flux.shape[0],qso_wave.size))
    for q in range(tmp_qso_flux.shape[0]) :
        qso_flux[q]=resample_flux(qso_wave,tmp_qso_wave,tmp_qso_flux[q])

    log.info("Simulate DESI observation and write output file")
    if "MOCKID" in metadata.dtype.names :
        #log.warning("Using MOCKID as TARGETID")
        targetid=np.array(metadata["MOCKID"]).astype(int)
    elif "ID" in metadata.dtype.names :
        log.warning("Using ID as TARGETID")
        targetid=np.array(metadata["ID"]).astype(int)
    else :
        log.warning("No TARGETID")
        targetid=None

    specmeta={"HPXNSIDE":nside,"HPXPIXEL":pixel, "HPXNEST":hpxnest}

    if args.target_selection or args.bbflux :
        fibermap_columns = dict(
            FLUX_G = bbflux['FLUX_G'],
            FLUX_R = bbflux['FLUX_R'],
            FLUX_Z = bbflux['FLUX_Z'],
            FLUX_W1 = bbflux['FLUX_W1'],
            FLUX_W2 = bbflux['FLUX_W2'],
            )
        photsys = np.full(len(bbflux['FLUX_G']), 'N', dtype='S1')
        photsys[bbflux['SOUTH']] = b'S'
        fibermap_columns['PHOTSYS'] = photsys
    else :
        fibermap_columns=None

    # Attenuate the spectra for extinction
    if sfdmap is not None:
        Rv = 3.1   # set by default
        indx = np.arange(metadata['RA'].size)
        extinction = Rv * ext_odonnell(qso_wave)
        EBV = sfdmap.ebv(metadata['RA'], metadata['DEC'], scaling=1.0)
        qso_flux *= 10**(-0.4 * EBV[indx, np.newaxis] * extinction)
        if fibermap_columns is not None:
            fibermap_columns['EBV'] = EBV
        EBV0 = 0.0
        EBV_med = np.median(EBV)
        Ag = 3.303 * (EBV_med - EBV0)  # median g-band extinction in magnitudes
        # scale the exposure time so that the median dimming is roughly compensated
        exptime_fact = np.power(10.0, (2.0 * Ag / 2.5))
        obsconditions['EXPTIME'] *= exptime_fact
        log.info("Dust extinction added")
        log.info('exposure time adjusted to {}'.format(obsconditions['EXPTIME']))

    sim_spectra(qso_wave,qso_flux, args.program, obsconditions=obsconditions,spectra_filename=ofilename,
                sourcetype="qso", skyerr=args.skyerr,ra=metadata["RA"],dec=metadata["DEC"],targetid=targetid,
                meta=specmeta,seed=seed,fibermap_columns=fibermap_columns,use_poisson=False) # use Poisson = False to get reproducible results.

    ### Keep input redshift
    Z_spec = metadata['Z'].copy()
    Z_input = metadata['Z'].copy()-DZ_FOG

    ### Add a shift to the redshift, simulating the systematic imprecision of redrock
    DZ_sys_shift = args.shift_kms_los/c*(1.+Z_input)
    log.info('Added a shift of {} km/s to the redshift'.format(args.shift_kms_los))
    meta['REDSHIFT'] += DZ_sys_shift
    metadata['Z'] += DZ_sys_shift

    ### Add a shift to the redshift, simulating the statistical imprecision of redrock
    if args.gamma_kms_zfit:
        log.info("Added zfit error with gamma {} to zbest".format(args.gamma_kms_zfit))
        DZ_stat_shift = mod_cauchy(loc=0,scale=args.gamma_kms_zfit,size=nqso,cut=3000)/c*(1.+Z_input)
        meta['REDSHIFT'] += DZ_stat_shift
        metadata['Z'] += DZ_stat_shift

    ## Write the truth file, including metadata for DLAs and BALs
    log.info('Writing a truth file  {}'.format(truth_filename))
    meta.rename_column('REDSHIFT','Z')
    meta.add_column(Column(Z_spec,name='TRUEZ'))
    meta.add_column(Column(Z_input,name='Z_INPUT'))
    meta.add_column(Column(DZ_FOG,name='DZ_FOG'))
    meta.add_column(Column(DZ_sys_shift,name='DZ_SYS'))
    if args.gamma_kms_zfit:
        meta.add_column(Column(DZ_stat_shift,name='DZ_STAT'))
    if 'Z_noRSD' in metadata.dtype.names:
        meta.add_column(Column(metadata['Z_noRSD'],name='Z_NORSD'))
    else:
        log.info('Z_noRSD field not present in transmission file. Z_NORSD not saved to truth file')

    #Save global seed and pixel seed to primary header
    hdr=pyfits.Header()
    hdr['GSEED']=global_seed
    hdr['PIXSEED']=seed
    hdu = pyfits.convenience.table_to_hdu(meta)
    hdu.header['EXTNAME'] = 'TRUTH'
    hduqso=pyfits.convenience.table_to_hdu(qsometa)
    hduqso.header['EXTNAME'] = 'QSO_META'
    hdulist=pyfits.HDUList([pyfits.PrimaryHDU(header=hdr),hdu,hduqso])
    if args.dla:
        hdulist.append(hdu_dla)
    if args.balprob:
        hdulist.append(hdu_bal)
    hdulist.writeto(truth_filename, overwrite=True)
    hdulist.close()




    if args.zbest :
        log.info("Read fibermap")
        fibermap = read_fibermap(ofilename)
        log.info("Writing a zbest file {}".format(zbest_filename))
        columns = [
            ('CHI2', 'f8'),
            ('COEFF', 'f8' , (4,)),
            ('Z', 'f8'),
            ('ZERR', 'f8'),
            ('ZWARN', 'i8'),
            ('SPECTYPE', (str,96)),
            ('SUBTYPE', (str,16)),
            ('TARGETID', 'i8'),
            ('DELTACHI2', 'f8'),
            ('BRICKNAME', (str,8))]
        zbest = Table(np.zeros(nqso, dtype=columns))
        zbest['CHI2'][:] = 0.
        zbest['Z'][:] = metadata['Z']
        zbest['ZERR'][:] = 0.
        zbest['ZWARN'][:] = 0
        zbest['SPECTYPE'][:] = 'QSO'
        zbest['SUBTYPE'][:] = ''
        zbest['TARGETID'][:] = metadata['MOCKID']
        zbest['DELTACHI2'][:] = 25.
        hzbest = pyfits.convenience.table_to_hdu(zbest); hzbest.name='ZBEST'
        hfmap  = pyfits.convenience.table_to_hdu(fibermap);  hfmap.name='FIBERMAP'
        hdulist =pyfits.HDUList([pyfits.PrimaryHDU(),hzbest,hfmap])
        hdulist.writeto(zbest_filename, overwrite=True)
        hdulist.close() # see if this helps with memory issue
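The block above writes two multi-extension FITS files: a truth file (TRUTH and QSO_META, plus DLA_META and BAL_META when requested) whose primary header carries the GSEED and PIXSEED keywords, and a zbest file (ZBEST and FIBERMAP). A minimal sketch of how they could be inspected with astropy; the file names below are placeholders standing in for get_truth_filename() and get_zbest_filename():

import astropy.io.fits as pyfits

with pyfits.open("truth-16-1234.fits") as hx:               #- placeholder file name
    print(hx[0].header["GSEED"], hx[0].header["PIXSEED"])   #- global and per-pixel seeds
    truth   = hx["TRUTH"].data      #- Z, TRUEZ, Z_INPUT, DZ_FOG, DZ_SYS, ...
    qsometa = hx["QSO_META"].data   #- per-object template metadata
with pyfits.open("zbest-16-1234.fits") as hx:               #- placeholder file name
    zbest    = hx["ZBEST"].data
    fibermap = hx["FIBERMAP"].data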
Code example #5
File: quicklook.py Project: michaelJwilson/LBGCMB
def setup_pipeline(config):
    """
    Given a configuration from QLF, this sets up a pipeline [pa,qa] and also returns a
    conversion dictionary from the configuration dictionary so that Pipeline steps (PA) can
    take them. This is required for runpipeline.
    """
    import astropy.io.fits as fits
    import desispec.io.fibermap as fibIO
    import desispec.io.sky as skyIO
    import desispec.io.fiberflat as ffIO
    import desispec.fiberflat as ff
    import desispec.io.image as imIO
    import desispec.image as im
    import desispec.io.frame as frIO
    import desispec.frame as dframe
    from desispec.quicklook import procalgs
    from desispec.boxcar import do_boxcar

    qlog=qllogger.QLLogger("QuickLook",20)
    log=qlog.getlog()
    if config is None:
        return None
    log.info("Reading Configuration")
    if "RawImage" not in config:
        log.critical("Config is missing \"RawImage\" key.")
        sys.exit("Missing \"RawImage\" key.")
    inpname=config["RawImage"]
    if "FiberMap" not in config:
        log.critical("Config is missing \"FiberMap\" key.")
        sys.exit("Missing \"FiberMap\" key.")
    fibname=config["FiberMap"]
    proctype="Exposure"
    if "Camera" in config:
        camera=config["Camera"]
    if "DataType" in config:
        proctype=config["DataType"]
    debuglevel=20
    if "DebugLevel" in config:
        debuglevel=config["DebugLevel"]
        log.setLevel(debuglevel)
    hbeat=QLHB.QLHeartbeat(log,config["Period"],config["Timeout"])
    if config["Timeout"]> 200.0:
        log.warning("Heartbeat timeout exceeding 200.0 seconds")
    dumpintermediates=False
    if "DumpIntermediates" in config:
        dumpintermediates=config["DumpIntermediates"]

    biasimage=None #- This will be the converted dictionary key
    biasfile=None
    if "BiasImage" in config:
        biasfile=config["BiasImage"]

    darkimage=None
    darkfile=None
    if "DarkImage" in config:
        darkfile=config["DarkImage"]

    pixelflatfile=None
    pixflatimage=None
    if "PixelFlat" in config:
        pixelflatfile=config["PixelFlat"]

    fiberflatimagefile=None
    fiberflatimage=None
    if "FiberFlatImage" in config:
        fiberflatimagefile=config["FiberFlatImage"]

    arclampimagefile=None
    arclampimage=None
    if "ArcLampImage" in config:
        arclampimagefile=config["ArcLampImage"]

    fiberflatfile=None
    fiberflat=None
    if "FiberFlatFile" in config:
        if config["Flavor"] == 'arcs':
            pass
        else:
            fiberflatfile=config["FiberFlatFile"]

    skyfile=None
    skyimage=None
    if "SkyFile" in config:
        skyfile=config["SkyFile"]

    psf=None
    if config["Flavor"] == 'arcs':
        if not os.path.exists(os.path.join(os.environ['QL_SPEC_REDUX'],'calib2d','psf',config["Night"])):
            os.mkdir(os.path.join(os.environ['QL_SPEC_REDUX'],'calib2d','psf',config["Night"]))
        pass
    elif "PSFFile" in config:
        #from specter.psf import load_psf
        import desispec.psf
        psf=desispec.psf.PSF(config["PSFFile"])
        #psf=load_psf(config["PSFFile"])

    if "basePath" in config:
        basePath=config["basePath"]

    hbeat.start("Reading input file {}".format(inpname))
    inp=fits.open(inpname) #- reading raw image directly from astropy.io.fits
    hbeat.start("Reading fiberMap file {}".format(fibname))
    fibfile=fibIO.read_fibermap(fibname)
    fibhdr=fibfile.meta

    convdict={"FiberMap":fibfile}

    if psf is not None:
        convdict["PSFFile"]=psf

    if biasfile is not None:
        hbeat.start("Reading Bias Image {}".format(biasfile))
        biasimage=imIO.read_image(biasfile)
        convdict["BiasImage"]=biasimage

    if darkfile is not None:
        hbeat.start("Reading Dark Image {}".format(darkfile))
        darkimage=imIO.read_image(darkfile)
        convdict["DarkImage"]=darkimage

    if pixelflatfile:
        hbeat.start("Reading PixelFlat Image {}".format(pixelflatfile))
        pixelflatimage=imIO.read_image(pixelflatfile)
        convdict["PixelFlat"]=pixelflatimage

    if fiberflatimagefile:
        hbeat.start("Reading FiberFlat Image {}".format(fiberflatimagefile))
        fiberflatimage=imIO.read_image(fiberflatimagefile)
        convdict["FiberFlatImage"]=fiberflatimage

    if arclampimagefile:
        hbeat.start("Reading ArcLampImage {}".format(arclampimagefile))
        arclampimage=imIO.read_image(arclampimagefile)
        convdict["ArcLampImage"]=arclampimage

    if fiberflatfile:
        hbeat.start("Reading FiberFlat {}".format(fiberflatfile))
        fiberflat=ffIO.read_fiberflat(fiberflatfile)
        convdict["FiberFlatFile"]=fiberflat

    if skyfile:
        hbeat.start("Reading SkyModel file {}".format(skyfile))
        skymodel=skyIO.read_sky(skyfile)
        convdict["SkyFile"]=skymodel

    if dumpintermediates:
        convdict["DumpIntermediates"]=dumpintermediates
   
    hbeat.stop("Finished reading all static files")

    img=inp
    convdict["rawimage"]=img
    pipeline=[]
    for step in config["PipeLine"]:
        pa=getobject(step["PA"],log)
        if len(pipeline) == 0:
            if not pa.is_compatible(type(img)):
                log.critical("Pipeline configuration is incorrect! check configuration {} {}".format(img,pa.is_compatible(img)))
                sys.exit("Wrong pipeline configuration")
        else:
            if not pa.is_compatible(pipeline[-1][0].get_output_type()):
                log.critical("Pipeline configuration is incorrect! check configuration")
                log.critical("Can't connect input of {} to output of {}. Incompatible types".format(pa.name,pipeline[-1][0].name))
                sys.exit("Wrong pipeline configuration")
        qas=[]
        for q in step["QAs"]:
            qa=getobject(q,log)
            if not qa.is_compatible(pa.get_output_type()):
                log.warning("QA {} can not be used for output of {}. Skipping expecting {} got {} {}".format(qa.name,pa.name,qa.__inpType__,pa.get_output_type(),qa.is_compatible(pa.get_output_type())))
            else:
                qas.append(qa)
        pipeline.append([pa,qas])
    return pipeline,convdict
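For reference, this version of setup_pipeline reads "RawImage", "FiberMap", "Flavor", "Period", "Timeout" and "PipeLine" unconditionally; every other key is optional. A minimal sketch of a configuration dictionary it would accept (the paths are placeholders, and the "PA"/"QAs" entries must be whatever getobject() resolves in this codebase):

config = {
    "RawImage": "raw-r0-00000000.fits",      #- placeholder paths
    "FiberMap": "fibermap-00000000.fits",
    "Flavor": "science",                     #- anything but 'arcs' skips the PSF directory setup
    "Period": 5.0,                           #- heartbeat period and timeout, in seconds
    "Timeout": 120.0,
    "PSFFile": "psf-r0.fits",
    "PipeLine": [
        {"PA": "Preproc", "QAs": ["Get_RMS"]},   #- placeholder names resolved by getobject()
    ],
}
pipeline, convdict = setup_pipeline(config)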
Code example #6
File: quickquasars.py Project: desihub/desisim
def simulate_one_healpix(ifilename,args,model,obsconditions,decam_and_wise_filters,
                         bassmzls_and_wise_filters,footprint_healpix_weight,
                         footprint_healpix_nside,
                         bal=None,sfdmap=None,eboss=None) :
    log = get_logger()

    # open filename and extract basic HEALPix information
    pixel, nside, hpxnest = get_healpix_info(ifilename)

    # using global seed (could be None) get seed for this particular pixel
    global_seed = args.seed
    seed = get_pixel_seed(pixel, nside, global_seed)
    # use this seed to generate future random numbers
    np.random.seed(seed)

    # get output file (we will write there spectra for this HEALPix pixel)
    ofilename = get_spectra_filename(args,nside,pixel)
    # get directory name (we will also write there zbest file)
    pixdir = os.path.dirname(ofilename)

    # get filename for truth file
    truth_filename = get_truth_filename(args,pixdir,nside,pixel)

    # get filename for zbest file
    zbest_filename = get_zbest_filename(args,pixdir,nside,pixel)

    if not args.overwrite :
        # check whether output exists or not
        if args.zbest :
            if os.path.isfile(ofilename) and os.path.isfile(zbest_filename) :
                log.info("skip existing {} and {}".format(ofilename,zbest_filename))
                return
        else : # only test spectra file
            if os.path.isfile(ofilename) :
                log.info("skip existing {}".format(ofilename))
                return

    # create sub-directories if required
    if len(pixdir)>0 :
        if not os.path.isdir(pixdir) :
            log.info("Creating dir {}".format(pixdir))
            os.makedirs(pixdir)

    log.info("Read skewers in {}, random seed = {}".format(ifilename,seed))

    # Read transmission from files. It might include DLA information, and it
    # might add metal transmission as well (from the HDU file).
    log.info("Read transmission file {}".format(ifilename))
    trans_wave, transmission, metadata, dla_info = read_lya_skewers(ifilename,read_dlas=(args.dla=='file'),add_metals=args.metals_from_file)

    ### Add Finger-of-God effect before generating the continua
    log.info("Add FOG to redshift with sigma {} to quasar redshift".format(args.sigma_kms_fog))
    DZ_FOG = args.sigma_kms_fog/c*(1.+metadata['Z'])*np.random.normal(0,1,metadata['Z'].size)
    metadata['Z'] += DZ_FOG

    ### Select quasar within a given redshift range
    w = (metadata['Z']>=args.zmin) & (metadata['Z']<=args.zmax)
    transmission = transmission[w]
    metadata = metadata[:][w]
    DZ_FOG = DZ_FOG[w]

    # option to make for BOSS+eBOSS
    if eboss is not None:
        if args.downsampling or args.desi_footprint:
            raise ValueError("eboss option can not be run with "
                    +"desi_footprint or downsampling")

        # Get the redshift distribution from SDSS
        selection = sdss_subsample_redshift(metadata["RA"],metadata["DEC"],metadata['Z'],eboss['redshift'])
        log.info("Select QSOs in BOSS+eBOSS redshift distribution {} -> {}".format(metadata['Z'].size,selection.sum()))
        if selection.sum()==0:
            log.warning("No intersection with BOSS+eBOSS redshift distribution")
            return
        transmission = transmission[selection]
        metadata = metadata[:][selection]
        DZ_FOG = DZ_FOG[selection]

        # figure out the density of all quasars
        N_highz = metadata['Z'].size
        # area of healpix pixel, in degrees
        area_deg2 = healpy.pixelfunc.nside2pixarea(nside,degrees=True)
        input_highz_dens_deg2 = N_highz/area_deg2
        selection = sdss_subsample(metadata["RA"], metadata["DEC"],
                        input_highz_dens_deg2,eboss['footprint'])
        log.info("Select QSOs in BOSS+eBOSS footprint {} -> {}".format(transmission.shape[0],selection.size))
        if selection.size == 0 :
            log.warning("No intersection with BOSS+eBOSS footprint")
            return
        transmission = transmission[selection]
        metadata = metadata[:][selection]
        DZ_FOG = DZ_FOG[selection]

    if args.desi_footprint :
        footprint_healpix = footprint.radec2pix(footprint_healpix_nside, metadata["RA"], metadata["DEC"])
        selection = np.where(footprint_healpix_weight[footprint_healpix]>0.99)[0]
        log.info("Select QSOs in DESI footprint {} -> {}".format(transmission.shape[0],selection.size))
        if selection.size == 0 :
            log.warning("No intersection with DESI footprint")
            return
        transmission = transmission[selection]
        metadata = metadata[:][selection]
        DZ_FOG = DZ_FOG[selection]



    nqso=transmission.shape[0]
    if args.downsampling is not None :
        if args.downsampling <= 0 or  args.downsampling > 1 :
           log.error("Down sampling fraction={} must be between 0 and 1".format(args.downsampling))
           raise ValueError("Down sampling fraction={} must be between 0 and 1".format(args.downsampling))
        indices = np.where(np.random.uniform(size=nqso)<args.downsampling)[0]
        if indices.size == 0 :
            log.warning("Down sampling from {} to 0 (by chance I presume)".format(nqso))
            return
        transmission = transmission[indices]
        metadata = metadata[:][indices]
        DZ_FOG = DZ_FOG[indices]
        nqso = transmission.shape[0]

    if args.nmax is not None :
        if args.nmax < nqso :
            log.info("Limit number of QSOs from {} to nmax={} (random subsample)".format(nqso,args.nmax))
            # take a random subsample
            indices = (np.random.uniform(size=args.nmax)*nqso).astype(int)
            transmission = transmission[indices]
            metadata = metadata[:][indices]
            DZ_FOG = DZ_FOG[indices]
            nqso = args.nmax

    # In previous versions of the London mocks we needed to enforce F=1 for
    # z > z_qso here, but this is not needed anymore. Moreover, now we also
    # have metal absorption that implies F < 1 for z > z_qso
    #for ii in range(len(metadata)):
    #    transmission[ii][trans_wave>lambda_RF_LYA*(metadata[ii]['Z']+1)]=1.0

    # if requested, add DLA to the transmission skewers
    if args.dla is not None :

        # if adding random DLAs, we will need a new random generator
        if args.dla=='random':
            log.info('Adding DLAs randomly')
            random_state_just_for_dlas = np.random.RandomState(seed)
        elif args.dla=='file':
            log.info('Adding DLAs from transmission file')
        else:
            log.error("Wrong option for args.dla: "+args.dla)
            sys.exit(1)

        # if adding DLAs, the information will be printed here
        dla_filename=os.path.join(pixdir,"dla-{}-{}.fits".format(nside,pixel))
        dla_NHI, dla_z, dla_qid,dla_id = [], [], [],[]

        # identify minimum Lya redshift in transmission files
        min_lya_z = np.min(trans_wave/lambda_RF_LYA - 1)

        # loop over quasars in pixel

        for ii in range(len(metadata)):

            # quasars with z < min_z will not have any DLA in spectrum
            if min_lya_z>metadata['Z'][ii]: continue

            # quasar ID
            idd=metadata['MOCKID'][ii]
            dlas=[]

            if args.dla=='file':
                for dla in dla_info[dla_info['MOCKID']==idd]:

                    # Adding only DLAs with z < zqso
                    if dla['Z_DLA_RSD']>=metadata['Z'][ii]: continue
                    dlas.append(dict(z=dla['Z_DLA_RSD'],N=dla['N_HI_DLA'],dlaid=dla['DLAID']))
                transmission_dla = dla_spec(trans_wave,dlas)

            elif args.dla=='random':
                dlas, transmission_dla = insert_dlas(trans_wave, metadata['Z'][ii], rstate=random_state_just_for_dlas)
                for idla in dlas:
                   idla['dlaid']+=idd*1000      #Added to have unique DLA ids. Same format as DLAs from file.

            # multiply transmissions and store information for the DLA file
            if len(dlas)>0:
                transmission[ii] = transmission_dla * transmission[ii]
                dla_z += [idla['z'] for idla in dlas]
                dla_NHI += [idla['N'] for idla in dlas]
                dla_id += [idla['dlaid'] for idla in dlas]
                dla_qid += [idd]*len(dlas)
        log.info('Added {} DLAs'.format(len(dla_id)))
        # write file with DLA information
        if len(dla_id)>0:
            dla_meta=Table()
            dla_meta['NHI'] = dla_NHI
            dla_meta['Z_DLA'] = dla_z  #This is Z_DLA_RSD in transmision.
            dla_meta['TARGETID']=dla_qid
            dla_meta['DLAID'] = dla_id
            hdu_dla = pyfits.convenience.table_to_hdu(dla_meta)
            hdu_dla.name="DLA_META"
            del(dla_meta)
            log.info("DLA metadata to be saved in {}".format(truth_filename))
        else:
            hdu_dla=pyfits.PrimaryHDU()
            hdu_dla.name="DLA_META"

    # if requested, extend transmission skewers to cover full spectrum
    if args.target_selection or args.bbflux :
        wanted_min_wave = 3329. # needed to compute magnitudes for decam2014-r (one could have trimmed the transmission file ...)
        wanted_max_wave = 55501. # needed to compute magnitudes for wise2010-W2

        if trans_wave[0]>wanted_min_wave :
            log.info("Increase wavelength range from {}:{} to {}:{} to compute magnitudes".format(int(trans_wave[0]),int(trans_wave[-1]),int(wanted_min_wave),int(trans_wave[-1])))
            # pad with ones at short wavelength, we assume F = 1 for z <~ 1.7
            # we don't need any wavelength resolution here
            new_trans_wave = np.append([wanted_min_wave,trans_wave[0]-0.01],trans_wave)
            new_transmission = np.ones((transmission.shape[0],new_trans_wave.size))
            new_transmission[:,2:] = transmission
            trans_wave   = new_trans_wave
            transmission = new_transmission

        if trans_wave[-1]<wanted_max_wave :
            log.info("Increase wavelength range from {}:{} to {}:{} to compute magnitudes".format(int(trans_wave[0]),int(trans_wave[-1]),int(trans_wave[0]),int(wanted_max_wave)))
            # pad with ones at long wavelength because we assume F = 1
            coarse_dwave = 2. # we don't care about resolution, we just need a decent QSO spectrum, there is no IGM transmission in this range
            n = int((wanted_max_wave-trans_wave[-1])/coarse_dwave)+1
            new_trans_wave = np.append(trans_wave,np.linspace(trans_wave[-1]+coarse_dwave,trans_wave[-1]+coarse_dwave*(n+1),n))
            new_transmission = np.ones((transmission.shape[0],new_trans_wave.size))
            new_transmission[:,:trans_wave.size] = transmission
            trans_wave   = new_trans_wave
            transmission = new_transmission

    # whether to use QSO or SIMQSO to generate quasar continua.  Simulate
    # spectra in the north vs south separately because they're on different
    # photometric systems.
    south = np.where( is_south(metadata['DEC']) )[0]
    north = np.where( ~is_south(metadata['DEC']) )[0]
    meta, qsometa = empty_metatable(nqso, objtype='QSO', simqso=not args.no_simqso)
    if args.no_simqso:
        log.info("Simulate {} QSOs with QSO templates".format(nqso))
        tmp_qso_flux = np.zeros([nqso, len(model.eigenwave)], dtype='f4')
        tmp_qso_wave = np.zeros_like(tmp_qso_flux)
    else:
        log.info("Simulate {} QSOs with SIMQSO templates".format(nqso))
        tmp_qso_flux = np.zeros([nqso, len(model.basewave)], dtype='f4')
        tmp_qso_wave = model.basewave

    for these, issouth in zip( (north, south), (False, True) ):

        # number of quasars in these
        nt = len(these)
        if nt<=0: continue

        if eboss is not None:
            # for eBOSS, generate only quasars with r<22
            magrange = (17.0, 21.3)
            _tmp_qso_flux, _tmp_qso_wave, _meta, _qsometa \
                = model.make_templates(nmodel=nt,
                    redshift=metadata['Z'][these], magrange=magrange,
                    lyaforest=False, nocolorcuts=True,
                    noresample=True, seed=seed, south=issouth)
        else:
            _tmp_qso_flux, _tmp_qso_wave, _meta, _qsometa \
                = model.make_templates(nmodel=nt,
                    redshift=metadata['Z'][these],
                    lyaforest=False, nocolorcuts=True,
                    noresample=True, seed=seed, south=issouth)

        _meta['TARGETID'] = metadata['MOCKID'][these]
        _qsometa['TARGETID'] = metadata['MOCKID'][these]
        meta[these] = _meta
        qsometa[these] = _qsometa
        tmp_qso_flux[these, :] = _tmp_qso_flux

        if args.no_simqso:
            tmp_qso_wave[these, :] = _tmp_qso_wave

    log.info("Resample to transmission wavelength grid")
    qso_flux=np.zeros((tmp_qso_flux.shape[0],trans_wave.size))
    if args.no_simqso:
        for q in range(tmp_qso_flux.shape[0]) :
            qso_flux[q]=np.interp(trans_wave,tmp_qso_wave[q],tmp_qso_flux[q])
    else:
        for q in range(tmp_qso_flux.shape[0]) :
            qso_flux[q]=np.interp(trans_wave,tmp_qso_wave,tmp_qso_flux[q])

    tmp_qso_flux = qso_flux
    tmp_qso_wave = trans_wave

    # if requested, add BAL features to the quasar continua
    if args.balprob:
        if args.balprob<=1. and args.balprob >0:
            log.info("Adding BALs with probability {}".format(args.balprob))
            # save current random state
            rnd_state = np.random.get_state()
            tmp_qso_flux,meta_bal=bal.insert_bals(tmp_qso_wave,tmp_qso_flux, metadata['Z'],
                                                  balprob=args.balprob,seed=seed)
            # restore random state to get the same random numbers later
            # as when we don't insert BALs
            np.random.set_state(rnd_state)
            meta_bal['TARGETID'] = metadata['MOCKID']
            w = meta_bal['TEMPLATEID']!=-1
            meta_bal = meta_bal[:][w]
            hdu_bal=pyfits.convenience.table_to_hdu(meta_bal); hdu_bal.name="BAL_META"
            del meta_bal
        else:
            balstr=str(args.balprob)
            log.error("BAL probability is not between 0 and 1 : "+balstr)
            sys.exit(1)

    # Multiply quasar continua by transmitted flux fraction
    # (at this point transmission file might include Ly-beta, metals and DLAs)
    log.info("Apply transmitted flux fraction")
    if not args.no_transmission:
        tmp_qso_flux = apply_lya_transmission(tmp_qso_wave,tmp_qso_flux,
                            trans_wave,transmission)

    # if requested, compute metal transmission on the fly
    # (if not included already from the transmission file)
    if args.metals is not None:
        if args.metals_from_file:
            log.error('you cannot add metals twice')
            raise ValueError('you cannot add metals twice')
        if args.no_transmission:
            log.error('you cannot add metals if asking for no-transmission')
            raise ValueError('can not add metals if using no-transmission')
        lstMetals = ''
        for m in args.metals: lstMetals += m+', '
        log.info("Apply metals: {}".format(lstMetals[:-2]))

        tmp_qso_flux = apply_metals_transmission(tmp_qso_wave,tmp_qso_flux,
                            trans_wave,transmission,args.metals)

    # if requested, compute magnitudes and apply target selection.  Need to do
    # this calculation separately for QSOs in the north vs south.
    bbflux=None
    if args.target_selection or args.bbflux :
        bands=['FLUX_G','FLUX_R','FLUX_Z', 'FLUX_W1', 'FLUX_W2']
        bbflux=dict()
        bbflux['SOUTH'] = is_south(metadata['DEC'])
        for band in bands:
            bbflux[band] = np.zeros(nqso)
        # need to recompute the magnitudes to account for lya transmission
        log.info("Compute QSO magnitudes")

        for these, filters in zip( (~bbflux['SOUTH'], bbflux['SOUTH']),
                                   (bassmzls_and_wise_filters, decam_and_wise_filters) ):
            if np.count_nonzero(these) > 0:
                maggies = filters.get_ab_maggies(1e-17 * tmp_qso_flux[these, :], tmp_qso_wave)
                for band, filt in zip( bands, maggies.colnames ):
                    bbflux[band][these] = np.ma.getdata(1e9 * maggies[filt]) # nanomaggies

    if args.target_selection :
        log.info("Apply target selection")
        isqso = np.ones(nqso, dtype=bool)
        for these, issouth in zip( (~bbflux['SOUTH'], bbflux['SOUTH']), (False, True) ):
            if np.count_nonzero(these) > 0:
                # apply optical-only color cuts when using QSO templates (args.no_simqso), not SIMQSO
                isqso[these] &= isQSO_colors(gflux=bbflux['FLUX_G'][these],
                                             rflux=bbflux['FLUX_R'][these],
                                             zflux=bbflux['FLUX_Z'][these],
                                             w1flux=bbflux['FLUX_W1'][these],
                                             w2flux=bbflux['FLUX_W2'][these],
                                             south=issouth, optical=args.no_simqso)

        log.info("Target selection: {}/{} QSOs selected".format(np.sum(isqso),nqso))
        selection=np.where(isqso)[0]
        if selection.size==0 : return
        tmp_qso_flux = tmp_qso_flux[selection]
        metadata     = metadata[:][selection]
        meta         = meta[:][selection]
        qsometa      = qsometa[:][selection]
        DZ_FOG      = DZ_FOG[selection]

        for band in bands :
            bbflux[band] = bbflux[band][selection]
        nqso         = selection.size

    log.info("Resample to a linear wavelength grid (needed by DESI sim.)")
    # careful integration of bins, not just a simple interpolation
    qso_wave=np.linspace(args.wmin,args.wmax,int((args.wmax-args.wmin)/args.dwave)+1)
    qso_flux=np.zeros((tmp_qso_flux.shape[0],qso_wave.size))
    for q in range(tmp_qso_flux.shape[0]) :
        qso_flux[q]=resample_flux(qso_wave,tmp_qso_wave,tmp_qso_flux[q])

    log.info("Simulate DESI observation and write output file")
    if "MOCKID" in metadata.dtype.names :
        #log.warning("Using MOCKID as TARGETID")
        targetid=np.array(metadata["MOCKID"]).astype(int)
    elif "ID" in metadata.dtype.names :
        log.warning("Using ID as TARGETID")
        targetid=np.array(metadata["ID"]).astype(int)
    else :
        log.warning("No TARGETID")
        targetid=None

    specmeta={"HPXNSIDE":nside,"HPXPIXEL":pixel, "HPXNEST":hpxnest}

    if args.target_selection or args.bbflux :
        fibermap_columns = dict(
            FLUX_G = bbflux['FLUX_G'],
            FLUX_R = bbflux['FLUX_R'],
            FLUX_Z = bbflux['FLUX_Z'],
            FLUX_W1 = bbflux['FLUX_W1'],
            FLUX_W2 = bbflux['FLUX_W2'],
            )
        photsys = np.full(len(bbflux['FLUX_G']), 'N', dtype='S1')
        photsys[bbflux['SOUTH']] = b'S'
        fibermap_columns['PHOTSYS'] = photsys
    else :
        fibermap_columns=None

    # Attenuate the spectra for extinction
    if sfdmap is not None:
       Rv=3.1   #set by default
       indx=np.arange(metadata['RA'].size)
       extinction =Rv*ext_odonnell(qso_wave)
       EBV = sfdmap.ebv(metadata['RA'],metadata['DEC'], scaling=1.0)
       qso_flux *=10**( -0.4 * EBV[indx, np.newaxis] * extinction)
       if fibermap_columns is not None:
          fibermap_columns['EBV']=EBV
       EBV0=0.0
       EBV_med=np.median(EBV)
       Ag = 3.303 * (EBV_med - EBV0)
       exptime_fact=np.power(10.0, (2.0 * Ag / 2.5))
       obsconditions['EXPTIME']*=exptime_fact
       log.info("Dust extinction added")
       log.info('exposure time adjusted to {}'.format(obsconditions['EXPTIME']))

    sim_spectra(qso_wave,qso_flux, args.program, obsconditions=obsconditions,spectra_filename=ofilename,
                sourcetype="qso", skyerr=args.skyerr,ra=metadata["RA"],dec=metadata["DEC"],targetid=targetid,
                meta=specmeta,seed=seed,fibermap_columns=fibermap_columns,use_poisson=False) # use Poisson = False to get reproducible results.

    ### Keep input redshift
    Z_spec = metadata['Z'].copy()
    Z_input = metadata['Z'].copy()-DZ_FOG

    ### Add a shift to the redshift, simulating the systematic imprecision of redrock
    DZ_sys_shift = args.shift_kms_los/c*(1.+Z_input)
    log.info('Added a shift of {} km/s to the redshift'.format(args.shift_kms_los))
    meta['REDSHIFT'] += DZ_sys_shift
    metadata['Z'] += DZ_sys_shift

    ### Add a shift to the redshift, simulating the statistical imprecision of redrock
    if args.gamma_kms_zfit:
        log.info("Added zfit error with gamma {} to zbest".format(args.gamma_kms_zfit))
        DZ_stat_shift = mod_cauchy(loc=0,scale=args.gamma_kms_zfit,size=nqso,cut=3000)/c*(1.+Z_input)
        meta['REDSHIFT'] += DZ_stat_shift
        metadata['Z'] += DZ_stat_shift

    ## Write the truth file, including metadata for DLAs and BALs
    log.info('Writing a truth file  {}'.format(truth_filename))
    meta.rename_column('REDSHIFT','Z')
    meta.add_column(Column(Z_spec,name='TRUEZ'))
    meta.add_column(Column(Z_input,name='Z_INPUT'))
    meta.add_column(Column(DZ_FOG,name='DZ_FOG'))
    meta.add_column(Column(DZ_sys_shift,name='DZ_SYS'))
    if args.gamma_kms_zfit:
        meta.add_column(Column(DZ_stat_shift,name='DZ_STAT'))
    if 'Z_noRSD' in metadata.dtype.names:
        meta.add_column(Column(metadata['Z_noRSD'],name='Z_NORSD'))
    else:
        log.info('Z_noRSD field not present in transmission file. Z_NORSD not saved to truth file')

    hdu = pyfits.convenience.table_to_hdu(meta)
    hdu.header['EXTNAME'] = 'TRUTH'
    hduqso=pyfits.convenience.table_to_hdu(qsometa)
    hduqso.header['EXTNAME'] = 'QSO_META'
    hdulist=pyfits.HDUList([pyfits.PrimaryHDU(),hdu,hduqso])
    if args.dla:
        hdulist.append(hdu_dla)
    if args.balprob:
        hdulist.append(hdu_bal)
    hdulist.writeto(truth_filename, overwrite=True)
    hdulist.close()




    if args.zbest :
        log.info("Read fibermap")
        fibermap = read_fibermap(ofilename)
        log.info("Writing a zbest file {}".format(zbest_filename))
        columns = [
            ('CHI2', 'f8'),
            ('COEFF', 'f8' , (4,)),
            ('Z', 'f8'),
            ('ZERR', 'f8'),
            ('ZWARN', 'i8'),
            ('SPECTYPE', (str,96)),
            ('SUBTYPE', (str,16)),
            ('TARGETID', 'i8'),
            ('DELTACHI2', 'f8'),
            ('BRICKNAME', (str,8))]
        zbest = Table(np.zeros(nqso, dtype=columns))
        zbest['CHI2'][:] = 0.
        zbest['Z'][:] = metadata['Z']
        zbest['ZERR'][:] = 0.
        zbest['ZWARN'][:] = 0
        zbest['SPECTYPE'][:] = 'QSO'
        zbest['SUBTYPE'][:] = ''
        zbest['TARGETID'][:] = metadata['MOCKID']
        zbest['DELTACHI2'][:] = 25.
        hzbest = pyfits.convenience.table_to_hdu(zbest); hzbest.name='ZBEST'
        hfmap  = pyfits.convenience.table_to_hdu(fibermap);  hfmap.name='FIBERMAP'
        hdulist =pyfits.HDUList([pyfits.PrimaryHDU(),hzbest,hfmap])
        hdulist.writeto(zbest_filename, overwrite=True)
        hdulist.close() # see if this helps with memory issue
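mod_cauchy is called above to add the statistical redshift error but is not shown in these excerpts. Judging from the call mod_cauchy(loc=0, scale=args.gamma_kms_zfit, size=nqso, cut=3000), it behaves like a truncated Cauchy sampler; a sketch of such a function (an illustration, not the desisim implementation) could look like this:

import numpy as np
from scipy.stats import cauchy

def mod_cauchy(loc, scale, size, cut):
    """Draw `size` values from Cauchy(loc, scale), redrawing any sample with
    |x - loc| > cut so that the heavy tails are truncated (illustrative sketch)."""
    x = cauchy.rvs(loc=loc, scale=scale, size=size)
    bad = np.abs(x - loc) > cut
    while np.any(bad):
        x[bad] = cauchy.rvs(loc=loc, scale=scale, size=bad.sum())
        bad = np.abs(x - loc) > cut
    return x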
Code example #7
File: quickGen.py Project: forero/desisim
if 'DESIMODEL' not in os.environ:
    raise RuntimeError('The environment variable DESIMODEL must be set.')
DESIMODEL_DIR=os.environ['DESIMODEL'] 


# Look for Directory tree/ environment set up
# Directory Tree is $DESI_SPECTRO_REDUX/$PRODNAME/exposures/NIGHT/EXPID/*.fits
# Perhaps can be synced with desispec findfile?

#But read fibermap file and extract the headers needed for Directory tree

#read fibermapfile to get objecttype,NIGHT and EXPID....
if args.fiberfile:
 
    print "Reading fibermap file %s"%(args.fiberfile)
    tbdata,hdr=fibermap.read_fibermap(args.fiberfile)
    fiber_hdulist=pyfits.open(args.fiberfile)
    objtype=tbdata['OBJTYPE'].copy()
    #need to replace STD object types with STAR since quicksim expects star instead of std
    stdindx=np.where(objtype=='STD') # match STD with STAR
    objtype[stdindx]='STAR'
    NIGHT=hdr['NIGHT']
    EXPID=hdr['EXPID']


else:
    print "Need Fibermap file"


#----------DESI_SPECTRO_REDUX--------
DESI_SPECTRO_REDUX_DIR="./quickGen"
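The comments above describe the intended output layout, $DESI_SPECTRO_REDUX/$PRODNAME/exposures/NIGHT/EXPID/. A sketch of how that per-exposure directory could be assembled from the header values read above (the PRODNAME environment variable and the zero-padded EXPID format are assumptions):

prodname = os.environ.get('PRODNAME', 'quickGen')        #- assumed production name
expdir = os.path.join(DESI_SPECTRO_REDUX_DIR, prodname,
                      'exposures', str(NIGHT), '{:08d}'.format(EXPID))
if not os.path.isdir(expdir):
    os.makedirs(expdir)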
Code example #8
def simulate_one_healpix(ifilename, args, model, obsconditions,
                         decam_and_wise_filters, footprint_healpix_weight,
                         footprint_healpix_nside):

    log = get_logger()

    healpix = 0
    nside = 0
    vals = os.path.basename(ifilename).split(".")[0].split("-")
    if len(vals) < 3:
        log.error("Cannot guess nside and healpix from filename {}".format(
            ifilename))
        raise ValueError(
            "Cannot guess nside and healpix from filename {}".format(
                ifilename))
    try:
        healpix = int(vals[-1])
        nside = int(vals[-2])
    except ValueError:
        raise ValueError(
            "Cannot guess nside and healpix from filename {}".format(
                ifilename))

    zbest_filename = None
    if args.outfile:
        ofilename = args.outfile
    else:
        ofilename = os.path.join(
            args.outdir,
            "{}/{}/spectra-{}-{}.fits".format(healpix // 100, healpix, nside,
                                              healpix))
    pixdir = os.path.dirname(ofilename)

    if not args.overwrite:
        # check whether output exists or not

        if args.zbest:
            zbest_filename = os.path.join(
                pixdir, "zbest-{}-{}.fits".format(nside, healpix))
            if os.path.isfile(ofilename) and os.path.isfile(zbest_filename):
                log.info("skip existing {} and {}".format(
                    ofilename, zbest_filename))
                return
        else:  # only test spectra file
            if os.path.isfile(ofilename):
                log.info("skip existing {}".format(ofilename))
                return

    log.info("Read skewers in {}".format(ifilename))
    trans_wave, transmission, metadata = read_lya_skewers(ifilename)
    ok = np.where((metadata['Z'] >= args.zmin)
                  & (metadata['Z'] <= args.zmax))[0]
    transmission = transmission[ok]
    metadata = metadata[:][ok]

    # set seed now in case we are downsampling
    np.random.seed(args.seed)

    # create quasars

    if args.desi_footprint:
        footprint_healpix = footprint.radec2pix(footprint_healpix_nside,
                                                metadata["RA"],
                                                metadata["DEC"])
        selection = np.where(
            footprint_healpix_weight[footprint_healpix] > 0.99)[0]
        log.info("Select QSOs in DESI footprint {} -> {}".format(
            transmission.shape[0], selection.size))
        if selection.size == 0:
            log.warning("No intersection with DESI footprint")
            return
        transmission = transmission[selection]
        metadata = metadata[:][selection]

    nqso = transmission.shape[0]
    if args.downsampling is not None:
        if args.downsampling <= 0 or args.downsampling > 1:
            log.error(
                "Down sampling fraction={} must be between 0 and 1".format(
                    args.downsampling))
            raise ValueError(
                "Down sampling fraction={} must be between 0 and 1".format(
                    args.downsampling))
        indices = np.where(np.random.uniform(size=nqso) < args.downsampling)[0]
        if indices.size == 0:
            log.warning(
                "Down sampling from {} to 0 (by chance I presume)".format(
                    nqso))
            return
        transmission = transmission[indices]
        metadata = metadata[:][indices]
        nqso = transmission.shape[0]

    if args.nmax is not None:
        if args.nmax < nqso:
            log.info(
                "Limit number of QSOs from {} to nmax={} (random subsample)".
                format(nqso, args.nmax))
            # take a random subsample
            indices = (np.random.uniform(size=args.nmax) * nqso).astype(int)
            transmission = transmission[indices]
            metadata = metadata[:][indices]
            nqso = args.nmax

    log.info("Simulate {} QSOs".format(nqso))
    tmp_qso_flux, tmp_qso_wave, meta = model.make_templates(
        nmodel=nqso,
        redshift=metadata['Z'],
        seed=args.seed,
        lyaforest=False,
        nocolorcuts=True,
        noresample=True)

    log.info("Resample to transmission wavelength grid")
    # because we don't want to alter the transmission field with resampling here
    qso_flux = np.zeros((tmp_qso_flux.shape[0], trans_wave.size))
    for q in range(tmp_qso_flux.shape[0]):
        qso_flux[q] = np.interp(trans_wave, tmp_qso_wave, tmp_qso_flux[q])
    tmp_qso_flux = qso_flux
    tmp_qso_wave = trans_wave

    log.info("Apply lya")
    tmp_qso_flux = apply_lya_transmission(tmp_qso_wave, tmp_qso_flux,
                                          trans_wave, transmission)

    if args.target_selection:
        log.info("Compute QSO magnitudes for target selection")
        maggies = decam_and_wise_filters.get_ab_maggies(1e-17 * tmp_qso_flux,
                                                        tmp_qso_wave.copy(),
                                                        mask_invalid=True)
        for band, filt in zip(
            ('FLUX_G', 'FLUX_R', 'FLUX_Z', 'FLUX_W1', 'FLUX_W2'),
            ('decam2014-g', 'decam2014-r', 'decam2014-z', 'wise2010-W1',
             'wise2010-W2')):
            meta[band] = np.ma.getdata(1e9 * maggies[filt])  # nanomaggies
        isqso = isQSO_colors(gflux=meta['FLUX_G'],
                             rflux=meta['FLUX_R'],
                             zflux=meta['FLUX_Z'],
                             w1flux=meta['FLUX_W1'],
                             w2flux=meta['FLUX_W2'])
        log.info("Target selection: {}/{} QSOs selected".format(
            np.sum(isqso), nqso))
        selection = np.where(isqso)[0]
        if selection.size == 0: return
        tmp_qso_flux = tmp_qso_flux[selection]
        metadata = metadata[:][selection]
        meta = meta[:][selection]
        nqso = selection.size

    log.info("Resample to a linear wavelength grid (needed by DESI sim.)")
    # we need a linear grid. for this resampling we take care of integrating in bins
    # we do not do a simple interpolation
    qso_wave = np.linspace(args.wmin, args.wmax,
                           int((args.wmax - args.wmin) / args.dwave) + 1)
    qso_flux = np.zeros((tmp_qso_flux.shape[0], qso_wave.size))
    for q in range(tmp_qso_flux.shape[0]):
        qso_flux[q] = resample_flux(qso_wave, tmp_qso_wave, tmp_qso_flux[q])

    log.info("Simulate DESI observation and write output file")
    pixdir = os.path.dirname(ofilename)
    if not os.path.isdir(pixdir):
        log.info("Creating dir {}".format(pixdir))
        os.makedirs(pixdir)

    if "MOCKID" in metadata.dtype.names:
        #log.warning("Using MOCKID as TARGETID")
        targetid = np.array(metadata["MOCKID"]).astype(int)
    elif "ID" in metadata.dtype.names:
        log.warning("Using ID as TARGETID")
        targetid = np.array(metadata["ID"]).astype(int)
    else:
        log.warning("No TARGETID")
        targetid = None

    sim_spectra(qso_wave,
                qso_flux,
                args.program,
                obsconditions=obsconditions,
                spectra_filename=ofilename,
                seed=args.seed,
                sourcetype="qso",
                skyerr=args.skyerr,
                ra=metadata["RA"],
                dec=metadata["DEC"],
                targetid=targetid)

    if args.zbest:
        log.info("Read fibermap")
        fibermap = read_fibermap(ofilename)

        log.info("Writing a zbest file {}".format(zbest_filename))
        columns = [('CHI2', 'f8'), ('COEFF', 'f8', (4, )), ('Z', 'f8'),
                   ('ZERR', 'f8'), ('ZWARN', 'i8'), ('SPECTYPE', (str, 96)),
                   ('SUBTYPE', (str, 16)), ('TARGETID', 'i8'),
                   ('DELTACHI2', 'f8'), ('BRICKNAME', (str, 8))]
        zbest = Table(np.zeros(nqso, dtype=columns))
        zbest["CHI2"][:] = 0.
        zbest["Z"] = metadata['Z']
        zbest["ZERR"][:] = 0.
        zbest["ZWARN"][:] = 0
        zbest["SPECTYPE"][:] = "QSO"
        zbest["SUBTYPE"][:] = ""
        zbest["TARGETID"] = fibermap["TARGETID"]
        zbest["DELTACHI2"][:] = 25.

        hzbest = pyfits.convenience.table_to_hdu(zbest)
        hzbest.name = "ZBEST"
        hfmap = pyfits.convenience.table_to_hdu(fibermap)
        hfmap.name = "FIBERMAP"

        hdulist = pyfits.HDUList([pyfits.PrimaryHDU(), hzbest, hfmap])
        hdulist.writeto(zbest_filename, overwrite=True)
        hdulist.close()  # see if this helps with memory issue
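Several of these examples stress that the final rebinning uses resample_flux, which integrates the spectrum over each output bin instead of interpolating at bin centers. The toy function below (not the desispec implementation) illustrates the idea with a trapezoidal average over each output bin:

import numpy as np

def bin_integrated_resample(new_wave, wave, flux):
    """Toy flux-conserving resampling: average the input spectrum over each
    output bin by trapezoidal integration rather than sampling at bin centers."""
    edges = np.concatenate([[new_wave[0] - 0.5*(new_wave[1]-new_wave[0])],
                            0.5*(new_wave[1:] + new_wave[:-1]),
                            [new_wave[-1] + 0.5*(new_wave[-1]-new_wave[-2])]])
    out = np.zeros(new_wave.size)
    for i in range(new_wave.size):
        lo, hi = edges[i], edges[i+1]
        grid = np.linspace(lo, hi, 16)       #- fine sub-grid inside the output bin
        out[i] = np.trapz(np.interp(grid, wave, flux), grid) / (hi - lo)
    return out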